react-native-davoice 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +319 -0
- package/TTSRNBridge.podspec +38 -0
- package/android/.gradle/8.9/checksums/checksums.lock +0 -0
- package/android/.gradle/8.9/dependencies-accessors/gc.properties +0 -0
- package/android/.gradle/8.9/fileChanges/last-build.bin +0 -0
- package/android/.gradle/8.9/fileHashes/fileHashes.lock +0 -0
- package/android/.gradle/8.9/gc.properties +0 -0
- package/android/.gradle/buildOutputCleanup/buildOutputCleanup.lock +0 -0
- package/android/.gradle/buildOutputCleanup/cache.properties +2 -0
- package/android/.gradle/vcs-1/gc.properties +0 -0
- package/android/build.gradle +47 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar +0 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.md5 +1 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.sha1 +1 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.pom +38 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.pom.md5 +1 -0
- package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.pom.sha1 +1 -0
- package/android/settings.gradle +2 -0
- package/android/src/main/AndroidManifest.xml +14 -0
- package/android/src/main/java/com/davoice/rn/DaVoicePackage.java +29 -0
- package/android/src/main/java/com/davoice/stt/rn/STTModule.kt +208 -0
- package/android/src/main/java/com/davoice/tts/rn/DaVoiceTTSBridge.java +733 -0
- package/android/src/main/libs/MyLibrary-release.aar +0 -0
- package/app.plugin.js +60 -0
- package/ios/STTRNBridge/STTBridge.h +7 -0
- package/ios/STTRNBridge/STTBridge.m +130 -0
- package/ios/SpeechBridge/SpeechBridge.h +7 -0
- package/ios/SpeechBridge/SpeechBridge.m +761 -0
- package/ios/TTSRNBridge/DaVoiceTTSBridge.h +7 -0
- package/ios/TTSRNBridge/DaVoiceTTSBridge.m +177 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/Info.plist +44 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/DavoiceTTS +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Headers/DavoiceTTS-Swift.h +424 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Info.plist +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.abi.json +13253 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.private.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.swiftdoc +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/module.modulemap +4 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/DavoiceTTS +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Headers/DavoiceTTS-Swift.h +844 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Info.plist +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.abi.json +13253 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftdoc +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json +13253 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftdoc +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftinterface +213 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/module.modulemap +4 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeDirectory +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeRequirements +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeRequirements-1 +0 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeResources +282 -0
- package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeSignature +0 -0
- package/ios/TTSRNBridge/libphonemes.a +0 -0
- package/ios/TTSRNBridge/libucd.a +0 -0
- package/package.json +46 -0
- package/react-native.config.js +10 -0
- package/speech/index.ts +1055 -0
- package/stt/index.d.ts +54 -0
- package/stt/index.ts +222 -0
- package/tts/DaVoiceTTSBridge.d.ts +18 -0
- package/tts/DaVoiceTTSBridge.js +71 -0
- package/tts/index.d.ts +3 -0
- package/tts/index.js +4 -0
|
@@ -0,0 +1,761 @@
|
|
|
1
|
+
// SpeechBridge.m
|
|
2
|
+
#import "SpeechBridge.h"
|
|
3
|
+
#import <React/RCTLog.h>
|
|
4
|
+
#import <React/RCTConvert.h>
|
|
5
|
+
#import <AudioToolbox/AudioToolbox.h>
|
|
6
|
+
|
|
7
|
+
// Import your Swift classes (names as in your project)
|
|
8
|
+
#import <DaVoiceTTS/DaVoiceTTS-Swift.h> // DaVoiceTTS + STT live here in your setup
|
|
9
|
+
|
|
10
|
+
#import <AVFAudio/AVFAudio.h>
|
|
11
|
+
#import <Speech/Speech.h>
|
|
12
|
+
|
|
13
|
+
/// Decodes a base64-encoded string into NSData.
/// Returns nil when the input is nil, NSNull (as passed from JS), or not
/// valid base64 (NSData's initializer returns nil in that case).
static NSData *SB_Base64Decode(NSString *b64) {
    if (!b64 || (id)b64 == [NSNull null]) {
        return nil;
    }
    return [[NSData alloc] initWithBase64EncodedString:b64 options:0];
}
|
|
17
|
+
|
|
18
|
+
/// YES iff the user has already granted microphone (record) permission.
/// Reads the shared AVAudioSession's current recordPermission; does not prompt.
static BOOL SBHasMicPermission(void) {
    return [[AVAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;
}
|
|
22
|
+
|
|
23
|
+
/// YES iff speech-recognition authorization has already been granted.
/// Reads SFSpeechRecognizer's current authorization status; does not prompt.
static BOOL SBHasSpeechRecognitionPermission(void) {
    return [SFSpeechRecognizer authorizationStatus] == SFSpeechRecognizerAuthorizationStatusAuthorized;
}
|
|
27
|
+
|
|
28
|
+
// Make a mono Float32 AVAudioPCMBuffer from raw PCM payload (i16 or f32).
// Accepts interleaved or planar (non-interleaved) input and mixes down to
// mono by averaging channels. The channels==1 case takes the interleaved
// path (layouts are identical for a single channel).
//
// raw         - raw PCM bytes; must be a whole number of frames.
// sampleRate  - sample rate of the payload, used to build the output format.
// fmt         - "i16" (16-bit signed) or "f32" (32-bit float); case-insensitive.
// channels    - channel count, >= 1.
// interleaved - YES for [c0,c1,..,c0,c1,..] layout, NO for planar [all c0][all c1].
//
// Returns nil on nil/empty input, channels < 1, unknown fmt, a byte length
// that is not a whole number of frames, or if AVFAudio refuses to allocate
// the format/buffer (e.g. an unsupported sample rate) — previously those
// allocation failures were unchecked and would crash on dereference.
static AVAudioPCMBuffer *SB_MakeMonoF32Buffer(NSData *raw,
                                              double sampleRate,
                                              NSString *fmt,        // "i16" | "f32"
                                              NSInteger channels,   // >= 1
                                              BOOL interleaved)
{
    if (!raw || raw.length == 0 || channels < 1) return nil;

    // Target: mono float32, non-interleaved (AVAudio "standard" format).
    AVAudioFormat *dstFmt = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:sampleRate channels:1];
    if (!dstFmt) return nil;  // e.g. invalid sample rate

    NSUInteger frameCount = 0;

    if ([fmt.lowercaseString isEqualToString:@"i16"]) {
        // Each sample is 2 bytes.
        const NSUInteger bytesPerSample = 2;
        if (raw.length % (bytesPerSample * channels) != 0) return nil;
        frameCount = raw.length / (bytesPerSample * channels);

        AVAudioPCMBuffer *buf = [[AVAudioPCMBuffer alloc] initWithPCMFormat:dstFmt frameCapacity:(AVAudioFrameCount)frameCount];
        if (!buf) return nil;  // allocation can fail; avoid nil deref below
        buf.frameLength = (AVAudioFrameCount)frameCount;

        const int16_t *in = (const int16_t *)raw.bytes;
        float *out = buf.floatChannelData[0];
        const float scale = 1.0f / 32768.0f;

        if (interleaved || channels == 1) {
            // Interleaved: [c0,c1,.., c0,c1,..]
            for (NSUInteger f = 0; f < frameCount; ++f) {
                int64_t acc = 0;
                for (NSInteger ch = 0; ch < channels; ++ch) {
                    acc += in[f*channels + ch];
                }
                out[f] = (float)(acc / (double)channels) * scale;
            }
        } else {
            // Non-interleaved planar i16 (rare): [all c0][all c1]…
            const NSUInteger planeLen = frameCount;
            for (NSUInteger f = 0; f < frameCount; ++f) {
                int64_t acc = 0;
                for (NSInteger ch = 0; ch < channels; ++ch) {
                    acc += in[ch*planeLen + f];
                }
                out[f] = (float)(acc / (double)channels) * scale;
            }
        }
        return buf;
    }

    // f32 path
    if (![fmt.lowercaseString isEqualToString:@"f32"]) return nil;
    const NSUInteger bytesPerSample = 4;
    if (raw.length % (bytesPerSample * channels) != 0) return nil;
    frameCount = raw.length / (bytesPerSample * channels);

    AVAudioPCMBuffer *buf = [[AVAudioPCMBuffer alloc] initWithPCMFormat:dstFmt frameCapacity:(AVAudioFrameCount)frameCount];
    if (!buf) return nil;  // allocation can fail; avoid nil deref below
    buf.frameLength = (AVAudioFrameCount)frameCount;

    const float *in = (const float *)raw.bytes;
    float *out = buf.floatChannelData[0];

    if (interleaved || channels == 1) {
        for (NSUInteger f = 0; f < frameCount; ++f) {
            double acc = 0.0;
            for (NSInteger ch = 0; ch < channels; ++ch) {
                acc += in[f*channels + ch];
            }
            out[f] = (float)(acc / (double)channels);
        }
    } else {
        // Planar f32: [all c0][all c1]…
        const NSUInteger planeLen = frameCount;
        for (NSUInteger f = 0; f < frameCount; ++f) {
            double acc = 0.0;
            for (NSInteger ch = 0; ch < channels; ++ch) {
                acc += in[ch*planeLen + f];
            }
            out[f] = (float)(acc / (double)channels);
        }
    }
    return buf;
}
|
|
112
|
+
|
|
113
|
+
// Private state. SpeechBridge owns one STT and one DaVoiceTTS instance and
// forwards their events to JS (it conforms to STTDelegate for that).
@interface SpeechBridge () <STTDelegate>

// Engines; nil until initAll succeeds (or ensureSTT runs, for STT).
@property (nonatomic, strong, nullable) STT *stt;
@property (nonatomic, strong, nullable) DaVoiceTTS *tts;

// Set by RN's startObserving/stopObserving; gates all event emission.
@property (nonatomic, assign) BOOL hasListeners;

// initAll/destroyAll re-entrancy guards (atomic: touched from blocks).
@property (atomic, assign) BOOL initializing;
@property (atomic, assign) BOOL initialized;

// used only to gate TTS init until STT engine is “hot”
@property (atomic, assign) BOOL sttEngineHot;

@end
|
|
123
|
+
|
|
124
|
+
@implementation SpeechBridge
|
|
125
|
+
|
|
126
|
+
RCT_EXPORT_MODULE(SpeechBridge)
|
|
127
|
+
|
|
128
|
+
// We emit the union of STT + TTS events
|
|
129
|
+
/// Events this module can emit to JS: the union of STT events plus the
/// single TTS completion event.
- (NSArray<NSString *> *)supportedEvents
{
    NSArray<NSString *> *sttEvents = @[
        @"onSpeechResults",
        @"onSpeechStart",
        @"onSpeechPartialResults",
        @"onSpeechError",
        @"onSpeechEnd",
        @"onSpeechRecognized",
        @"onSpeechVolumeChanged",
    ];
    // TTS event
    return [sttEvents arrayByAddingObject:@"onFinishedSpeaking"];
}
|
|
144
|
+
|
|
145
|
+
// Module must be set up on the main queue, and all exported methods run there.
+ (BOOL)requiresMainQueueSetup
{
    return YES;
}

- (dispatch_queue_t)methodQueue
{
    return dispatch_get_main_queue();
}

// RN listener bookkeeping: only emit events while JS is listening.
- (void)startObserving
{
    self.hasListeners = YES;
}

- (void)stopObserving
{
    self.hasListeners = NO;
}
|
|
149
|
+
|
|
150
|
+
// Tear down engines when the bridge is deallocated.
// destroy in the safe order: TTS → STT
- (void)dealloc
{
    if (_tts != nil) {
        [_tts destroy];
        _tts = nil;
    }
    if (_stt != nil) {
        [_stt destroySpeech:nil];
        _stt = nil;
    }
}
|
|
156
|
+
|
|
157
|
+
#pragma mark - STTDelegate (forward all events)
|
|
158
|
+
|
|
159
|
+
/// STTDelegate: forwards every STT event straight to JS (when listening).
/// The first onSpeechStart also latches sttEngineHot (“engine hot”).
- (void)stt:(STT *)stt didEmitEvent:(NSString *)name body:(NSDictionary *)body
{
    // Use the first onSpeechStart as the “engine hot” latch.
    BOOL isStart = [name isEqualToString:@"onSpeechStart"];
    if (isStart && !self.sttEngineHot) {
        self.sttEngineHot = YES;
    }
    if (!self.hasListeners) return;
    [self sendEventWithName:name body:(body ?: @{})];
}
|
|
169
|
+
|
|
170
|
+
#pragma mark - Helpers
|
|
171
|
+
|
|
172
|
+
/// Resolves a JS-supplied path/URL string into a local file URL.
///
/// Supported inputs:
///  - http(s):// — downloaded synchronously to a tmp file (NOTE(review):
///    dataWithContentsOfURL blocks the calling queue; callers run this on
///    the main queue via methodQueue — consider moving off-main).
///  - file://    — parsed as-is.
///  - asset:/x   — looked up in the main bundle and copied to tmp.
///  - otherwise  — treated as a direct local filesystem path.
///
/// Returns nil for nil/NSNull/empty input or any resolution failure.
///
/// Fixes vs. previous revision: the bundle-copy call was corrupted to
/// `error:©Error` (an HTML-entity mangling of `&copyError`), which is not
/// valid Objective-C; also the copy result is now judged by the method's
/// BOOL return value rather than by whether the error pointer was written.
- (NSURL *)resolveLocalURLFromPathOrURL:(NSString *)pathOrURL
{
    if (!pathOrURL || (id)pathOrURL == [NSNull null] || pathOrURL.length == 0) return nil;

    // ✅ Handle http(s): download to tmp and return file URL
    if ([pathOrURL hasPrefix:@"http://"] || [pathOrURL hasPrefix:@"https://"]) {
        NSURL *remoteURL = [NSURL URLWithString:pathOrURL];
        if (!remoteURL) return nil;

        NSData *data = [NSData dataWithContentsOfURL:remoteURL];
        if (!data) return nil;

        // keep extension if possible
        NSString *ext = remoteURL.pathExtension.length ? remoteURL.pathExtension : @"bin";
        NSString *tempName = [NSString stringWithFormat:@"rn_asset_%f.%@", [[NSDate date] timeIntervalSince1970], ext];
        NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:tempName];

        [[NSFileManager defaultManager] removeItemAtPath:tempPath error:nil];
        if (![data writeToFile:tempPath atomically:YES]) return nil;

        return [NSURL fileURLWithPath:tempPath];
    }

    // Already a file URL
    if ([pathOrURL hasPrefix:@"file://"]) {
        return [NSURL URLWithString:pathOrURL];
    }

    // RN bundled asset path: asset:/xxx -> copy from bundle to tmp so we get a file URL
    if ([pathOrURL hasPrefix:@"asset:/"]) {
        NSString *assetName = [pathOrURL stringByReplacingOccurrencesOfString:@"asset:/" withString:@""];
        NSString *bundlePath = [[NSBundle mainBundle] pathForResource:[assetName stringByDeletingPathExtension]
                                                               ofType:[assetName pathExtension]];
        if (!bundlePath) return nil;

        NSString *ext = [assetName pathExtension];
        if (ext.length == 0) ext = @"bin";

        NSString *tempName = [NSString stringWithFormat:@"asset_%f.%@", [[NSDate date] timeIntervalSince1970], ext];
        NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:tempName];

        // overwrite if exists
        [[NSFileManager defaultManager] removeItemAtPath:tempPath error:nil];

        NSError *copyError = nil;
        // Check the BOOL return, not the error pointer (Cocoa convention).
        if (![[NSFileManager defaultManager] copyItemAtPath:bundlePath toPath:tempPath error:&copyError]) {
            return nil;
        }

        return [NSURL fileURLWithPath:tempPath];
    }

    // Otherwise: assume direct local path
    return [NSURL fileURLWithPath:pathOrURL];
}
|
|
225
|
+
|
|
226
|
+
/// Lazily creates the STT engine and registers self as its delegate.
/// No-op when an engine already exists.
- (void)ensureSTT
{
    if (self.stt) return;
    STT *engine = [STT new];
    engine.delegate = self;
    self.stt = engine;
}
|
|
233
|
+
|
|
234
|
+
/// Hooks the TTS "last utterance finished" callback so it surfaces to JS as
/// the onFinishedSpeaking event. Weak/strong dance avoids a retain cycle
/// (self → tts → block → self).
- (void)wireTTSFinishedCallback
{
    if (!self.tts) return;

    __weak typeof(self) weakSelf = self;
    self.tts.onLastUtteranceFinished = ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf || !strongSelf.hasListeners) return;

        // Match the old, working behavior: emit on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            [strongSelf sendEventWithName:@"onFinishedSpeaking" body:@{}];
        });
    };
}
|
|
249
|
+
|
|
250
|
+
/// Resolves YES/NO for current microphone permission; never prompts.
RCT_EXPORT_METHOD(hasMicPermissions:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL granted = SBHasMicPermission();
    resolve(@(granted));
}
|
|
255
|
+
|
|
256
|
+
/// Requests microphone permission, resolving @YES/@NO.
/// Short-circuits when the status is already decided (granted/denied).
/// When undetermined, shows the system prompt; if wait_timeout (ms) > 0 and
/// the user hasn't answered in time, resolves with the status at that
/// moment. `didResolve` guards the race between timeout and prompt callback.
RCT_EXPORT_METHOD(requestMicPermissions:(nonnull NSNumber *)wait_timeout
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        AVAudioSession *session = [AVAudioSession sharedInstance];
        switch (session.recordPermission) {
            case AVAudioSessionRecordPermissionGranted:
                resolve(@YES);
                return;
            case AVAudioSessionRecordPermissionDenied:
                resolve(@NO);
                return;
            default:
                break;  // undetermined: fall through to prompt
        }

        NSTimeInterval timeoutSeconds = MAX(wait_timeout.doubleValue, 0.0) / 1000.0;
        __block BOOL didResolve = NO;
        void (^finish)(BOOL) = ^(BOOL granted) {
            if (didResolve) return;  // first caller (timeout vs. prompt) wins
            didResolve = YES;
            resolve(@(granted));
        };

        if (timeoutSeconds > 0.0) {
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutSeconds * NSEC_PER_SEC)),
                           dispatch_get_main_queue(), ^{
                finish(SBHasMicPermission());
            });
        }

        [session requestRecordPermission:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                finish(granted);
            });
        }];
    });
}
|
|
295
|
+
|
|
296
|
+
/// Resolves YES/NO for current speech-recognition permission; never prompts.
RCT_EXPORT_METHOD(hasSpeechRecognitionPermissions:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL granted = SBHasSpeechRecognitionPermission();
    resolve(@(granted));
}
|
|
301
|
+
|
|
302
|
+
/// Requests speech-recognition authorization, resolving @YES/@NO.
/// Short-circuits when already authorized, denied, or restricted.
/// When undetermined, shows the system prompt; if wait_timeout (ms) > 0 and
/// the user hasn't answered in time, resolves with the status at that
/// moment. `didResolve` guards the race between timeout and prompt callback.
RCT_EXPORT_METHOD(requestSpeechRecognitionPermissions:(nonnull NSNumber *)wait_timeout
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        SFSpeechRecognizerAuthorizationStatus status = [SFSpeechRecognizer authorizationStatus];
        switch (status) {
            case SFSpeechRecognizerAuthorizationStatusAuthorized:
                resolve(@YES);
                return;
            case SFSpeechRecognizerAuthorizationStatusDenied:
            case SFSpeechRecognizerAuthorizationStatusRestricted:
                resolve(@NO);
                return;
            default:
                break;  // undetermined: fall through to prompt
        }

        NSTimeInterval timeoutSeconds = MAX(wait_timeout.doubleValue, 0.0) / 1000.0;
        __block BOOL didResolve = NO;
        void (^finish)(BOOL) = ^(BOOL granted) {
            if (didResolve) return;  // first caller (timeout vs. prompt) wins
            didResolve = YES;
            resolve(@(granted));
        };

        if (timeoutSeconds > 0.0) {
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutSeconds * NSEC_PER_SEC)),
                           dispatch_get_main_queue(), ^{
                finish(SBHasSpeechRecognitionPermission());
            });
        }

        [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus updatedStatus) {
            dispatch_async(dispatch_get_main_queue(), ^{
                finish(updatedStatus == SFSpeechRecognizerAuthorizationStatusAuthorized);
            });
        }];
    });
}
|
|
341
|
+
|
|
342
|
+
#pragma mark - Unified API
|
|
343
|
+
|
|
344
|
+
/// Activates the license on both engines; resolves YES only when BOTH the
/// TTS and the STT activation succeed. Rejects on a missing/empty key.
RCT_EXPORT_METHOD(setLicense:(NSString *)licenseKey
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL missing = (licenseKey == nil || (id)licenseKey == [NSNull null] || licenseKey.length == 0);
    if (missing) {
        reject(@"invalid_args", @"Missing licenseKey", nil);
        return;
    }

    dispatch_async(dispatch_get_main_queue(), ^{
        BOOL ttsOk = [DaVoiceTTS activateLicenseWithLicenseKey:licenseKey];
        [self ensureSTT];
        BOOL sttOk = [self.stt setLicenseWithLicenseKey:licenseKey];
        resolve(@(ttsOk && sttOk));
    });
}
|
|
360
|
+
|
|
361
|
+
/// Checks license validity (via LicenseManager) without activating it.
/// Rejects on a missing/empty key, otherwise resolves YES/NO.
RCT_EXPORT_METHOD(isLicenseValid:(NSString *)licenseKey
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL missing = (licenseKey == nil || (id)licenseKey == [NSNull null] || licenseKey.length == 0);
    if (missing) {
        reject(@"invalid_args", @"Missing licenseKey", nil);
        return;
    }

    dispatch_async(dispatch_get_main_queue(), ^{
        BOOL valid = [LicenseManager isLicenseValidWithLicenseKey:licenseKey];
        resolve(@(valid));
    });
}
|
|
375
|
+
|
|
376
|
+
/// initAll({ locale: "en-US", model: "/path/model.onnx", timeoutMs?: 8000 })
|
|
377
|
+
/// initAll({ locale: "en-US", model: "/path/model.onnx", timeoutMs?: 8000 })
///
/// Initializes both engines, STT first then TTS. Guarded against re-entry
/// via initializing/initialized. Model path handling:
///  - bare "*.onnx" name (no slashes, no scheme): passed through untouched —
///    the native core does its own search; no existence check here.
///  - bare "*.dm" name: try normal resolution first, fall back to the bare
///    name so the native core can search (same as .onnx behavior).
///  - anything else: resolved via resolveLocalURLFromPathOrURL: and, for
///    local file URLs, checked for existence.
/// On any TTS-side failure the freshly started STT is torn down again so a
/// later initAll starts from a clean slate.
RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self.initializing) { resolve(@"already_initializing"); return; }
        if (self.initialized)  { resolve(@"already_initialized");  return; }

        self.initializing = YES;

        NSString *locale = opts[@"locale"] ?: @"en-US";
        NSString *onboardingJsonPath = opts[@"onboardingJsonPath"];
        NSString *modelPath = opts[@"model"];
        if (modelPath.length == 0) {
            self.initializing = NO;
            reject(@"invalid_args", @"Missing 'model' in initAll()", nil);
            return;
        }

        // 1) STT first
        if (!self.stt) {
            self.stt = [STT new];
            self.stt.delegate = self;
        }
        BOOL haveOnboarding = (onboardingJsonPath &&
                               (id)onboardingJsonPath != [NSNull null] &&
                               onboardingJsonPath.length > 0);
        if (haveOnboarding) {
            [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
        } else {
            [self.stt startSpeechWithLocaleStr:locale];
        }

        // 2) TTS next — classify the model path first.
        NSString *lowered = modelPath.lowercaseString;
        BOOL bareName = ([modelPath rangeOfString:@"/"].location == NSNotFound) &&
                        ([modelPath rangeOfString:@"://"].location == NSNotFound) &&
                        ![modelPath hasPrefix:@"file://"];

        // ✅ BACKWARD COMPAT: plain "letters.onnx" (no slashes, no scheme) -> DO EXACTLY THIS.
        // Do NOT resolve, do NOT check existence here. Native core will search.
        BOOL bareOnnxName = bareName && [lowered hasSuffix:@".onnx"];
        // ✅ Bare .dm name — try resolution first, fall back to bare name.
        BOOL bareDmName = bareName && [lowered hasSuffix:@".dm"];

        NSURL *modelURL = nil;
        if (bareOnnxName) {
            modelURL = [NSURL fileURLWithPath:modelPath];
        } else {
            modelURL = [self resolveLocalURLFromPathOrURL:modelPath];

            // ✅ Fallback for bare .dm: if resolution failed, let native core search (same as .onnx behavior).
            if (!modelURL && bareDmName) {
                NSLog(@"[TTS] INIT: bare .dm name could not be resolved, falling back to bare path: %@", modelPath);
                modelURL = [NSURL fileURLWithPath:modelPath];
            }

            if (!modelURL) {
                self.initializing = NO;
                [self.stt destroySpeech:nil];
                self.stt = nil;
                reject(@"bad_model", [NSString stringWithFormat:@"Could not resolve model path: %@", modelPath], nil);
                return;
            }

            // Verify file exists for local file URLs (skip for bare .dm fallback — native core will search)
            BOOL mustExist = !bareDmName && modelURL.isFileURL;
            if (mustExist && ![[NSFileManager defaultManager] fileExistsAtPath:modelURL.path]) {
                self.initializing = NO;
                [self.stt destroySpeech:nil];
                self.stt = nil;
                reject(@"model_missing", [NSString stringWithFormat:@"Model file missing: %@", modelURL.path], nil);
                return;
            }
        }
        NSLog(@"[TTS] INIT: modelURL == %@", modelURL);

        NSError *err = nil;
        self.tts = [[DaVoiceTTS alloc] initWithModel:modelURL error:&err];
        if (err || !self.tts) {
            self.initializing = NO;
            [self.stt destroySpeech:nil];
            self.stt = nil;
            reject(@"tts_init_failed", err.localizedDescription ?: @"TTS init failed", err);
            return;
        }

        [self wireTTSFinishedCallback];

        self.initialized = YES;
        self.initializing = NO;
        resolve(@"initialized");
    });
}
|
|
474
|
+
|
|
475
|
+
// --- SpeechRecognitionLite pause/unpause (counter-based) ---
|
|
476
|
+
|
|
477
|
+
/// Counter-based lite pause of speech recognition.
/// No-op (still reports success) when STT isn't initialized yet, to avoid
/// JS error spam.
RCT_EXPORT_METHOD(pauseSpeechRecognitionLite:(RCTResponseSenderBlock)callback)
{
    if (self.stt) {
        [self.stt pauseSpeechRecognitionLite];
    }
    if (callback) callback(@[@(YES)]);
}
|
|
484
|
+
|
|
485
|
+
/// Counter-based lite unpause; `times` is forwarded to the STT core.
/// No-op (still reports success) when STT isn't initialized yet, to avoid
/// JS error spam.
RCT_EXPORT_METHOD(unPauseSpeechRecognitionLite:(nonnull NSNumber *)times
                  callback:(RCTResponseSenderBlock)callback)
{
    if (self.stt) {
        [self.stt unPauseSpeechRecognitionLite:times];
    }
    if (callback) callback(@[@(YES)]);
}
|
|
493
|
+
|
|
494
|
+
// Promise-based pause that resolves ONLY when iOS is actually settled in playback (mic released)
|
|
495
|
+
/// Promise-based pause that resolves ONLY once iOS has actually settled in
/// playback (mic released). Resolves {ok:NO, reason:"no_stt"} when STT is
/// missing. timeoutMs defaults to 1500 when null/<=0; STT.swift does the
/// main-queue polling internally.
RCT_EXPORT_METHOD(pauseMicrophoneAsync:(nonnull NSNumber *)timeoutMs
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.stt) {
        resolve(@{@"ok": @(NO), @"reason": @"no_stt"});
        return;
    }

    // Default if caller passes null/0
    NSNumber *timeout = timeoutMs;
    if (!timeout || timeout.doubleValue <= 0) {
        timeout = @(1500);
    }

    [self.stt pauseMicrophoneAndWait:timeout completion:^(BOOL ok, NSString * _Nullable reason) {
        resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
    }];
}
|
|
510
|
+
|
|
511
|
+
// Promise-based unpause that resolves ONLY when engine+task are live again
|
|
512
|
+
/// Promise-based unpause that resolves ONLY once engine+task are live again.
/// Resolves {ok:NO, reason:"no_stt"} when STT is missing. timeoutMs defaults
/// to 2500 when null/<=0.
RCT_EXPORT_METHOD(unPauseMicrophoneAsync:(nonnull NSNumber *)timeoutMs
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.stt) {
        resolve(@{@"ok": @(NO), @"reason": @"no_stt"});
        return;
    }

    NSNumber *timeout = timeoutMs;
    if (!timeout || timeout.doubleValue <= 0) {
        timeout = @(2500);
    }

    [self.stt unPauseMicrophoneAndWait:timeout completion:^(BOOL ok, NSString * _Nullable reason) {
        resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
    }];
}
|
|
525
|
+
|
|
526
|
+
// ADD — pause mic
|
|
527
|
+
/// Fire-and-forget mic pause. Callback receives NO when STT is missing,
/// YES otherwise.
RCT_EXPORT_METHOD(pauseMicrophone:(RCTResponseSenderBlock)callback)
{
    if (!self.stt) {
        if (callback) callback(@[@(NO)]);
        return;
    }
    [self.stt pauseMicrophone];  // no-argument variant
    if (callback) callback(@[@(YES)]);
}
|
|
533
|
+
|
|
534
|
+
|
|
535
|
+
// ADD — unpause mic
|
|
536
|
+
/// Fire-and-forget mic unpause. Callback receives NO when STT is missing,
/// YES otherwise.
RCT_EXPORT_METHOD(unPauseMicrophone:(RCTResponseSenderBlock)callback)
{
    if (!self.stt) {
        if (callback) callback(@[@(NO)]);
        return;
    }
    [self.stt unPauseMicrophone];  // no-argument variant
    if (callback) callback(@[@(YES)]);
}
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
/// Tears down both engines (reverse of init order: TTS -> STT).
/// Resolves "already_destroyed" when nothing is initialized. `initializing`
/// is raised during teardown to block re-entrant initAll/destroyAll.
/// Engine teardown is wrapped in @try because destroy may be called on
/// partially-initialized engines.
RCT_EXPORT_METHOD(destroyAll:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        BOOL nothingToDo = (!self.initialized && !self.initializing);
        if (nothingToDo) {
            resolve(@"already_destroyed");
            return;
        }

        // prevent re-entry during destroy
        self.initializing = YES;

        // Destroy in reverse order: TTS -> STT
        @try {
            [self.tts stopSpeaking];
            [self.tts destroy];
        } @catch (__unused id e) {
        }
        self.tts = nil;

        @try {
            [self.stt destroySpeech:nil];
        } @catch (__unused id e) {
        }
        self.stt = nil;

        self.initialized = NO;
        self.initializing = NO;
        resolve(@"destroyed");
    });
}
|
|
567
|
+
|
|
568
|
+
#pragma mark - Convenience passthroughs (optional)
|
|
569
|
+
|
|
570
|
+
/// Starts speech recognition for the given locale (creating STT lazily).
/// Callback's single argument is the error flag (NO = no error).
RCT_EXPORT_METHOD(startSpeech:(NSString *)locale
                  callback:(RCTResponseSenderBlock)callback)
{
    [self ensureSTT];
    [self.stt startSpeechWithLocaleStr:locale];
    if (callback) callback(@[@(NO)]);
}
|
|
577
|
+
|
|
578
|
+
/// Starts speech recognition, optionally with a speaker-verification
/// onboarding JSON path. Falls back to the plain start when the path is
/// nil/NSNull/empty. Callback's single argument is the error flag (NO = ok).
RCT_EXPORT_METHOD(startSpeechWithSVOnboardingJson:(NSString *)locale
                  onboardingJsonPath:(NSString *)onboardingJsonPath
                  callback:(RCTResponseSenderBlock)callback)
{
    [self ensureSTT];
    BOOL haveOnboarding = (onboardingJsonPath &&
                           (id)onboardingJsonPath != [NSNull null] &&
                           onboardingJsonPath.length > 0);
    if (haveOnboarding) {
        [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
    } else {
        [self.stt startSpeechWithLocaleStr:locale];
    }
    if (callback) callback(@[@(NO)]);
}
|
|
590
|
+
|
|
591
|
+
/// Stops recognition. Callback's single argument is the error flag
/// (always NO here; the STT completion result is not forwarded).
RCT_EXPORT_METHOD(stopSpeech:(RCTResponseSenderBlock)callback)
{
    if (!self.stt) {
        if (callback) callback(@[@(NO)]);
        return;
    }
    [self.stt stopSpeech:^(BOOL ok) {
        if (callback) callback(@[@(NO)]);
    }];
}
|
|
596
|
+
|
|
597
|
+
/// Cancels recognition. Callback's single argument is the error flag
/// (always NO here; the STT completion result is not forwarded).
RCT_EXPORT_METHOD(cancelSpeech:(RCTResponseSenderBlock)callback)
{
    if (!self.stt) {
        if (callback) callback(@[@(NO)]);
        return;
    }
    [self.stt cancelSpeech:^(BOOL ok) {
        if (callback) callback(@[@(NO)]);
    }];
}
|
|
602
|
+
|
|
603
|
+
/// Asks the STT engine (created lazily) whether recognition is available.
/// Callback receives [1|0, null] — availability flag plus a null error slot.
RCT_EXPORT_METHOD(isSpeechAvailable:(RCTResponseSenderBlock)callback)
{
    [self ensureSTT];
    [self.stt isSpeechAvailable:^(BOOL ok) {
        if (callback) callback(@[@(ok ? 1 : 0), [NSNull null]]);
    }];
}
|
|
610
|
+
|
|
611
|
+
/// Callback receives 1 when STT exists and is currently recognizing,
/// 0 otherwise (including when STT was never created).
RCT_EXPORT_METHOD(isRecognizing:(RCTResponseSenderBlock)callback)
{
    BOOL active = (self.stt != nil) && [self.stt isRecognizing];
    if (callback) callback(@[@(active ? 1 : 0)]);
}
|
|
616
|
+
|
|
617
|
+
/// Speaks `text` with the given speaker id and speed.
/// Rejects when TTS hasn't been initialized (call initAll first).
/// Speed is sanitized to 1.0 when nil, non-finite, or <= 0.
/// NOTE: core native not changed yet; speed will actually be applied in the
/// Swift core next. Always forwards 3 args (text, sid, speed).
RCT_EXPORT_METHOD(speak:(NSString *)text
                  speakerId:(nonnull NSNumber *)speakerId
                  speed:(nonnull NSNumber *)speed
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.tts) {
        reject(@"no_tts", @"Call initAll first", nil);
        return;
    }

    float effectiveSpeed = speed ? speed.floatValue : 1.0f;
    if (!isfinite(effectiveSpeed) || effectiveSpeed <= 0.0f) {
        effectiveSpeed = 1.0f;
    }

    [self.tts speak:text sid:speakerId.intValue speed:effectiveSpeed];
    resolve(@"Speaking");
}
|
|
633
|
+
|
|
634
|
+
/// Stops any current TTS playback.
/// Rejects with "no_tts" if initAll has not been called; otherwise resolves
/// with @"Stopped" after telling the engine to stop.
RCT_EXPORT_METHOD(stopSpeaking:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.tts) {
        reject(@"no_tts", @"Call initAll first", nil);
        return;
    }

    [self.tts stopSpeaking];
    resolve(@"Stopped");
}
|
|
641
|
+
|
|
642
|
+
/// playWav(pathOrURL: string, markAsLast?: boolean)
///
/// Resolves `pathOrURL` to a local file URL and queues it for playback through
/// the TTS engine. Accepted forms:
///   * http(s)://  — downloaded to a temp .wav first
///   * asset:/     — looked up in the main bundle, copied to a temp file
///   * file://     — used as-is
///   * anything else — treated as a plain filesystem path
/// Resolves with @"queued" once handed to the engine; rejects on any failure
/// (no_tts, bad_path, bad_url, download_failed, write_failed, asset_missing,
/// asset_copy_failed, file_missing).
RCT_EXPORT_METHOD(playWav:(NSString *)pathOrURL
                  markAsLast:(nonnull NSNumber *)markAsLast
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.tts) {
        reject(@"no_tts", @"Call initAll first", nil);
        return;
    }

    if (pathOrURL == nil || pathOrURL.length == 0) {
        reject(@"bad_path", @"Empty pathOrURL", nil);
        return;
    }

    NSURL *fileURL = nil;

    // 1️⃣ Handle http(s) URLs — download to temporary file first
    if ([pathOrURL hasPrefix:@"http://"] || [pathOrURL hasPrefix:@"https://"]) {
        NSLog(@"[TTS] Downloading asset from URL: %@", pathOrURL);
        NSURL *remoteURL = [NSURL URLWithString:pathOrURL];
        if (!remoteURL) {
            reject(@"bad_url", @"Invalid remote URL", nil);
            return;
        }

        // NOTE(review): synchronous download — this blocks the calling (bridge)
        // thread for the duration of the transfer; consider NSURLSession if this
        // path is hit often or with large files.
        NSData *data = [NSData dataWithContentsOfURL:remoteURL];
        if (!data) {
            reject(@"download_failed", @"Failed to download remote asset", nil);
            return;
        }

        // Timestamped name avoids collisions between successive downloads.
        NSString *tempName = [NSString stringWithFormat:@"rn_asset_%f.wav", [[NSDate date] timeIntervalSince1970]];
        NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:tempName];
        if (![data writeToFile:tempPath atomically:YES]) {
            reject(@"write_failed", @"Failed to write temporary file", nil);
            return;
        }
        fileURL = [NSURL fileURLWithPath:tempPath];
        NSLog(@"[TTS] Downloaded to temp file: %@", tempPath);
    }

    // 2️⃣ Handle bundled asset:/ paths (copied from main bundle)
    else if ([pathOrURL hasPrefix:@"asset:/"]) {
        NSString *assetName = [pathOrURL stringByReplacingOccurrencesOfString:@"asset:/" withString:@""];
        NSLog(@"[TTS] Detected bundled asset: %@", assetName);
        NSString *bundlePath = [[NSBundle mainBundle] pathForResource:[assetName stringByDeletingPathExtension]
                                                               ofType:[assetName pathExtension]];
        if (!bundlePath) {
            reject(@"asset_missing", [NSString stringWithFormat:@"Asset not found in bundle: %@", assetName], nil);
            return;
        }
        // Copy to temp file so we have a writable/accessible URL
        NSString *tempName = [NSString stringWithFormat:@"asset_%f.wav", [[NSDate date] timeIntervalSince1970]];
        NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:tempName];
        NSError *copyError = nil;
        // FIX: was `error:©Error` — a mangled "&copy;" HTML entity for
        // `error:&copyError`, which does not compile. Restored the address-of,
        // and check the BOOL return rather than the error pointer, per Cocoa
        // error-handling conventions.
        BOOL copied = [[NSFileManager defaultManager] copyItemAtPath:bundlePath
                                                              toPath:tempPath
                                                               error:&copyError];
        if (!copied) {
            reject(@"asset_copy_failed", copyError.localizedDescription, copyError);
            return;
        }
        fileURL = [NSURL fileURLWithPath:tempPath];
        NSLog(@"[TTS] Copied bundled asset to temp: %@", tempPath);
    }

    // 3️⃣ Handle file:// URLs
    else if ([pathOrURL hasPrefix:@"file://"]) {
        fileURL = [NSURL URLWithString:pathOrURL];
    }

    // 4️⃣ Otherwise assume direct local path
    else {
        fileURL = [NSURL fileURLWithPath:pathOrURL];
    }

    // 5️⃣ Verify existence (fall back to the raw input in the message when the
    // file:// URL failed to parse and fileURL is nil).
    if (!fileURL || ![[NSFileManager defaultManager] fileExistsAtPath:fileURL.path]) {
        reject(@"file_missing", [NSString stringWithFormat:@"File missing: %@", fileURL.path ?: pathOrURL], nil);
        return;
    }

    // 6️⃣ Play through TTS engine (queued)
    NSLog(@"[TTS] Playing file via DaVoiceTTS: %@", fileURL.path);
    [self.tts playWav:fileURL markAsLastUtterance:markAsLast.boolValue];
    resolve(@"queued");
}
|
|
729
|
+
|
|
730
|
+
/// playBuffer(desc: { base64, sampleRate, channels?, interleaved?, format: "i16" | "f32", markAsLast? })
///
/// Decodes a base64-encoded PCM payload into an AVAudioPCMBuffer and enqueues
/// it on the TTS engine. Defaults when omitted: channels = 1, interleaved = YES,
/// markAsLast = YES. Rejects with no_tts / invalid_args / bad_base64 / bad_buffer;
/// resolves with @"queued" on success.
RCT_EXPORT_METHOD(playBuffer:(NSDictionary *)desc
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    if (!self.tts) {
        reject(@"no_tts", @"Call initAll first", nil);
        return;
    }

    // Pull and coerce the descriptor fields via RCTConvert.
    NSString *base64String = [RCTConvert NSString:desc[@"base64"]];
    NSNumber *sampleRate = [RCTConvert NSNumber:desc[@"sampleRate"]];
    NSString *format = [RCTConvert NSString:desc[@"format"]];
    NSNumber *channels = desc[@"channels"] ? [RCTConvert NSNumber:desc[@"channels"]] : @(1);
    NSNumber *interleaved = desc[@"interleaved"] ? [RCTConvert NSNumber:desc[@"interleaved"]] : @(YES);
    NSNumber *markAsLast = desc[@"markAsLast"] ? [RCTConvert NSNumber:desc[@"markAsLast"]] : @(YES);

    // The three required fields must all be present.
    if (!base64String || !sampleRate || !format) {
        reject(@"invalid_args", @"Missing one of base64/sampleRate/format", nil);
        return;
    }

    NSData *rawPCM = SB_Base64Decode(base64String);
    if (!rawPCM) {
        reject(@"bad_base64", @"Could not decode base64 payload", nil);
        return;
    }

    AVAudioPCMBuffer *pcmBuffer = SB_MakeMonoF32Buffer(rawPCM,
                                                       sampleRate.doubleValue,
                                                       format,
                                                       channels.integerValue,
                                                       interleaved.boolValue);
    if (!pcmBuffer) {
        reject(@"bad_buffer", @"Unsupported PCM layout or empty data", nil);
        return;
    }

    // Hand over to DaVoiceTTS (it will resample/normalize and enqueue via AEC graph)
    [self.tts playBuffer:pcmBuffer markAsLastUtterance:markAsLast.boolValue];
    resolve(@"queued");
}
|
|
760
|
+
|
|
761
|
+
@end
|