react-native-davoice 1.0.9 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23):
  1. package/TTSRNBridge.podspec +1 -1
  2. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar +0 -0
  3. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.md5 +1 -1
  4. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.sha1 +1 -1
  5. package/ios/SpeechBridge/SpeechBridge.m +134 -28
  6. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/DavoiceTTS +0 -0
  7. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Headers/DavoiceTTS-Swift.h +2 -0
  8. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.abi.json +5201 -5055
  9. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.private.swiftinterface +14 -12
  10. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.swiftinterface +14 -12
  11. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/DavoiceTTS +0 -0
  12. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Headers/DavoiceTTS-Swift.h +4 -0
  13. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.abi.json +8439 -8293
  14. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface +82 -80
  15. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftinterface +82 -80
  16. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json +8439 -8293
  17. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface +82 -80
  18. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftinterface +82 -80
  19. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeDirectory +0 -0
  20. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeRequirements-1 +0 -0
  21. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeResources +27 -27
  22. package/package.json +1 -1
  23. package/speech/index.ts +31 -8
@@ -2,7 +2,7 @@ require 'json'
2
2
 
3
3
  Pod::Spec.new do |s|
4
4
  s.name = "TTSRNBridge"
5
- s.version = "1.0.192" # Update to your package version
5
+ s.version = "1.0.201" # Update to your package version
6
6
  s.summary = "TTS for React Native."
7
7
  s.description = <<-DESC
8
8
  A React Native module for tts .
@@ -1 +1 @@
1
- 4dcec1844fb7ccbbe7cc17176ace80ef tts-1.0.0.aar
1
+ 42672253290af681c13e0c48a8ea05dd tts-1.0.0.aar
@@ -1 +1 @@
1
- f077da0fe66163982b6bd9114e106f015fbdb95e tts-1.0.0.aar
1
+ db50ab0dec3bc798242d72f812bf5d7b4f87ed0d tts-1.0.0.aar
@@ -25,14 +25,6 @@ static BOOL SBHasSpeechRecognitionPermission(void) {
25
25
  return status == SFSpeechRecognizerAuthorizationStatusAuthorized;
26
26
  }
27
27
 
28
- static void SBLog(NSString *format, ...) {
29
- va_list args;
30
- va_start(args, format);
31
- NSString *msg = [[NSString alloc] initWithFormat:format arguments:args];
32
- va_end(args);
33
- NSLog(@"[SpeechBridge] %@", msg);
34
- }
35
-
36
28
  // Make a mono Float32 AVAudioPCMBuffer from raw PCM payload (i16 or f32).
37
29
  // We accept either interleaved or non-interleaved input and mixdown to mono
38
30
  // (DaVoiceTTS.playBuffer will resample / normalize as needed).
@@ -255,6 +247,36 @@ RCT_EXPORT_MODULE(SpeechBridge)
255
247
  };
256
248
  }
257
249
 
250
+ - (void)waitForSTTEngineHotWithTimeoutMs:(NSNumber *)timeoutMs
251
+ completion:(void (^)(BOOL ok))completion
252
+ {
253
+ NSNumber *t = timeoutMs ?: @(2500);
254
+ if (t.doubleValue <= 0) t = @(2500);
255
+
256
+ CFTimeInterval deadline = CACurrentMediaTime() + MAX(0.1, t.doubleValue / 1000.0);
257
+
258
+ __weak typeof(self) weakSelf = self;
259
+ __block void (^poll)(void) = ^{
260
+ __strong typeof(weakSelf) strongSelf = weakSelf;
261
+ if (!strongSelf) {
262
+ completion(NO);
263
+ return;
264
+ }
265
+ if (strongSelf.sttEngineHot) {
266
+ completion(YES);
267
+ return;
268
+ }
269
+ if (CACurrentMediaTime() >= deadline) {
270
+ completion(NO);
271
+ return;
272
+ }
273
+ dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.05 * NSEC_PER_SEC)),
274
+ dispatch_get_main_queue(), poll);
275
+ };
276
+
277
+ dispatch_async(dispatch_get_main_queue(), poll);
278
+ }
279
+
258
280
  RCT_EXPORT_METHOD(hasMicPermissions:(RCTPromiseResolveBlock)resolve
259
281
  rejecter:(RCTPromiseRejectBlock)reject)
260
282
  {
@@ -391,10 +413,12 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
391
413
  if (self.initialized) { resolve(@"already_initialized"); return; }
392
414
 
393
415
  self.initializing = YES;
416
+ self.sttEngineHot = NO;
394
417
 
395
418
  NSString *locale = opts[@"locale"] ?: @"en-US";
396
419
  NSString *onboardingJsonPath = opts[@"onboardingJsonPath"];
397
420
  NSString *modelPath = opts[@"model"];
421
+ NSNumber *timeoutMs = opts[@"timeoutMs"];
398
422
  if (modelPath.length == 0) {
399
423
  self.initializing = NO;
400
424
  reject(@"invalid_args", @"Missing 'model' in initAll()", nil);
@@ -469,6 +493,7 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
469
493
  dispatch_async(dispatch_get_main_queue(), ^{
470
494
  if (err || !tts) {
471
495
  self.initializing = NO;
496
+ self.sttEngineHot = NO;
472
497
  [self.stt destroySpeech:nil];
473
498
  self.stt = nil;
474
499
  reject(@"tts_init_failed", err.localizedDescription ?: @"TTS init failed", err);
@@ -477,10 +502,16 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
477
502
 
478
503
  self.tts = tts;
479
504
  [self wireTTSFinishedCallback];
480
-
481
- self.initialized = YES;
482
- self.initializing = NO;
483
- resolve(@"initialized");
505
+ [self waitForSTTEngineHotWithTimeoutMs:timeoutMs completion:^(BOOL ok) {
506
+ if (!ok) {
507
+ self.initializing = NO;
508
+ reject(@"stt_init_timeout", @"STT did not become ready before timeout", nil);
509
+ return;
510
+ }
511
+ self.initialized = YES;
512
+ self.initializing = NO;
513
+ resolve(@"initialized");
514
+ }];
484
515
  });
485
516
  });
486
517
  });
@@ -496,37 +527,70 @@ RCT_EXPORT_METHOD(pauseSpeechRecognitionLite:(RCTResponseSenderBlock)callback)
496
527
  if (callback) callback(@[@(YES)]);
497
528
  }
498
529
 
530
+ RCT_EXPORT_METHOD(pauseSpeechRecognitionLiteAsync:(nonnull NSNumber *)timeoutMs
531
+ resolver:(RCTPromiseResolveBlock)resolve
532
+ rejecter:(RCTPromiseRejectBlock)reject)
533
+ {
534
+ if (!self.stt) { resolve(@{@"ok": @(YES), @"reason": @""}); return; }
535
+
536
+ NSNumber *t = timeoutMs ?: @(1500);
537
+ if (t.doubleValue <= 0) t = @(1500);
538
+
539
+ if ([(id)self.stt respondsToSelector:@selector(pauseSpeechRecognitionLiteAndWait:completion:)]) {
540
+ [(id)self.stt pauseSpeechRecognitionLiteAndWait:t completion:^(BOOL ok, NSString * _Nullable reason) {
541
+ resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
542
+ }];
543
+ return;
544
+ }
545
+
546
+ [self.stt pauseSpeechRecognitionLite];
547
+ resolve(@{@"ok": @(YES), @"reason": @"bridge_fallback_no_wait_method"});
548
+ }
549
+
499
550
  RCT_EXPORT_METHOD(unPauseSpeechRecognitionLite:(nonnull NSNumber *)times
500
551
  callback:(RCTResponseSenderBlock)callback)
501
552
  {
502
- SBLog(@"unPauseSpeechRecognitionLite ENTER times=%@", times);
503
553
  // No-op if STT isn't initialized yet (avoid JS error spam).
504
- if (!self.stt) {
505
- SBLog(@"unPauseSpeechRecognitionLite EXIT no STT instance");
506
- if (callback) callback(@[@(YES)]);
507
- return;
508
- }
554
+ if (!self.stt) { if (callback) callback(@[@(YES)]); return; }
509
555
  [self.stt unPauseSpeechRecognitionLite:times];
510
- SBLog(@"unPauseSpeechRecognitionLite AFTER Swift call times=%@", times);
511
556
  if (callback) callback(@[@(YES)]);
512
- SBLog(@"unPauseSpeechRecognitionLite CALLBACK fired times=%@", times);
513
557
  }
514
558
 
515
559
  RCT_EXPORT_METHOD(unPauseSpeechRecognitionLiteWithPreFetch:(nonnull NSNumber *)times
516
560
  preFetchMs:(nonnull NSNumber *)preFetch
517
561
  callback:(RCTResponseSenderBlock)callback)
518
562
  {
519
- SBLog(@"unPauseSpeechRecognitionLiteWithPreFetch ENTER times=%@ preFetchMs=%@", times, preFetch);
520
563
  // No-op if STT isn't initialized yet (avoid JS error spam).
521
- if (!self.stt) {
522
- SBLog(@"unPauseSpeechRecognitionLiteWithPreFetch EXIT no STT instance");
523
- if (callback) callback(@[@(YES)]);
524
- return;
525
- }
564
+ if (!self.stt) { if (callback) callback(@[@(YES)]); return; }
526
565
  [self.stt unPauseSpeechRecognitionLite:times preFetch:preFetch];
527
- SBLog(@"unPauseSpeechRecognitionLiteWithPreFetch AFTER Swift call times=%@ preFetchMs=%@", times, preFetch);
528
566
  if (callback) callback(@[@(YES)]);
529
- SBLog(@"unPauseSpeechRecognitionLiteWithPreFetch CALLBACK fired times=%@ preFetchMs=%@", times, preFetch);
567
+ }
568
+
569
+ RCT_EXPORT_METHOD(unPauseSpeechRecognitionLiteAsync:(nonnull NSNumber *)times
570
+ preFetchMs:(nonnull NSNumber *)preFetch
571
+ timeoutMs:(nonnull NSNumber *)timeoutMs
572
+ resolver:(RCTPromiseResolveBlock)resolve
573
+ rejecter:(RCTPromiseRejectBlock)reject)
574
+ {
575
+ if (!self.stt) { resolve(@{@"ok": @(YES), @"reason": @""}); return; }
576
+
577
+ NSNumber *t = timeoutMs ?: @(2500);
578
+ if (t.doubleValue <= 0) t = @(2500);
579
+ NSNumber *pf = preFetch ?: @(0);
580
+ if (pf.doubleValue < 0) pf = @(0);
581
+
582
+ if ([(id)self.stt respondsToSelector:@selector(unPauseSpeechRecognitionLiteAndWait:preFetch:timeoutMs:completion:)]) {
583
+ [(id)self.stt unPauseSpeechRecognitionLiteAndWait:times
584
+ preFetch:pf
585
+ timeoutMs:t
586
+ completion:^(BOOL ok, NSString * _Nullable reason) {
587
+ resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
588
+ }];
589
+ return;
590
+ }
591
+
592
+ [self.stt unPauseSpeechRecognitionLite:times preFetch:pf];
593
+ resolve(@{@"ok": @(YES), @"reason": @"bridge_fallback_no_wait_method"});
530
594
  }
531
595
 
532
596
  // Promise-based pause that resolves ONLY when iOS is actually settled in playback (mic released)
@@ -597,6 +661,7 @@ RCT_EXPORT_METHOD(destroyAll:(RCTPromiseResolveBlock)resolve
597
661
  @try { [self.stt destroySpeech:nil]; } @catch (__unused id e) {}
598
662
  self.stt = nil;
599
663
 
664
+ self.sttEngineHot = NO;
600
665
  self.initialized = NO;
601
666
  self.initializing = NO;
602
667
  resolve(@"destroyed");
@@ -609,15 +674,34 @@ RCT_EXPORT_METHOD(startSpeech:(NSString *)locale
609
674
  callback:(RCTResponseSenderBlock)callback)
610
675
  {
611
676
  [self ensureSTT];
677
+ self.sttEngineHot = NO;
612
678
  [self.stt startSpeechWithLocaleStr:locale];
613
679
  if (callback) callback(@[@(NO)]);
614
680
  }
615
681
 
682
+ RCT_EXPORT_METHOD(startSpeechAsync:(NSString *)locale
683
+ timeoutMs:(nonnull NSNumber *)timeoutMs
684
+ resolver:(RCTPromiseResolveBlock)resolve
685
+ rejecter:(RCTPromiseRejectBlock)reject)
686
+ {
687
+ [self ensureSTT];
688
+ self.sttEngineHot = NO;
689
+ [self.stt startSpeechWithLocaleStr:locale];
690
+ [self waitForSTTEngineHotWithTimeoutMs:timeoutMs completion:^(BOOL ok) {
691
+ if (!ok) {
692
+ reject(@"stt_start_timeout", @"STT did not become ready before timeout", nil);
693
+ return;
694
+ }
695
+ resolve(@{@"ok": @(YES)});
696
+ }];
697
+ }
698
+
616
699
  RCT_EXPORT_METHOD(startSpeechWithSVOnboardingJson:(NSString *)locale
617
700
  onboardingJsonPath:(NSString *)onboardingJsonPath
618
701
  callback:(RCTResponseSenderBlock)callback)
619
702
  {
620
703
  [self ensureSTT];
704
+ self.sttEngineHot = NO;
621
705
  if (onboardingJsonPath && (id)onboardingJsonPath != [NSNull null] && onboardingJsonPath.length > 0) {
622
706
  [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
623
707
  } else {
@@ -626,6 +710,28 @@ RCT_EXPORT_METHOD(startSpeechWithSVOnboardingJson:(NSString *)locale
626
710
  if (callback) callback(@[@(NO)]);
627
711
  }
628
712
 
713
+ RCT_EXPORT_METHOD(startSpeechWithSVOnboardingJsonAsync:(NSString *)locale
714
+ onboardingJsonPath:(NSString *)onboardingJsonPath
715
+ timeoutMs:(nonnull NSNumber *)timeoutMs
716
+ resolver:(RCTPromiseResolveBlock)resolve
717
+ rejecter:(RCTPromiseRejectBlock)reject)
718
+ {
719
+ [self ensureSTT];
720
+ self.sttEngineHot = NO;
721
+ if (onboardingJsonPath && (id)onboardingJsonPath != [NSNull null] && onboardingJsonPath.length > 0) {
722
+ [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
723
+ } else {
724
+ [self.stt startSpeechWithLocaleStr:locale];
725
+ }
726
+ [self waitForSTTEngineHotWithTimeoutMs:timeoutMs completion:^(BOOL ok) {
727
+ if (!ok) {
728
+ reject(@"stt_start_timeout", @"STT did not become ready before timeout", nil);
729
+ return;
730
+ }
731
+ resolve(@{@"ok": @(YES)});
732
+ }];
733
+ }
734
+
629
735
  RCT_EXPORT_METHOD(stopSpeech:(RCTResponseSenderBlock)callback)
630
736
  {
631
737
  if (!self.stt) { if (callback) callback(@[@(NO)]); return; }
@@ -361,8 +361,10 @@ SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, copy) NSArray<NSStri
361
361
  + (NSArray<NSString *> * _Nonnull)supportedEvents SWIFT_WARN_UNUSED_RESULT;
362
362
  - (BOOL)setLicenseWithLicenseKey:(NSString * _Nonnull)licenseKey SWIFT_WARN_UNUSED_RESULT;
363
363
  - (void)pauseSpeechRecognitionLite;
364
+ - (void)pauseSpeechRecognitionLiteAndWait:(NSNumber * _Nonnull)timeoutMs completion:(void (^ _Nonnull)(BOOL, NSString * _Nullable))completion;
364
365
  - (void)unPauseSpeechRecognitionLite:(NSNumber * _Nonnull)times;
365
366
  - (void)unPauseSpeechRecognitionLite:(NSNumber * _Nonnull)times preFetch:(NSNumber * _Nonnull)preFetch;
367
+ - (void)unPauseSpeechRecognitionLiteAndWait:(NSNumber * _Nonnull)times preFetch:(NSNumber * _Nonnull)preFetch timeoutMs:(NSNumber * _Nonnull)timeoutMs completion:(void (^ _Nonnull)(BOOL, NSString * _Nullable))completion;
366
368
  - (void)pauseMicrophoneAndWait:(NSNumber * _Nonnull)timeoutMs completion:(void (^ _Nonnull)(BOOL, NSString * _Nullable))completion;
367
369
  - (void)unPauseMicrophoneAndWait:(NSNumber * _Nonnull)timeoutMs completion:(void (^ _Nonnull)(BOOL, NSString * _Nullable))completion;
368
370
  - (void)pauseMicrophone;