react-native-davoice 1.0.12 → 1.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. package/TTSRNBridge.podspec +1 -1
  2. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar +0 -0
  3. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.md5 +1 -1
  4. package/android/libs/com/davoice/tts/1.0.0/tts-1.0.0.aar.sha1 +1 -1
  5. package/android/src/main/java/com/davoice/stt/rn/STTModule.kt +35 -12
  6. package/ios/SpeechBridge/SpeechBridge.m +134 -1
  7. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/DavoiceTTS +0 -0
  8. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.abi.json +7527 -7457
  9. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.private.swiftinterface +55 -55
  10. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.swiftinterface +55 -55
  11. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/DavoiceTTS +0 -0
  12. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.abi.json +8794 -8724
  13. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface +56 -56
  14. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftinterface +56 -56
  15. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json +8794 -8724
  16. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface +56 -56
  17. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftinterface +56 -56
  18. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeDirectory +0 -0
  19. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeRequirements-1 +0 -0
  20. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeResources +24 -24
  21. package/package.json +1 -1
  22. package/speech/index.ts +119 -2
@@ -2,7 +2,7 @@ require 'json'
2
2
 
3
3
  Pod::Spec.new do |s|
4
4
  s.name = "TTSRNBridge"
5
- s.version = "1.0.202" # Update to your package version
5
+ s.version = "1.0.210" # Update to your package version
6
6
  s.summary = "TTS for React Native."
7
7
  s.description = <<-DESC
8
8
  A React Native module for tts .
@@ -1 +1 @@
1
- 42672253290af681c13e0c48a8ea05dd tts-1.0.0.aar
1
+ 34d5e94e94ed16ab142e04079aec0966 tts-1.0.0.aar
@@ -1 +1 @@
1
- db50ab0dec3bc798242d72f812bf5d7b4f87ed0d tts-1.0.0.aar
1
+ f889edd5126c11cdfebd92848e74fad33ed38345 tts-1.0.0.aar
@@ -35,6 +35,21 @@ class STTModule(private val rc: ReactApplicationContext)
35
35
  return stt!!
36
36
  }
37
37
 
38
+ private fun readOnboardingJson(options: ReadableMap?): String? {
39
+ var onboardingJson: String? = null
40
+ try {
41
+ if (options != null && options.hasKey("onboardingJsonPath") && !options.isNull("onboardingJsonPath")) {
42
+ val p = options.getString("onboardingJsonPath")
43
+ if (!p.isNullOrBlank()) {
44
+ onboardingJson = java.io.File(p).readText(Charsets.UTF_8)
45
+ }
46
+ }
47
+ } catch (_: Throwable) {
48
+ onboardingJson = null
49
+ }
50
+ return onboardingJson
51
+ }
52
+
38
53
  // ===== JS API =====
39
54
 
40
55
  @ReactMethod
@@ -66,22 +81,18 @@ class STTModule(private val rc: ReactApplicationContext)
66
81
  // ANDROID expects (locale, options, callback) like @react-native-voice
67
82
  @ReactMethod
68
83
  fun startSpeech(locale: String?, options: ReadableMap, cb: Callback) {
69
- var onboardingJson: String? = null
70
- try {
71
- if (options.hasKey("onboardingJsonPath") && !options.isNull("onboardingJsonPath")) {
72
- val p = options.getString("onboardingJsonPath")
73
- if (!p.isNullOrBlank()) {
74
- onboardingJson = java.io.File(p).readText(Charsets.UTF_8)
75
- }
76
- }
77
- } catch (_: Throwable) {
78
- onboardingJson = null
79
- }
80
-
84
+ val onboardingJson = readOnboardingJson(options)
81
85
  ensure().startSpeech(locale, onboardingJson, null)
82
86
  cb.invoke(false) // parity with voice: callback(false) on success
83
87
  }
84
88
 
89
+ @ReactMethod
90
+ fun initWithoutModel(locale: String?, options: ReadableMap?, cb: Callback) {
91
+ val onboardingJson = readOnboardingJson(options)
92
+ ensure().startSpeech(locale, onboardingJson, null)
93
+ cb.invoke(false)
94
+ }
95
+
85
96
  @ReactMethod
86
97
  fun stopSpeech(cb: Callback) {
87
98
  stt?.stopSpeech()
@@ -101,6 +112,18 @@ class STTModule(private val rc: ReactApplicationContext)
101
112
  cb.invoke(false)
102
113
  }
103
114
 
115
+ @ReactMethod
116
+ fun destroyWihtouModel(cb: Callback) {
117
+ stt?.destroySpeech()
118
+ stt = null
119
+ cb.invoke(false)
120
+ }
121
+
122
+ @ReactMethod
123
+ fun destroyWithoutModel(cb: Callback) {
124
+ destroyWihtouModel(cb)
125
+ }
126
+
104
127
  @ReactMethod
105
128
  fun isSpeechAvailable(p: Promise) {
106
129
  STT.isSpeechAvailable(rc) { ok -> p.resolve(ok) }
@@ -25,6 +25,21 @@ static BOOL SBHasSpeechRecognitionPermission(void) {
25
25
  return status == SFSpeechRecognizerAuthorizationStatusAuthorized;
26
26
  }
27
27
 
28
+ static NSString *SBNowString(void) {
29
+ static NSDateFormatter *formatter = nil;
30
+ static dispatch_once_t onceToken;
31
+ dispatch_once(&onceToken, ^{
32
+ formatter = [NSDateFormatter new];
33
+ formatter.locale = [NSLocale localeWithLocaleIdentifier:@"en_US_POSIX"];
34
+ formatter.dateFormat = @"yyyy-MM-dd'T'HH:mm:ss.SSSZZZZZ";
35
+ });
36
+ return [formatter stringFromDate:[NSDate date]];
37
+ }
38
+
39
+ static NSString *SBBoolString(BOOL value) {
40
+ return value ? @"YES" : @"NO";
41
+ }
42
+
28
43
  // Make a mono Float32 AVAudioPCMBuffer from raw PCM payload (i16 or f32).
29
44
  // We accept either interleaved or non-interleaved input and mixdown to mono
30
45
  // (DaVoiceTTS.playBuffer will resample / normalize as needed).
@@ -119,6 +134,8 @@ static AVAudioPCMBuffer *SB_MakeMonoF32Buffer(NSData *raw,
119
134
 
120
135
  // used only to gate TTS init until STT engine is “hot”
121
136
  @property (atomic, assign) BOOL sttEngineHot;
137
+ // retained so we can re-license a fresh STT instance after destroyAll
138
+ @property (nonatomic, copy, nullable) NSString *lastLicenseKey;
122
139
  @end
123
140
 
124
141
  @implementation SpeechBridge
@@ -228,6 +245,9 @@ RCT_EXPORT_MODULE(SpeechBridge)
228
245
  if (!self.stt) {
229
246
  self.stt = [STT new];
230
247
  self.stt.delegate = self;
248
+ if (self.lastLicenseKey) {
249
+ [self.stt setLicenseWithLicenseKey:self.lastLicenseKey];
250
+ }
231
251
  }
232
252
  }
233
253
 
@@ -381,6 +401,7 @@ RCT_EXPORT_METHOD(setLicense:(NSString *)licenseKey
381
401
  }
382
402
 
383
403
  dispatch_async(dispatch_get_main_queue(), ^{
404
+ self.lastLicenseKey = licenseKey;
384
405
  BOOL ttsOk = [DaVoiceTTS activateLicenseWithLicenseKey:licenseKey];
385
406
  [self ensureSTT];
386
407
  BOOL sttOk = [self.stt setLicenseWithLicenseKey:licenseKey];
@@ -429,6 +450,9 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
429
450
  if (!self.stt) {
430
451
  self.stt = [STT new];
431
452
  self.stt.delegate = self;
453
+ if (self.lastLicenseKey) {
454
+ [self.stt setLicenseWithLicenseKey:self.lastLicenseKey];
455
+ }
432
456
  }
433
457
  if (onboardingJsonPath && (id)onboardingJsonPath != [NSNull null] && onboardingJsonPath.length > 0) {
434
458
  [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
@@ -486,6 +510,11 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
486
510
  }
487
511
  NSLog(@"[TTS] INIT: modelURL == %@", modelURL);
488
512
 
513
+ if (self.tts) {
514
+ @try { [self.tts stopSpeaking]; [self.tts destroy]; } @catch (__unused id e) {}
515
+ self.tts = nil;
516
+ }
517
+
489
518
  dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
490
519
  NSError *err = nil;
491
520
  DaVoiceTTS *tts = [[DaVoiceTTS alloc] initWithModel:modelURL error:&err];
@@ -517,6 +546,55 @@ RCT_EXPORT_METHOD(initAll:(NSDictionary *)opts
517
546
  });
518
547
  }
519
548
 
549
+ RCT_EXPORT_METHOD(initWithoutModel:(NSDictionary *)opts
550
+ resolver:(RCTPromiseResolveBlock)resolve
551
+ rejecter:(RCTPromiseRejectBlock)reject)
552
+ {
553
+ dispatch_async(dispatch_get_main_queue(), ^{
554
+ if (self.initializing) { resolve(@"already_initializing"); return; }
555
+ if (!self.tts) {
556
+ reject(@"no_tts", @"initWithoutModel() requires a successful initAll() first", nil);
557
+ return;
558
+ }
559
+ if (self.initialized && self.stt) {
560
+ resolve(@"already_initialized");
561
+ return;
562
+ }
563
+
564
+ self.initializing = YES;
565
+ self.sttEngineHot = NO;
566
+
567
+ NSString *locale = opts[@"locale"] ?: @"en-US";
568
+ NSString *onboardingJsonPath = opts[@"onboardingJsonPath"];
569
+ NSNumber *timeoutMs = opts[@"timeoutMs"];
570
+
571
+ @try { [self.stt destroySpeech:nil]; } @catch (__unused id e) {}
572
+ self.stt = nil;
573
+
574
+ [self ensureSTT];
575
+ if (onboardingJsonPath && (id)onboardingJsonPath != [NSNull null] && onboardingJsonPath.length > 0) {
576
+ [self.stt startSpeechWithLocaleStr:locale onboardingJsonPath:onboardingJsonPath];
577
+ } else {
578
+ [self.stt startSpeechWithLocaleStr:locale];
579
+ }
580
+
581
+ [self waitForSTTEngineHotWithTimeoutMs:timeoutMs completion:^(BOOL ok) {
582
+ if (!ok) {
583
+ self.initializing = NO;
584
+ self.initialized = NO;
585
+ self.sttEngineHot = NO;
586
+ @try { [self.stt destroySpeech:nil]; } @catch (__unused id e) {}
587
+ self.stt = nil;
588
+ reject(@"stt_init_timeout", @"STT did not become ready before timeout", nil);
589
+ return;
590
+ }
591
+ self.initialized = YES;
592
+ self.initializing = NO;
593
+ resolve(@"initialized_without_model");
594
+ }];
595
+ });
596
+ }
597
+
520
598
  // --- SpeechRecognitionLite pause/unpause (counter-based) ---
521
599
 
522
600
  RCT_EXPORT_METHOD(pauseSpeechRecognitionLite:(RCTResponseSenderBlock)callback)
@@ -535,14 +613,22 @@ RCT_EXPORT_METHOD(pauseSpeechRecognitionLiteAsync:(nonnull NSNumber *)timeoutMs
535
613
 
536
614
  NSNumber *t = timeoutMs ?: @(1500);
537
615
  if (t.doubleValue <= 0) t = @(1500);
616
+ CFTimeInterval startedAt = CACurrentMediaTime();
617
+ RCTLogInfo(@"[SpeechBridge] %@ pauseSpeechRecognitionLiteAsync begin timeoutMs=%@ stt=%@", SBNowString(), t, self.stt);
538
618
 
539
619
  if ([(id)self.stt respondsToSelector:@selector(pauseSpeechRecognitionLiteAndWait:completion:)]) {
540
620
  [(id)self.stt pauseSpeechRecognitionLiteAndWait:t completion:^(BOOL ok, NSString * _Nullable reason) {
621
+ RCTLogInfo(@"[SpeechBridge] %@ pauseSpeechRecognitionLiteAsync resolved elapsedMs=%.1f ok=%@ reason=%@",
622
+ SBNowString(),
623
+ (CACurrentMediaTime() - startedAt) * 1000.0,
624
+ SBBoolString(ok),
625
+ reason ?: @"");
541
626
  resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
542
627
  }];
543
628
  return;
544
629
  }
545
630
 
631
+ RCTLogInfo(@"[SpeechBridge] %@ pauseSpeechRecognitionLiteAsync fallback no-wait", SBNowString());
546
632
  [self.stt pauseSpeechRecognitionLite];
547
633
  resolve(@{@"ok": @(YES), @"reason": @"bridge_fallback_no_wait_method"});
548
634
  }
@@ -578,17 +664,32 @@ RCT_EXPORT_METHOD(unPauseSpeechRecognitionLiteAsync:(nonnull NSNumber *)times
578
664
  if (t.doubleValue <= 0) t = @(2500);
579
665
  NSNumber *pf = preFetch ?: @(0);
580
666
  if (pf.doubleValue < 0) pf = @(0);
667
+ CFTimeInterval startedAt = CACurrentMediaTime();
668
+ RCTLogInfo(@"[SpeechBridge] %@ unPauseSpeechRecognitionLiteAsync begin times=%@ preFetchMs=%@ timeoutMs=%@ stt=%@ initialized=%@ initializing=%@",
669
+ SBNowString(),
670
+ times,
671
+ pf,
672
+ t,
673
+ self.stt,
674
+ SBBoolString(self.initialized),
675
+ SBBoolString(self.initializing));
581
676
 
582
677
  if ([(id)self.stt respondsToSelector:@selector(unPauseSpeechRecognitionLiteAndWait:preFetch:timeoutMs:completion:)]) {
583
678
  [(id)self.stt unPauseSpeechRecognitionLiteAndWait:times
584
679
  preFetch:pf
585
680
  timeoutMs:t
586
681
  completion:^(BOOL ok, NSString * _Nullable reason) {
682
+ RCTLogInfo(@"[SpeechBridge] %@ unPauseSpeechRecognitionLiteAsync resolved elapsedMs=%.1f ok=%@ reason=%@",
683
+ SBNowString(),
684
+ (CACurrentMediaTime() - startedAt) * 1000.0,
685
+ SBBoolString(ok),
686
+ reason ?: @"");
587
687
  resolve(@{@"ok": @(ok), @"reason": reason ?: @""});
588
688
  }];
589
689
  return;
590
690
  }
591
691
 
692
+ RCTLogInfo(@"[SpeechBridge] %@ unPauseSpeechRecognitionLiteAsync fallback no-wait", SBNowString());
592
693
  [self.stt unPauseSpeechRecognitionLite:times preFetch:pf];
593
694
  resolve(@{@"ok": @(YES), @"reason": @"bridge_fallback_no_wait_method"});
594
695
  }
@@ -647,7 +748,7 @@ RCT_EXPORT_METHOD(destroyAll:(RCTPromiseResolveBlock)resolve
647
748
  rejecter:(RCTPromiseRejectBlock)reject)
648
749
  {
649
750
  dispatch_async(dispatch_get_main_queue(), ^{
650
- if (!self.initialized && !self.initializing) {
751
+ if (!self.tts && !self.stt && !self.initialized && !self.initializing) {
651
752
  resolve(@"already_destroyed");
652
753
  return;
653
754
  }
@@ -668,6 +769,38 @@ RCT_EXPORT_METHOD(destroyAll:(RCTPromiseResolveBlock)resolve
668
769
  });
669
770
  }
670
771
 
772
+ RCT_EXPORT_METHOD(destroyWihtouModel:(RCTPromiseResolveBlock)resolve
773
+ rejecter:(RCTPromiseRejectBlock)reject)
774
+ {
775
+ dispatch_async(dispatch_get_main_queue(), ^{
776
+ if (!self.tts) {
777
+ reject(@"no_tts", @"destroyWihtouModel() requires a successful initAll() first", nil);
778
+ return;
779
+ }
780
+
781
+ if (!self.stt && !self.initialized && !self.initializing) {
782
+ resolve(@"already_destroyed_without_model");
783
+ return;
784
+ }
785
+
786
+ self.initializing = YES;
787
+
788
+ @try { [self.stt destroySpeech:nil]; } @catch (__unused id e) {}
789
+ self.stt = nil;
790
+
791
+ self.sttEngineHot = NO;
792
+ self.initialized = NO;
793
+ self.initializing = NO;
794
+ resolve(@"destroyed_without_model");
795
+ });
796
+ }
797
+
798
+ RCT_EXPORT_METHOD(destroyWithoutModel:(RCTPromiseResolveBlock)resolve
799
+ rejecter:(RCTPromiseRejectBlock)reject)
800
+ {
801
+ [self destroyWihtouModel:resolve rejecter:reject];
802
+ }
803
+
671
804
  #pragma mark - Convenience passthroughs (optional)
672
805
 
673
806
  RCT_EXPORT_METHOD(startSpeech:(NSString *)locale