react-native-davoice-tts 1.0.321 → 1.0.323

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18)
  1. package/TTSRNBridge.podspec +1 -1
  2. package/ios/SpeechBridge/SpeechBridge.m +38 -0
  3. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/DavoiceTTS +0 -0
  4. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.abi.json +8854 -8833
  5. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.private.swiftinterface +77 -77
  6. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios.swiftinterface +77 -77
  7. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/DavoiceTTS +0 -0
  8. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.abi.json +8757 -8736
  9. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface +53 -53
  10. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftinterface +53 -53
  11. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json +8757 -8736
  12. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface +53 -53
  13. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftinterface +53 -53
  14. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeDirectory +0 -0
  15. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeRequirements-1 +0 -0
  16. package/ios/TTSRNBridge/DavoiceTTS.xcframework/ios-arm64_x86_64-simulator/DavoiceTTS.framework/_CodeSignature/CodeResources +24 -24
  17. package/package.json +1 -1
  18. package/speech/index.ts +26 -2
@@ -15,6 +15,57 @@ import _StringProcessing
15
15
  import _SwiftConcurrencyShims
16
16
  import onnxruntime_objc
17
17
  import phonemes
18
+ public typealias EngineSchedule = (_ url: Foundation.URL, _ onDone: @escaping () -> Swift.Void) -> Swift.Bool
19
+ public typealias IsEngineReady = () -> Swift.Bool
20
+ public typealias useOnlyEnginePlayback = () -> Swift.Bool
21
+ public typealias StopEnginePlayback = () -> Swift.Void
22
+ public typealias CurrentEngineProvider = () -> AVFAudio.AVAudioEngine?
23
+ public enum AudioPlaybackHook {
24
+ public static var engineScheduleFile: DavoiceTTS.EngineSchedule?
25
+ public static var isEngineReady: DavoiceTTS.IsEngineReady?
26
+ public static var useOnlyEnginePlayback: DavoiceTTS.useOnlyEnginePlayback?
27
+ public static var stopEnginePlayback: DavoiceTTS.StopEnginePlayback?
28
+ public static var currentEngine: DavoiceTTS.CurrentEngineProvider?
29
+ }
30
+ @objc public protocol STTDelegate {
31
+ @objc func stt(_ stt: DavoiceTTS.STT, didEmitEvent name: Swift.String, body: [Swift.String : Any]?)
32
+ }
33
+ @objc @_inheritsConvenienceInitializers @objcMembers final public class STT : ObjectiveC.NSObject, Speech.SFSpeechRecognizerDelegate {
34
+ @objc weak final public var delegate: (any DavoiceTTS.STTDelegate)?
35
+ @objc final public var continuous: Swift.Bool
36
+ @objc final public var aecEnabled: Swift.Bool
37
+ @objc final public var force16kMicSampleRate: Swift.Bool
38
+ @objc final public var useLegacySpeakerGateBehavior: Swift.Bool
39
+ @objc final public var useSpeakerGateHangover: Swift.Bool
40
+ @objc final public var speakerGateHangoverSeconds: Swift.Double
41
+ @objc final public var useShortSpeakerVerificationTailWindow: Swift.Bool
42
+ @objc final public var shortSpeakerVerificationTailSeconds: Swift.Float
43
+ @objc final public var speakerPreRollFlushMaxSeconds: Swift.Double
44
+ @objc public static let supportedEvents: [Swift.String]
45
+ @objc final public func setLicense(licenseKey: Swift.String) -> Swift.Bool
46
+ @objc final public func pauseSpeechRecognitionLite()
47
+ @objc final public func pauseSpeechRecognitionLiteAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
48
+ @objc final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber)
49
+ @objc(unPauseSpeechRecognitionLite:preFetch:) final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber)
50
+ @objc final public func unPauseSpeechRecognitionLiteAndWait(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber, timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
51
+ @objc final public func pauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
52
+ @objc final public func unPauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
53
+ @objc final public func pauseMicrophone()
54
+ @objc final public func unPauseMicrophone()
55
+ @objc final public func setAECEnabled(_ enabled: Swift.Bool)
56
+ @objc final public func isAECEnabled() -> Swift.Bool
57
+ @objc final public func isSpeechAvailable(_ completion: @escaping (Swift.Bool) -> Swift.Void)
58
+ @objc final public func isRecognizing() -> Swift.Bool
59
+ @objc final public func startSpeech(localeStr: Swift.String?)
60
+ @objc final public func startSpeech(localeStr: Swift.String?, onboardingJsonPath: Swift.String)
61
+ @objc final public func stopSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
62
+ @objc final public func cancelSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
63
+ @objc final public func destroySpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
64
+ @objc final public func teardown()
65
+ @objc final public func speechRecognizer(_ speechRecognizer: Speech.SFSpeechRecognizer, availabilityDidChange available: Swift.Bool)
66
+ @objc override dynamic public init()
67
+ @objc deinit
68
+ }
18
69
  @objc @_inheritsConvenienceInitializers @_hasMissingDesignatedInitializers final public class SwiftSoundQueue : ObjectiveC.NSObject, AVFAudio.AVAudioPlayerDelegate {
19
70
  @objc deinit
20
71
  public static let shared: DavoiceTTS.SwiftSoundQueue
@@ -53,23 +104,6 @@ import phonemes
53
104
  @objc final public func playWav(_ url: Foundation.URL, markAsLastUtterance: Swift.Bool = true)
54
105
  @objc final public func playBuffer(_ buffer: AVFAudio.AVAudioPCMBuffer, markAsLastUtterance: Swift.Bool = true)
55
106
  }
56
- @_inheritsConvenienceInitializers @objc final public class LicenseManager : ObjectiveC.NSObject {
57
- @objc public static func isLicenseValid(licenseKey: Swift.String) -> Swift.Bool
58
- @objc override dynamic public init()
59
- @objc deinit
60
- }
61
- public typealias EngineSchedule = (_ url: Foundation.URL, _ onDone: @escaping () -> Swift.Void) -> Swift.Bool
62
- public typealias IsEngineReady = () -> Swift.Bool
63
- public typealias useOnlyEnginePlayback = () -> Swift.Bool
64
- public typealias StopEnginePlayback = () -> Swift.Void
65
- public typealias CurrentEngineProvider = () -> AVFAudio.AVAudioEngine?
66
- public enum AudioPlaybackHook {
67
- public static var engineScheduleFile: DavoiceTTS.EngineSchedule?
68
- public static var isEngineReady: DavoiceTTS.IsEngineReady?
69
- public static var useOnlyEnginePlayback: DavoiceTTS.useOnlyEnginePlayback?
70
- public static var stopEnginePlayback: DavoiceTTS.StopEnginePlayback?
71
- public static var currentEngine: DavoiceTTS.CurrentEngineProvider?
72
- }
73
107
  public enum SVLogLevel : Swift.Int, Swift.Codable {
74
108
  case off, error, warn, info, debug, trace
75
109
  public init?(rawValue: Swift.Int)
@@ -172,42 +206,8 @@ public enum SpeakerVerificationError : Swift.Error, Swift.CustomStringConvertibl
172
206
  @objc override dynamic public init()
173
207
  @objc deinit
174
208
  }
175
- @objc public protocol STTDelegate {
176
- @objc func stt(_ stt: DavoiceTTS.STT, didEmitEvent name: Swift.String, body: [Swift.String : Any]?)
177
- }
178
- @objc @_inheritsConvenienceInitializers @objcMembers final public class STT : ObjectiveC.NSObject, Speech.SFSpeechRecognizerDelegate {
179
- @objc weak final public var delegate: (any DavoiceTTS.STTDelegate)?
180
- @objc final public var continuous: Swift.Bool
181
- @objc final public var aecEnabled: Swift.Bool
182
- @objc final public var force16kMicSampleRate: Swift.Bool
183
- @objc final public var useLegacySpeakerGateBehavior: Swift.Bool
184
- @objc final public var useSpeakerGateHangover: Swift.Bool
185
- @objc final public var speakerGateHangoverSeconds: Swift.Double
186
- @objc final public var useShortSpeakerVerificationTailWindow: Swift.Bool
187
- @objc final public var shortSpeakerVerificationTailSeconds: Swift.Float
188
- @objc final public var speakerPreRollFlushMaxSeconds: Swift.Double
189
- @objc public static let supportedEvents: [Swift.String]
190
- @objc final public func setLicense(licenseKey: Swift.String) -> Swift.Bool
191
- @objc final public func pauseSpeechRecognitionLite()
192
- @objc final public func pauseSpeechRecognitionLiteAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
193
- @objc final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber)
194
- @objc(unPauseSpeechRecognitionLite:preFetch:) final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber)
195
- @objc final public func unPauseSpeechRecognitionLiteAndWait(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber, timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
196
- @objc final public func pauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
197
- @objc final public func unPauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
198
- @objc final public func pauseMicrophone()
199
- @objc final public func unPauseMicrophone()
200
- @objc final public func setAECEnabled(_ enabled: Swift.Bool)
201
- @objc final public func isAECEnabled() -> Swift.Bool
202
- @objc final public func isSpeechAvailable(_ completion: @escaping (Swift.Bool) -> Swift.Void)
203
- @objc final public func isRecognizing() -> Swift.Bool
204
- @objc final public func startSpeech(localeStr: Swift.String?)
205
- @objc final public func startSpeech(localeStr: Swift.String?, onboardingJsonPath: Swift.String)
206
- @objc final public func stopSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
207
- @objc final public func cancelSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
208
- @objc final public func destroySpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
209
- @objc final public func teardown()
210
- @objc final public func speechRecognizer(_ speechRecognizer: Speech.SFSpeechRecognizer, availabilityDidChange available: Swift.Bool)
209
+ @_inheritsConvenienceInitializers @objc final public class LicenseManager : ObjectiveC.NSObject {
210
+ @objc public static func isLicenseValid(licenseKey: Swift.String) -> Swift.Bool
211
211
  @objc override dynamic public init()
212
212
  @objc deinit
213
213
  }
@@ -15,6 +15,57 @@ import _StringProcessing
15
15
  import _SwiftConcurrencyShims
16
16
  import onnxruntime_objc
17
17
  import phonemes
18
+ public typealias EngineSchedule = (_ url: Foundation.URL, _ onDone: @escaping () -> Swift.Void) -> Swift.Bool
19
+ public typealias IsEngineReady = () -> Swift.Bool
20
+ public typealias useOnlyEnginePlayback = () -> Swift.Bool
21
+ public typealias StopEnginePlayback = () -> Swift.Void
22
+ public typealias CurrentEngineProvider = () -> AVFAudio.AVAudioEngine?
23
+ public enum AudioPlaybackHook {
24
+ public static var engineScheduleFile: DavoiceTTS.EngineSchedule?
25
+ public static var isEngineReady: DavoiceTTS.IsEngineReady?
26
+ public static var useOnlyEnginePlayback: DavoiceTTS.useOnlyEnginePlayback?
27
+ public static var stopEnginePlayback: DavoiceTTS.StopEnginePlayback?
28
+ public static var currentEngine: DavoiceTTS.CurrentEngineProvider?
29
+ }
30
+ @objc public protocol STTDelegate {
31
+ @objc func stt(_ stt: DavoiceTTS.STT, didEmitEvent name: Swift.String, body: [Swift.String : Any]?)
32
+ }
33
+ @objc @_inheritsConvenienceInitializers @objcMembers final public class STT : ObjectiveC.NSObject, Speech.SFSpeechRecognizerDelegate {
34
+ @objc weak final public var delegate: (any DavoiceTTS.STTDelegate)?
35
+ @objc final public var continuous: Swift.Bool
36
+ @objc final public var aecEnabled: Swift.Bool
37
+ @objc final public var force16kMicSampleRate: Swift.Bool
38
+ @objc final public var useLegacySpeakerGateBehavior: Swift.Bool
39
+ @objc final public var useSpeakerGateHangover: Swift.Bool
40
+ @objc final public var speakerGateHangoverSeconds: Swift.Double
41
+ @objc final public var useShortSpeakerVerificationTailWindow: Swift.Bool
42
+ @objc final public var shortSpeakerVerificationTailSeconds: Swift.Float
43
+ @objc final public var speakerPreRollFlushMaxSeconds: Swift.Double
44
+ @objc public static let supportedEvents: [Swift.String]
45
+ @objc final public func setLicense(licenseKey: Swift.String) -> Swift.Bool
46
+ @objc final public func pauseSpeechRecognitionLite()
47
+ @objc final public func pauseSpeechRecognitionLiteAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
48
+ @objc final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber)
49
+ @objc(unPauseSpeechRecognitionLite:preFetch:) final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber)
50
+ @objc final public func unPauseSpeechRecognitionLiteAndWait(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber, timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
51
+ @objc final public func pauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
52
+ @objc final public func unPauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
53
+ @objc final public func pauseMicrophone()
54
+ @objc final public func unPauseMicrophone()
55
+ @objc final public func setAECEnabled(_ enabled: Swift.Bool)
56
+ @objc final public func isAECEnabled() -> Swift.Bool
57
+ @objc final public func isSpeechAvailable(_ completion: @escaping (Swift.Bool) -> Swift.Void)
58
+ @objc final public func isRecognizing() -> Swift.Bool
59
+ @objc final public func startSpeech(localeStr: Swift.String?)
60
+ @objc final public func startSpeech(localeStr: Swift.String?, onboardingJsonPath: Swift.String)
61
+ @objc final public func stopSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
62
+ @objc final public func cancelSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
63
+ @objc final public func destroySpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
64
+ @objc final public func teardown()
65
+ @objc final public func speechRecognizer(_ speechRecognizer: Speech.SFSpeechRecognizer, availabilityDidChange available: Swift.Bool)
66
+ @objc override dynamic public init()
67
+ @objc deinit
68
+ }
18
69
  @objc @_inheritsConvenienceInitializers @_hasMissingDesignatedInitializers final public class SwiftSoundQueue : ObjectiveC.NSObject, AVFAudio.AVAudioPlayerDelegate {
19
70
  @objc deinit
20
71
  public static let shared: DavoiceTTS.SwiftSoundQueue
@@ -53,23 +104,6 @@ import phonemes
53
104
  @objc final public func playWav(_ url: Foundation.URL, markAsLastUtterance: Swift.Bool = true)
54
105
  @objc final public func playBuffer(_ buffer: AVFAudio.AVAudioPCMBuffer, markAsLastUtterance: Swift.Bool = true)
55
106
  }
56
- @_inheritsConvenienceInitializers @objc final public class LicenseManager : ObjectiveC.NSObject {
57
- @objc public static func isLicenseValid(licenseKey: Swift.String) -> Swift.Bool
58
- @objc override dynamic public init()
59
- @objc deinit
60
- }
61
- public typealias EngineSchedule = (_ url: Foundation.URL, _ onDone: @escaping () -> Swift.Void) -> Swift.Bool
62
- public typealias IsEngineReady = () -> Swift.Bool
63
- public typealias useOnlyEnginePlayback = () -> Swift.Bool
64
- public typealias StopEnginePlayback = () -> Swift.Void
65
- public typealias CurrentEngineProvider = () -> AVFAudio.AVAudioEngine?
66
- public enum AudioPlaybackHook {
67
- public static var engineScheduleFile: DavoiceTTS.EngineSchedule?
68
- public static var isEngineReady: DavoiceTTS.IsEngineReady?
69
- public static var useOnlyEnginePlayback: DavoiceTTS.useOnlyEnginePlayback?
70
- public static var stopEnginePlayback: DavoiceTTS.StopEnginePlayback?
71
- public static var currentEngine: DavoiceTTS.CurrentEngineProvider?
72
- }
73
107
  public enum SVLogLevel : Swift.Int, Swift.Codable {
74
108
  case off, error, warn, info, debug, trace
75
109
  public init?(rawValue: Swift.Int)
@@ -172,42 +206,8 @@ public enum SpeakerVerificationError : Swift.Error, Swift.CustomStringConvertibl
172
206
  @objc override dynamic public init()
173
207
  @objc deinit
174
208
  }
175
- @objc public protocol STTDelegate {
176
- @objc func stt(_ stt: DavoiceTTS.STT, didEmitEvent name: Swift.String, body: [Swift.String : Any]?)
177
- }
178
- @objc @_inheritsConvenienceInitializers @objcMembers final public class STT : ObjectiveC.NSObject, Speech.SFSpeechRecognizerDelegate {
179
- @objc weak final public var delegate: (any DavoiceTTS.STTDelegate)?
180
- @objc final public var continuous: Swift.Bool
181
- @objc final public var aecEnabled: Swift.Bool
182
- @objc final public var force16kMicSampleRate: Swift.Bool
183
- @objc final public var useLegacySpeakerGateBehavior: Swift.Bool
184
- @objc final public var useSpeakerGateHangover: Swift.Bool
185
- @objc final public var speakerGateHangoverSeconds: Swift.Double
186
- @objc final public var useShortSpeakerVerificationTailWindow: Swift.Bool
187
- @objc final public var shortSpeakerVerificationTailSeconds: Swift.Float
188
- @objc final public var speakerPreRollFlushMaxSeconds: Swift.Double
189
- @objc public static let supportedEvents: [Swift.String]
190
- @objc final public func setLicense(licenseKey: Swift.String) -> Swift.Bool
191
- @objc final public func pauseSpeechRecognitionLite()
192
- @objc final public func pauseSpeechRecognitionLiteAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
193
- @objc final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber)
194
- @objc(unPauseSpeechRecognitionLite:preFetch:) final public func unPauseSpeechRecognitionLite(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber)
195
- @objc final public func unPauseSpeechRecognitionLiteAndWait(_ times: Foundation.NSNumber, preFetch: Foundation.NSNumber, timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
196
- @objc final public func pauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
197
- @objc final public func unPauseMicrophoneAndWait(_ timeoutMs: Foundation.NSNumber, completion: @escaping (Swift.Bool, Swift.String?) -> Swift.Void)
198
- @objc final public func pauseMicrophone()
199
- @objc final public func unPauseMicrophone()
200
- @objc final public func setAECEnabled(_ enabled: Swift.Bool)
201
- @objc final public func isAECEnabled() -> Swift.Bool
202
- @objc final public func isSpeechAvailable(_ completion: @escaping (Swift.Bool) -> Swift.Void)
203
- @objc final public func isRecognizing() -> Swift.Bool
204
- @objc final public func startSpeech(localeStr: Swift.String?)
205
- @objc final public func startSpeech(localeStr: Swift.String?, onboardingJsonPath: Swift.String)
206
- @objc final public func stopSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
207
- @objc final public func cancelSpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
208
- @objc final public func destroySpeech(_ completion: ((Swift.Bool) -> Swift.Void)? = nil)
209
- @objc final public func teardown()
210
- @objc final public func speechRecognizer(_ speechRecognizer: Speech.SFSpeechRecognizer, availabilityDidChange available: Swift.Bool)
209
+ @_inheritsConvenienceInitializers @objc final public class LicenseManager : ObjectiveC.NSObject {
210
+ @objc public static func isLicenseValid(licenseKey: Swift.String) -> Swift.Bool
211
211
  @objc override dynamic public init()
212
212
  @objc deinit
213
213
  }
@@ -14,11 +14,11 @@
14
14
  </data>
15
15
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.abi.json</key>
16
16
  <data>
17
- 9CFGBqNMnh3cwOoi38aVBgsneqk=
17
+ RENuqkB3ikCnGFcIITX8v98FfDQ=
18
18
  </data>
19
19
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface</key>
20
20
  <data>
21
- iZ9SB6PLiy0mvvKUm31ZjWrdPY8=
21
+ FWtkzOs4XvJqGX9j0xq4o8n47jo=
22
22
  </data>
23
23
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftdoc</key>
24
24
  <data>
@@ -26,19 +26,19 @@
26
26
  </data>
27
27
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftinterface</key>
28
28
  <data>
29
- iZ9SB6PLiy0mvvKUm31ZjWrdPY8=
29
+ FWtkzOs4XvJqGX9j0xq4o8n47jo=
30
30
  </data>
31
31
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftmodule</key>
32
32
  <data>
33
- jp/eQj6hR7QPBMc0xBNk10JXAQM=
33
+ eK74iyD5qmMTkwSiBjje9FbV+tY=
34
34
  </data>
35
35
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json</key>
36
36
  <data>
37
- 9CFGBqNMnh3cwOoi38aVBgsneqk=
37
+ RENuqkB3ikCnGFcIITX8v98FfDQ=
38
38
  </data>
39
39
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface</key>
40
40
  <data>
41
- 6dQ7pJyDbRInQCGa8BRk3B9JBRY=
41
+ h+WngymCE4xWp/SvdrqSNgghGn4=
42
42
  </data>
43
43
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftdoc</key>
44
44
  <data>
@@ -46,11 +46,11 @@
46
46
  </data>
47
47
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftinterface</key>
48
48
  <data>
49
- 6dQ7pJyDbRInQCGa8BRk3B9JBRY=
49
+ h+WngymCE4xWp/SvdrqSNgghGn4=
50
50
  </data>
51
51
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftmodule</key>
52
52
  <data>
53
- 08QaJH5IFQQUTX61OdpSPYcdDbQ=
53
+ sSx3DD/rr9AZj+g/D1MS3g0e0gE=
54
54
  </data>
55
55
  <key>Modules/module.modulemap</key>
56
56
  <data>
@@ -74,22 +74,22 @@
74
74
  <dict>
75
75
  <key>hash</key>
76
76
  <data>
77
- 9CFGBqNMnh3cwOoi38aVBgsneqk=
77
+ RENuqkB3ikCnGFcIITX8v98FfDQ=
78
78
  </data>
79
79
  <key>hash2</key>
80
80
  <data>
81
- YOKYBImLcHgWJLcSItgIoJJaujhZ3De9+PCgVsi2Wl0=
81
+ ZH/+2Rw+ABSlYfH2vEGknOz8kmZR71RvE+HUfQnaHoM=
82
82
  </data>
83
83
  </dict>
84
84
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.private.swiftinterface</key>
85
85
  <dict>
86
86
  <key>hash</key>
87
87
  <data>
88
- iZ9SB6PLiy0mvvKUm31ZjWrdPY8=
88
+ FWtkzOs4XvJqGX9j0xq4o8n47jo=
89
89
  </data>
90
90
  <key>hash2</key>
91
91
  <data>
92
- 6yqMqwnkQOtfKqVrzCxqU/p/UmJb+VLLlaIT8BVf9FI=
92
+ cVjyknswMshrznRZD/SjqcL8xaEIG3IjDBFazJ+EKnY=
93
93
  </data>
94
94
  </dict>
95
95
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftdoc</key>
@@ -107,44 +107,44 @@
107
107
  <dict>
108
108
  <key>hash</key>
109
109
  <data>
110
- iZ9SB6PLiy0mvvKUm31ZjWrdPY8=
110
+ FWtkzOs4XvJqGX9j0xq4o8n47jo=
111
111
  </data>
112
112
  <key>hash2</key>
113
113
  <data>
114
- 6yqMqwnkQOtfKqVrzCxqU/p/UmJb+VLLlaIT8BVf9FI=
114
+ cVjyknswMshrznRZD/SjqcL8xaEIG3IjDBFazJ+EKnY=
115
115
  </data>
116
116
  </dict>
117
117
  <key>Modules/DavoiceTTS.swiftmodule/arm64-apple-ios-simulator.swiftmodule</key>
118
118
  <dict>
119
119
  <key>hash</key>
120
120
  <data>
121
- jp/eQj6hR7QPBMc0xBNk10JXAQM=
121
+ eK74iyD5qmMTkwSiBjje9FbV+tY=
122
122
  </data>
123
123
  <key>hash2</key>
124
124
  <data>
125
- 7BXGUKiPQUkLePIFPa0Nxbqpj3p9wZKkqtUnN6jwE4M=
125
+ KJ25n0/DBSnwFFWxByu+hQOJLrx/ifG1yB7A4rA5AOc=
126
126
  </data>
127
127
  </dict>
128
128
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.abi.json</key>
129
129
  <dict>
130
130
  <key>hash</key>
131
131
  <data>
132
- 9CFGBqNMnh3cwOoi38aVBgsneqk=
132
+ RENuqkB3ikCnGFcIITX8v98FfDQ=
133
133
  </data>
134
134
  <key>hash2</key>
135
135
  <data>
136
- YOKYBImLcHgWJLcSItgIoJJaujhZ3De9+PCgVsi2Wl0=
136
+ ZH/+2Rw+ABSlYfH2vEGknOz8kmZR71RvE+HUfQnaHoM=
137
137
  </data>
138
138
  </dict>
139
139
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.private.swiftinterface</key>
140
140
  <dict>
141
141
  <key>hash</key>
142
142
  <data>
143
- 6dQ7pJyDbRInQCGa8BRk3B9JBRY=
143
+ h+WngymCE4xWp/SvdrqSNgghGn4=
144
144
  </data>
145
145
  <key>hash2</key>
146
146
  <data>
147
- GwE5m+vZsiC5Yb1S83RtpV6FORCCBOreebjmXNXoSSw=
147
+ 8BRqWC1B9YHxAwJKEK5vYTvMn5o58S7l4gG0Slp72Pg=
148
148
  </data>
149
149
  </dict>
150
150
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftdoc</key>
@@ -162,22 +162,22 @@
162
162
  <dict>
163
163
  <key>hash</key>
164
164
  <data>
165
- 6dQ7pJyDbRInQCGa8BRk3B9JBRY=
165
+ h+WngymCE4xWp/SvdrqSNgghGn4=
166
166
  </data>
167
167
  <key>hash2</key>
168
168
  <data>
169
- GwE5m+vZsiC5Yb1S83RtpV6FORCCBOreebjmXNXoSSw=
169
+ 8BRqWC1B9YHxAwJKEK5vYTvMn5o58S7l4gG0Slp72Pg=
170
170
  </data>
171
171
  </dict>
172
172
  <key>Modules/DavoiceTTS.swiftmodule/x86_64-apple-ios-simulator.swiftmodule</key>
173
173
  <dict>
174
174
  <key>hash</key>
175
175
  <data>
176
- 08QaJH5IFQQUTX61OdpSPYcdDbQ=
176
+ sSx3DD/rr9AZj+g/D1MS3g0e0gE=
177
177
  </data>
178
178
  <key>hash2</key>
179
179
  <data>
180
- 72qZhHIICmK61DshcqfMQUoiTgAt3/eW412CPptXn9k=
180
+ IPvWLa4TcunlyT8VRldbHKX8Qv0xbPSkN7lD68ATeq4=
181
181
  </data>
182
182
  </dict>
183
183
  <key>Modules/module.modulemap</key>
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "react-native-davoice-tts",
3
- "version": "1.0.321",
3
+ "version": "1.0.323",
4
4
  "description": "tts library for React Native",
5
5
  "main": "tts/index.js",
6
6
  "types": "tts/index.d.ts",
package/speech/index.ts CHANGED
@@ -34,6 +34,12 @@ function sleep(ms: number) {
34
34
  return new Promise<void>((r) => setTimeout(r, ms));
35
35
  }
36
36
 
37
+ let speechOpSeq = 0;
38
+ function nextSpeechOpId(prefix: string) {
39
+ speechOpSeq += 1;
40
+ return `${prefix}-${speechOpSeq}-${Date.now()}`;
41
+ }
42
+
37
43
  // If you use typed-array -> base64, Buffer is convenient (works in RN)
38
44
  let toBase64: (u8: Uint8Array) => string;
39
45
  try {
@@ -527,6 +533,8 @@ class Speech {
527
533
  // TODO: CHECK THE NATIVE SIDE DOES NOT REALLY AWAITS
528
534
  async pauseSpeechRecognition(): Promise<void> {
529
535
  this.logCall('pauseSpeechRecognitionLite');
536
+ const opId = nextSpeechOpId('pause-stt');
537
+ const startedAt = Date.now();
530
538
 
531
539
  const mod: any = Platform.OS === 'ios' ? NativeSpeech : NativeSTT;
532
540
  const fn = mod?.pauseSpeechRecognitionLite;
@@ -537,7 +545,9 @@ class Speech {
537
545
  }
538
546
 
539
547
  if (Platform.OS === 'ios' && typeof mod?.pauseSpeechRecognitionLiteAsync === 'function') {
548
+ dbg('[pauseSpeechRecognitionLiteAsync] begin', { opId, timeoutMs: 1500 });
540
549
  const result = await mod.pauseSpeechRecognitionLiteAsync(1500);
550
+ dbg('[pauseSpeechRecognitionLiteAsync] resolved', { opId, elapsedMs: Date.now() - startedAt, result });
541
551
  if (result?.ok === false) dbgErr('pauseSpeechRecognitionLiteAsync failed', result?.reason);
542
552
  return;
543
553
  }
@@ -569,8 +579,14 @@ class Speech {
569
579
  });
570
580
  }
571
581
 
572
- async unPauseSpeechRecognition(times: number, preFetchMs: number = 0): Promise<void> {
582
+ async unPauseSpeechRecognition(
583
+ times: number,
584
+ preFetchMs: number = 0,
585
+ timeoutMs: number = 2500,
586
+ ): Promise<void> {
573
587
  this.logCall('unPauseSpeechRecognitionLite', { times, preFetchMs });
588
+ const opId = nextSpeechOpId('unpause-stt');
589
+ const startedAt = Date.now();
574
590
 
575
591
  const mod: any = Platform.OS === 'ios' ? NativeSpeech : NativeSTT;
576
592
  const fn = mod?.unPauseSpeechRecognitionLite;
@@ -581,7 +597,15 @@ class Speech {
581
597
  }
582
598
 
583
599
  if (Platform.OS === 'ios' && typeof mod?.unPauseSpeechRecognitionLiteAsync === 'function') {
584
- const result = await mod.unPauseSpeechRecognitionLiteAsync(times, preFetchMs, 2500);
600
+ dbg('[unPauseSpeechRecognitionLiteAsync] begin', { opId, times, preFetchMs, timeoutMs });
601
+ const result = await mod.unPauseSpeechRecognitionLiteAsync(times, preFetchMs, timeoutMs);
602
+ dbg('[unPauseSpeechRecognitionLiteAsync] resolved', {
603
+ opId,
604
+ times,
605
+ preFetchMs,
606
+ elapsedMs: Date.now() - startedAt,
607
+ result,
608
+ });
585
609
  if (result?.ok === false) dbgErr('unPauseSpeechRecognitionLiteAsync failed', result?.reason);
586
610
  return;
587
611
  }