@independo/capacitor-voice-recorder 8.0.2-dev.1 → 8.1.0-dev.2
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/README.md +130 -32
- package/android/build.gradle +44 -1
- package/android/src/main/java/app/independo/capacitorvoicerecorder/VoiceRecorder.java +146 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/adapters/PermissionChecker.java +8 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/adapters/RecordDataMapper.java +32 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/adapters/RecorderAdapter.java +39 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/adapters/RecorderPlatform.java +25 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/core/CurrentRecordingStatus.java +9 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/core/ErrorCodes.java +19 -0
- package/android/src/main/java/{com/tchvu3/capacitorvoicerecorder → app/independo/capacitorvoicerecorder/core}/Messages.java +2 -1
- package/android/src/main/java/{com/tchvu3/capacitorvoicerecorder → app/independo/capacitorvoicerecorder/core}/RecordData.java +15 -1
- package/android/src/main/java/app/independo/capacitorvoicerecorder/core/RecordOptions.java +4 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/core/ResponseFormat.java +18 -0
- package/android/src/main/java/{com/tchvu3/capacitorvoicerecorder → app/independo/capacitorvoicerecorder/core}/ResponseGenerator.java +7 -1
- package/android/src/main/java/app/independo/capacitorvoicerecorder/platform/CustomMediaRecorder.java +281 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/platform/DefaultRecorderPlatform.java +86 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/platform/NotSupportedOsVersion.java +4 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/service/VoiceRecorderService.java +144 -0
- package/android/src/main/java/app/independo/capacitorvoicerecorder/service/VoiceRecorderServiceException.java +23 -0
- package/dist/docs.json +145 -5
- package/dist/esm/adapters/VoiceRecorderWebAdapter.d.ts +23 -0
- package/dist/esm/adapters/VoiceRecorderWebAdapter.js +41 -0
- package/dist/esm/adapters/VoiceRecorderWebAdapter.js.map +1 -0
- package/dist/esm/core/error-codes.d.ts +4 -0
- package/dist/esm/core/error-codes.js +21 -0
- package/dist/esm/core/error-codes.js.map +1 -0
- package/dist/esm/core/recording-contract.d.ts +3 -0
- package/dist/esm/core/recording-contract.js +15 -0
- package/dist/esm/core/recording-contract.js.map +1 -0
- package/dist/esm/core/response-format.d.ts +8 -0
- package/dist/esm/core/response-format.js +17 -0
- package/dist/esm/core/response-format.js.map +1 -0
- package/dist/esm/definitions.d.ts +36 -3
- package/dist/esm/definitions.js.map +1 -1
- package/dist/esm/platform/web/VoiceRecorderImpl.d.ts +45 -0
- package/dist/esm/{VoiceRecorderImpl.js → platform/web/VoiceRecorderImpl.js} +20 -2
- package/dist/esm/platform/web/VoiceRecorderImpl.js.map +1 -0
- package/dist/esm/platform/web/get-blob-duration.js.map +1 -0
- package/dist/esm/{predefined-web-responses.d.ts → platform/web/predefined-web-responses.d.ts} +12 -1
- package/dist/esm/{predefined-web-responses.js → platform/web/predefined-web-responses.js} +11 -0
- package/dist/esm/platform/web/predefined-web-responses.js.map +1 -0
- package/dist/esm/service/VoiceRecorderService.d.ts +47 -0
- package/dist/esm/service/VoiceRecorderService.js +60 -0
- package/dist/esm/service/VoiceRecorderService.js.map +1 -0
- package/dist/esm/web.d.ts +12 -1
- package/dist/esm/web.js +26 -12
- package/dist/esm/web.js.map +1 -1
- package/dist/plugin.cjs.js +200 -9
- package/dist/plugin.cjs.js.map +1 -1
- package/dist/plugin.js +200 -9
- package/dist/plugin.js.map +1 -1
- package/ios/Sources/VoiceRecorder/Adapters/DefaultRecorderPlatform.swift +33 -0
- package/ios/Sources/VoiceRecorder/Adapters/RecordDataMapper.swift +38 -0
- package/ios/Sources/VoiceRecorder/Adapters/RecorderAdapter.swift +24 -0
- package/ios/Sources/VoiceRecorder/Adapters/RecorderPlatform.swift +11 -0
- package/ios/Sources/VoiceRecorder/Bridge/VoiceRecorder.swift +172 -0
- package/ios/Sources/VoiceRecorder/{CurrentRecordingStatus.swift → Core/CurrentRecordingStatus.swift} +2 -0
- package/ios/Sources/VoiceRecorder/Core/ErrorCodes.swift +16 -0
- package/ios/Sources/VoiceRecorder/{Messages.swift → Core/Messages.swift} +2 -0
- package/ios/Sources/VoiceRecorder/{RecordData.swift → Core/RecordData.swift} +6 -0
- package/ios/Sources/VoiceRecorder/Core/RecordOptions.swift +11 -0
- package/ios/Sources/VoiceRecorder/Core/ResponseFormat.swift +22 -0
- package/ios/Sources/VoiceRecorder/{ResponseGenerator.swift → Core/ResponseGenerator.swift} +6 -0
- package/ios/Sources/VoiceRecorder/Platform/CustomMediaRecorder.swift +359 -0
- package/ios/Sources/VoiceRecorder/Service/VoiceRecorderService.swift +128 -0
- package/ios/Sources/VoiceRecorder/Service/VoiceRecorderServiceError.swift +14 -0
- package/package.json +10 -4
- package/android/src/main/java/com/tchvu3/capacitorvoicerecorder/CurrentRecordingStatus.java +0 -7
- package/android/src/main/java/com/tchvu3/capacitorvoicerecorder/CustomMediaRecorder.java +0 -149
- package/android/src/main/java/com/tchvu3/capacitorvoicerecorder/NotSupportedOsVersion.java +0 -3
- package/android/src/main/java/com/tchvu3/capacitorvoicerecorder/RecordOptions.java +0 -3
- package/android/src/main/java/com/tchvu3/capacitorvoicerecorder/VoiceRecorder.java +0 -203
- package/dist/esm/VoiceRecorderImpl.d.ts +0 -27
- package/dist/esm/VoiceRecorderImpl.js.map +0 -1
- package/dist/esm/helper/get-blob-duration.js.map +0 -1
- package/dist/esm/predefined-web-responses.js.map +0 -1
- package/ios/Sources/VoiceRecorder/CustomMediaRecorder.swift +0 -113
- package/ios/Sources/VoiceRecorder/RecordOptions.swift +0 -8
- package/ios/Sources/VoiceRecorder/VoiceRecorder.swift +0 -147
- /package/dist/esm/{helper → platform/web}/get-blob-duration.d.ts +0 -0
- /package/dist/esm/{helper → platform/web}/get-blob-duration.js +0 -0
package/ios/Sources/VoiceRecorder/Bridge/VoiceRecorder.swift

@@ -0,0 +1,172 @@
+import Foundation
+import AVFoundation
+import Capacitor
+
+/// Capacitor bridge for the VoiceRecorder plugin.
+@objc(VoiceRecorder)
+public class VoiceRecorder: CAPPlugin, CAPBridgedPlugin {
+    /// Plugin identifier used by Capacitor.
+    public let identifier = "VoiceRecorder"
+    /// JavaScript name used for the plugin proxy.
+    public let jsName = "VoiceRecorder"
+    /// Supported plugin methods exposed to the JS layer.
+    public let pluginMethods: [CAPPluginMethod] = [
+        CAPPluginMethod(name: "canDeviceVoiceRecord", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "requestAudioRecordingPermission", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "hasAudioRecordingPermission", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "startRecording", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "stopRecording", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "pauseRecording", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "resumeRecording", returnType: CAPPluginReturnPromise),
+        CAPPluginMethod(name: "getCurrentStatus", returnType: CAPPluginReturnPromise),
+    ]
+
+    /// Service layer that performs recording operations.
+    private var service: VoiceRecorderService?
+    /// Response format derived from plugin configuration.
+    private var responseFormat: ResponseFormat = .legacy
+
+    /// Initializes dependencies after the plugin loads.
+    public override func load() {
+        super.load()
+        responseFormat = ResponseFormat(config: getConfig())
+        service = VoiceRecorderService(
+            platform: DefaultRecorderPlatform(),
+            permissionChecker: { [weak self] in
+                self?.doesUserGaveAudioRecordingPermission() ?? false
+            }
+        )
+    }
+
+    /// Returns whether the device can record audio.
+    @objc func canDeviceVoiceRecord(_ call: CAPPluginCall) {
+        let canRecord = service?.canDeviceVoiceRecord() ?? false
+        call.resolve(ResponseGenerator.fromBoolean(canRecord))
+    }
+
+    /// Requests microphone permission from the user.
+    @objc func requestAudioRecordingPermission(_ call: CAPPluginCall) {
+        AVAudioSession.sharedInstance().requestRecordPermission { granted in
+            if granted {
+                call.resolve(ResponseGenerator.successResponse())
+            } else {
+                call.resolve(ResponseGenerator.failResponse())
+            }
+        }
+    }
+
+    /// Returns whether the app has microphone permission.
+    @objc func hasAudioRecordingPermission(_ call: CAPPluginCall) {
+        let hasPermission = service?.hasAudioRecordingPermission() ?? false
+        call.resolve(ResponseGenerator.fromBoolean(hasPermission))
+    }
+
+    /// Starts a recording session with optional file output.
+    @objc func startRecording(_ call: CAPPluginCall) {
+        guard let service = service else {
+            call.reject(Messages.FAILED_TO_RECORD, ErrorCodes.failedToRecord)
+            return
+        }
+
+        let directory: String? = call.getString("directory")
+        let subDirectory: String? = call.getString("subDirectory")
+        let recordOptions = RecordOptions(directory: directory, subDirectory: subDirectory)
+        do {
+            try service.startRecording(
+                options: recordOptions,
+                onInterruptionBegan: { [weak self] in
+                    self?.notifyListeners("voiceRecordingInterrupted", data: [:])
+                },
+                onInterruptionEnded: { [weak self] in
+                    self?.notifyListeners("voiceRecordingInterruptionEnded", data: [:])
+                }
+            )
+            call.resolve(ResponseGenerator.successResponse())
+        } catch let error as VoiceRecorderServiceError {
+            call.reject(toLegacyMessage(error.code), error.code, error.underlyingError ?? error)
+        } catch {
+            call.reject(Messages.FAILED_TO_RECORD, ErrorCodes.failedToRecord, error)
+        }
+    }
+
+    /// Stops recording and returns the audio payload.
+    @objc func stopRecording(_ call: CAPPluginCall) {
+        guard let service = service else {
+            call.reject(Messages.FAILED_TO_FETCH_RECORDING, ErrorCodes.failedToFetchRecording)
+            return
+        }
+
+        service.stopRecording { [weak self] result in
+            DispatchQueue.main.async {
+                guard let self = self else {
+                    call.reject(Messages.FAILED_TO_FETCH_RECORDING, ErrorCodes.failedToFetchRecording)
+                    return
+                }
+
+                switch result {
+                case .success(let recordData):
+                    let payload: Dictionary<String, Any>
+                    if self.responseFormat == .normalized {
+                        payload = RecordDataMapper.toNormalizedDictionary(recordData)
+                    } else {
+                        payload = RecordDataMapper.toLegacyDictionary(recordData)
+                    }
+                    call.resolve(ResponseGenerator.dataResponse(payload))
+                case .failure(let error):
+                    call.reject(self.toLegacyMessage(error.code), error.code, error.underlyingError ?? error)
+                }
+            }
+        }
+    }
+
+    /// Pauses a recording session if supported.
+    @objc func pauseRecording(_ call: CAPPluginCall) {
+        guard let service = service else {
+            call.reject(Messages.RECORDING_HAS_NOT_STARTED, ErrorCodes.recordingHasNotStarted)
+            return
+        }
+
+        do {
+            call.resolve(ResponseGenerator.fromBoolean(try service.pauseRecording()))
+        } catch let error as VoiceRecorderServiceError {
+            call.reject(toLegacyMessage(error.code), error.code, error.underlyingError ?? error)
+        } catch {
+            call.reject(Messages.FAILED_TO_RECORD, ErrorCodes.failedToRecord, error)
+        }
+    }
+
+    /// Resumes a paused recording session if supported.
+    @objc func resumeRecording(_ call: CAPPluginCall) {
+        guard let service = service else {
+            call.reject(Messages.RECORDING_HAS_NOT_STARTED, ErrorCodes.recordingHasNotStarted)
+            return
+        }
+
+        do {
+            call.resolve(ResponseGenerator.fromBoolean(try service.resumeRecording()))
+        } catch let error as VoiceRecorderServiceError {
+            call.reject(toLegacyMessage(error.code), error.code, error.underlyingError ?? error)
+        } catch {
+            call.reject(Messages.FAILED_TO_RECORD, ErrorCodes.failedToRecord, error)
+        }
+    }
+
+    /// Returns the current recording status.
+    @objc func getCurrentStatus(_ call: CAPPluginCall) {
+        let status = service?.getCurrentStatus() ?? .NONE
+        call.resolve(ResponseGenerator.statusResponse(status))
+    }
+
+    /// Returns whether AVAudioSession reports granted permission.
+    func doesUserGaveAudioRecordingPermission() -> Bool {
+        return AVAudioSession.sharedInstance().recordPermission == AVAudioSession.RecordPermission.granted
+    }
+
+    /// Maps canonical error codes back to legacy messages.
+    private func toLegacyMessage(_ canonicalCode: String) -> String {
+        if canonicalCode == ErrorCodes.deviceCannotVoiceRecord {
+            return Messages.CANNOT_RECORD_ON_THIS_PHONE
+        }
+        return canonicalCode
+    }
+}
package/ios/Sources/VoiceRecorder/Core/ErrorCodes.swift

@@ -0,0 +1,16 @@
+import Foundation
+
+/// Canonical error codes returned by the plugin.
+struct ErrorCodes {
+    static let missingPermission = "MISSING_PERMISSION"
+    static let alreadyRecording = "ALREADY_RECORDING"
+    static let microphoneBeingUsed = "MICROPHONE_BEING_USED"
+    static let deviceCannotVoiceRecord = "DEVICE_CANNOT_VOICE_RECORD"
+    static let failedToRecord = "FAILED_TO_RECORD"
+    static let emptyRecording = "EMPTY_RECORDING"
+    static let recordingHasNotStarted = "RECORDING_HAS_NOT_STARTED"
+    static let failedToFetchRecording = "FAILED_TO_FETCH_RECORDING"
+    static let failedToMergeRecording = "FAILED_TO_MERGE_RECORDING"
+    static let notSupportedOsVersion = "NOT_SUPPORTED_OS_VERSION"
+    static let couldNotQueryPermissionStatus = "COULD_NOT_QUERY_PERMISSION_STATUS"
+}
package/ios/Sources/VoiceRecorder/{Messages.swift → Core/Messages.swift}

@@ -1,5 +1,6 @@
 import Foundation
 
+/// Legacy error messages preserved for backward compatibility.
 struct Messages {
 
     static let MISSING_PERMISSION = "MISSING_PERMISSION"
@@ -7,6 +8,7 @@ struct Messages {
     static let FAILED_TO_RECORD = "FAILED_TO_RECORD"
     static let RECORDING_HAS_NOT_STARTED = "RECORDING_HAS_NOT_STARTED"
     static let FAILED_TO_FETCH_RECORDING = "FAILED_TO_FETCH_RECORDING"
+    static let FAILED_TO_MERGE_RECORDING = "FAILED_TO_MERGE_RECORDING"
     static let EMPTY_RECORDING = "EMPTY_RECORDING"
     static let ALREADY_RECORDING = "ALREADY_RECORDING"
     static let MICROPHONE_BEING_USED = "MICROPHONE_BEING_USED"
package/ios/Sources/VoiceRecorder/{RecordData.swift → Core/RecordData.swift}

@@ -1,12 +1,18 @@
 import Foundation
 
+/// Recording payload returned to the bridge layer.
 struct RecordData {
 
+    /// Base64-encoded recording data (legacy payloads).
     public let recordDataBase64: String?
+    /// MIME type of the recorded audio.
     public let mimeType: String
+    /// Recording duration in milliseconds.
     public let msDuration: Int
+    /// File path or URI to the recorded audio.
     public let uri: String?
 
+    /// Serializes record data into the legacy payload shape.
     public func toDictionary() -> Dictionary<String, Any> {
         return [
             "recordDataBase64": recordDataBase64 ?? "",
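For orientation, the legacy payload built by RecordData.toDictionary() looks roughly like the sketch below. The hunk above is truncated after the first key, so the remaining entries are an assumption based on the stored properties, and the values shown are hypothetical.

// Illustrative sketch (not part of the package diff): RecordData's memberwise
// initializer and the assumed shape of the legacy dictionary.
let recordData = RecordData(
    recordDataBase64: nil,
    mimeType: "audio/aac",
    msDuration: 1_500,
    uri: "/tmp/recording-123.aac"
)
let legacyPayload = recordData.toDictionary()
// Assumed result: ["recordDataBase64": "", "mimeType": "audio/aac", "msDuration": 1500, "uri": ...]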
package/ios/Sources/VoiceRecorder/Core/RecordOptions.swift

@@ -0,0 +1,11 @@
+import Foundation
+
+/// Optional output configuration for recordings.
+struct RecordOptions {
+
+    /// Directory name provided by the caller.
+    public let directory: String?
+    /// Subdirectory name provided by the caller.
+    public let subDirectory: String?
+
+}
package/ios/Sources/VoiceRecorder/Core/ResponseFormat.swift

@@ -0,0 +1,22 @@
+import Foundation
+import Capacitor
+
+/// Supported response payload shapes.
+enum ResponseFormat: String {
+    case legacy
+    case normalized
+
+    /// Converts a raw config value into a response format.
+    static func from(value: String?) -> ResponseFormat {
+        guard let value = value?.lowercased(), value == "normalized" else {
+            return .legacy
+        }
+        return .normalized
+    }
+
+    /// Reads the response format from plugin configuration.
+    init(config: PluginConfig) {
+        let value = config.getString("responseFormat", "legacy") ?? "legacy"
+        self = ResponseFormat.from(value: value)
+    }
+}
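A quick sketch of how the responseFormat config value resolves, based only on ResponseFormat.from(value:) above: matching is case-insensitive, and any value other than "normalized" (including a missing key) falls back to the legacy shape used by the bridge's stopRecording handler.

// Illustrative sketch (not part of the package diff).
let explicit = ResponseFormat.from(value: "normalized")    // .normalized
let uppercased = ResponseFormat.from(value: "NORMALIZED")  // .normalized (lowercased before comparison)
let legacy = ResponseFormat.from(value: "legacy")          // .legacy
let missing = ResponseFormat.from(value: nil)              // .legacy (default when the key is absent)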
package/ios/Sources/VoiceRecorder/{ResponseGenerator.swift → Core/ResponseGenerator.swift}

@@ -1,26 +1,32 @@
 import Foundation
 
+/// Helper for building JS payloads in the legacy response shape.
 struct ResponseGenerator {
 
     private static let VALUE_RESPONSE_KEY = "value"
     private static let STATUS_RESPONSE_KEY = "status"
 
+    /// Wraps a boolean value into the response shape.
     public static func fromBoolean(_ value: Bool) -> Dictionary<String, Bool> {
         return value ? successResponse() : failResponse()
     }
 
+    /// Returns a success response with value=true.
     public static func successResponse() -> Dictionary<String, Bool> {
         return [VALUE_RESPONSE_KEY: true]
     }
 
+    /// Returns a failure response with value=false.
     public static func failResponse() -> Dictionary<String, Bool> {
         return [VALUE_RESPONSE_KEY: false]
     }
 
+    /// Wraps arbitrary data into the response shape.
     public static func dataResponse(_ data: Any) -> Dictionary<String, Any> {
         return [VALUE_RESPONSE_KEY: data]
     }
 
+    /// Wraps the recording status into the response shape.
     public static func statusResponse(_ data: CurrentRecordingStatus) -> Dictionary<String, String> {
         return [STATUS_RESPONSE_KEY: data.rawValue]
     }
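For reference, the helpers above produce thin wrapper dictionaries; a minimal sketch of the shapes the bridge resolves with (the msDuration value is hypothetical):

// Illustrative sketch (not part of the package diff).
let granted = ResponseGenerator.fromBoolean(true)                    // ["value": true]
let denied = ResponseGenerator.failResponse()                        // ["value": false]
let payload = ResponseGenerator.dataResponse(["msDuration": 1_500])  // ["value": ["msDuration": 1500]]
let status = ResponseGenerator.statusResponse(.RECORDING)            // ["status": CurrentRecordingStatus.RECORDING.rawValue]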
package/ios/Sources/VoiceRecorder/Platform/CustomMediaRecorder.swift

@@ -0,0 +1,359 @@
+import Foundation
+import AVFoundation
+
+/// AVAudioRecorder wrapper that supports interruptions and segment merging.
+class CustomMediaRecorder: RecorderAdapter {
+
+    /// Options provided by the service layer.
+    public var options: RecordOptions?
+    /// Active audio session for recording.
+    private var recordingSession: AVAudioSession!
+    /// Active recorder instance for the current segment.
+    private var audioRecorder: AVAudioRecorder!
+    /// Base file path for the merged recording.
+    private var baseAudioFilePath: URL!
+    /// List of segment files created during interruptions.
+    private var audioFileSegments: [URL] = []
+    /// Audio session category before recording starts.
+    private var originalRecordingSessionCategory: AVAudioSession.Category!
+    /// Current recording status.
+    private var status = CurrentRecordingStatus.NONE
+    /// Notification observer for audio interruptions.
+    private var interruptionObserver: NSObjectProtocol?
+    /// Callback invoked when interruptions begin.
+    var onInterruptionBegan: (() -> Void)?
+    /// Callback invoked when interruptions end.
+    var onInterruptionEnded: (() -> Void)?
+
+    /// Recorder settings used for all segments.
+    private let settings = [
+        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
+        AVSampleRateKey: 44100,
+        AVNumberOfChannelsKey: 1,
+        AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
+    ]
+
+    /// Resolves the directory where audio files should be saved.
+    private func getDirectoryToSaveAudioFile() -> URL {
+        if options?.directory != nil,
+           let directory = getDirectory(directory: options?.directory),
+           var outputDirURL = FileManager.default.urls(for: directory, in: .userDomainMask).first {
+            if let subDirectory = options?.subDirectory?.trimmingCharacters(in: CharacterSet(charactersIn: "/")) {
+                outputDirURL = outputDirURL.appendingPathComponent(subDirectory, isDirectory: true)
+
+                do {
+                    if !FileManager.default.fileExists(atPath: outputDirURL.path) {
+                        try FileManager.default.createDirectory(at: outputDirURL, withIntermediateDirectories: true)
+                    }
+                } catch {
+                    print("Error creating directory: \(error)")
+                }
+            }
+
+            return outputDirURL
+        }
+
+        return URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
+    }
+
+    /// Starts recording audio and prepares the session.
+    public func startRecording(recordOptions: RecordOptions?) -> Bool {
+        do {
+            options = recordOptions
+            recordingSession = AVAudioSession.sharedInstance()
+            originalRecordingSessionCategory = recordingSession.category
+            try recordingSession.setCategory(AVAudioSession.Category.playAndRecord)
+            try recordingSession.setActive(true)
+            baseAudioFilePath = getDirectoryToSaveAudioFile().appendingPathComponent("recording-\(Int(Date().timeIntervalSince1970 * 1000)).aac")
+            audioFileSegments = [baseAudioFilePath]
+            audioRecorder = try AVAudioRecorder(url: baseAudioFilePath, settings: settings)
+            setupInterruptionHandling()
+            audioRecorder.record()
+            status = CurrentRecordingStatus.RECORDING
+            return true
+        } catch {
+            return false
+        }
+    }
+
+    /// Stops recording and merges segments if needed.
+    public func stopRecording(completion: @escaping (Bool) -> Void) {
+        removeInterruptionHandling()
+        audioRecorder.stop()
+
+        let finalizeStop: (Bool) -> Void = { [weak self] success in
+            guard let self = self else {
+                completion(false)
+                return
+            }
+
+            do {
+                try self.recordingSession.setActive(false)
+                try self.recordingSession.setCategory(self.originalRecordingSessionCategory)
+            } catch {
+            }
+
+            self.originalRecordingSessionCategory = nil
+            self.audioRecorder = nil
+            self.recordingSession = nil
+            self.status = CurrentRecordingStatus.NONE
+            completion(success)
+        }
+
+        if audioFileSegments.count > 1 {
+            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                guard let self = self else {
+                    completion(false)
+                    return
+                }
+                self.mergeAudioSegments { success in
+                    finalizeStop(success)
+                }
+            }
+        } else {
+            finalizeStop(true)
+        }
+    }
+
+    /// Returns the output file for the recording.
+    public func getOutputFile() -> URL {
+        return baseAudioFilePath
+    }
+
+    /// Maps directory strings to FileManager search paths.
+    public func getDirectory(directory: String?) -> FileManager.SearchPathDirectory? {
+        if let directory = directory {
+            switch directory {
+            case "CACHE":
+                return .cachesDirectory
+            case "LIBRARY":
+                return .libraryDirectory
+            default:
+                return .documentDirectory
+            }
+        }
+        return nil
+    }
+
+    /// Pauses recording when currently active.
+    public func pauseRecording() -> Bool {
+        if(status == CurrentRecordingStatus.RECORDING) {
+            audioRecorder.pause()
+            status = CurrentRecordingStatus.PAUSED
+            return true
+        } else {
+            return false
+        }
+    }
+
+    /// Resumes recording after pause or interruption.
+    public func resumeRecording() -> Bool {
+        if(status == CurrentRecordingStatus.PAUSED || status == CurrentRecordingStatus.INTERRUPTED) {
+            let wasInterrupted = status == CurrentRecordingStatus.INTERRUPTED
+            do {
+                try recordingSession.setActive(true)
+                if status == CurrentRecordingStatus.INTERRUPTED {
+                    let directory = getDirectoryToSaveAudioFile()
+                    let timestamp = Int(Date().timeIntervalSince1970 * 1000)
+                    let segmentNumber = audioFileSegments.count
+                    let segmentPath = directory.appendingPathComponent("recording-\(timestamp)-segment-\(segmentNumber).aac")
+                    audioRecorder = try AVAudioRecorder(url: segmentPath, settings: settings)
+                    audioFileSegments.append(segmentPath)
+                }
+                audioRecorder.record()
+                status = CurrentRecordingStatus.RECORDING
+                return true
+            } catch {
+                if wasInterrupted {
+                    try? recordingSession.setActive(false)
+                }
+                return false
+            }
+        }
+
+        return false
+    }
+
+    /// Returns the current recording status.
+    public func getCurrentStatus() -> CurrentRecordingStatus {
+        return status
+    }
+
+    /// Registers for interruption notifications.
+    private func setupInterruptionHandling() {
+        interruptionObserver = NotificationCenter.default.addObserver(
+            forName: AVAudioSession.interruptionNotification,
+            object: AVAudioSession.sharedInstance(),
+            queue: .main
+        ) { [weak self] notification in
+            self?.handleInterruption(notification: notification)
+        }
+    }
+
+    /// Removes interruption observers.
+    private func removeInterruptionHandling() {
+        if let observer = interruptionObserver {
+            NotificationCenter.default.removeObserver(observer)
+            interruptionObserver = nil
+        }
+    }
+
+    /// Handles audio session interruptions.
+    private func handleInterruption(notification: Notification) {
+        guard let userInfo = notification.userInfo,
+              let interruptionTypeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
+              let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeValue) else {
+            return
+        }
+
+        switch interruptionType {
+        case .began:
+            if status == CurrentRecordingStatus.RECORDING {
+                audioRecorder.stop()
+                status = CurrentRecordingStatus.INTERRUPTED
+                onInterruptionBegan?()
+            }
+
+        case .ended:
+            if status == CurrentRecordingStatus.INTERRUPTED {
+                onInterruptionEnded?()
+            }
+
+        @unknown default:
+            break
+        }
+    }
+
+    /// Merges recorded segments into a single file when interruptions occur.
+    private func mergeAudioSegments(completion: @escaping (Bool) -> Void) {
+        if audioFileSegments.count <= 1 {
+            completion(true)
+            return
+        }
+
+        let basePathWithoutExtension = baseAudioFilePath.deletingPathExtension()
+        let mergedFilePath = basePathWithoutExtension.appendingPathExtension("m4a")
+        let segmentURLs = audioFileSegments
+        let keys = ["tracks", "duration"]
+        let dispatchGroup = DispatchGroup()
+        let syncQueue = DispatchQueue(label: "CustomMediaRecorder.assetSyncQueue")
+        var loadedAssets = Array<AVURLAsset?>(repeating: nil, count: segmentURLs.count)
+        var loadFailed = false
+
+        for (index, segmentURL) in segmentURLs.enumerated() {
+            let asset = AVURLAsset(url: segmentURL)
+            dispatchGroup.enter()
+            asset.loadValuesAsynchronously(forKeys: keys) {
+                var assetIsValid = true
+                for key in keys {
+                    var error: NSError?
+                    if asset.statusOfValue(forKey: key, error: &error) != .loaded {
+                        assetIsValid = false
+                        break
+                    }
+                }
+                syncQueue.async {
+                    if assetIsValid {
+                        loadedAssets[index] = asset
+                    } else {
+                        loadFailed = true
+                    }
+                    dispatchGroup.leave()
+                }
+            }
+        }
+
+        dispatchGroup.notify(queue: DispatchQueue.global(qos: .userInitiated)) { [weak self] in
+            guard let self = self else {
+                completion(false)
+                return
+            }
+
+            var assets: [AVURLAsset] = []
+            var didFail = false
+            syncQueue.sync {
+                if loadFailed || loadedAssets.contains(where: { $0 == nil }) {
+                    didFail = true
+                } else {
+                    assets = loadedAssets.compactMap { $0 }
+                }
+            }
+
+            if didFail || assets.count != segmentURLs.count {
+                completion(false)
+                return
+            }
+
+            let composition = AVMutableComposition()
+            guard let compositionAudioTrack = composition.addMutableTrack(
+                withMediaType: .audio,
+                preferredTrackID: kCMPersistentTrackID_Invalid
+            ) else {
+                completion(false)
+                return
+            }
+
+            var insertTime = CMTime.zero
+
+            for asset in assets {
+                guard let assetTrack = asset.tracks(withMediaType: .audio).first else {
+                    completion(false)
+                    return
+                }
+
+                do {
+                    let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
+                    try compositionAudioTrack.insertTimeRange(timeRange, of: assetTrack, at: insertTime)
+                    insertTime = CMTimeAdd(insertTime, asset.duration)
+                } catch {
+                    completion(false)
+                    return
+                }
+            }
+
+            guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
+                completion(false)
+                return
+            }
+
+            let tempDirectory = self.getDirectoryToSaveAudioFile()
+            let tempPath = tempDirectory.appendingPathComponent("temp-merged-\(Int(Date().timeIntervalSince1970 * 1000)).m4a")
+
+            exportSession.outputURL = tempPath
+            exportSession.outputFileType = .m4a
+
+            exportSession.exportAsynchronously {
+                guard exportSession.status == .completed else {
+                    completion(false)
+                    return
+                }
+
+                if !FileManager.default.fileExists(atPath: tempPath.path) {
+                    completion(false)
+                    return
+                }
+
+                do {
+                    if FileManager.default.fileExists(atPath: mergedFilePath.path) {
+                        try FileManager.default.removeItem(at: mergedFilePath)
+                    }
+                    try FileManager.default.moveItem(at: tempPath, to: mergedFilePath)
+
+                    for segmentURL in self.audioFileSegments {
+                        if segmentURL != mergedFilePath && FileManager.default.fileExists(atPath: segmentURL.path) {
+                            try? FileManager.default.removeItem(at: segmentURL)
+                        }
+                    }
+                    self.baseAudioFilePath = mergedFilePath
+                    completion(true)
+                } catch {
+                    if FileManager.default.fileExists(atPath: tempPath.path) {
+                        try? FileManager.default.removeItem(at: tempPath)
+                    }
+                    completion(false)
+                }
+            }
+        }
+    }
+
+}
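To make the recorder's contract concrete, here is a minimal lifecycle sketch that drives CustomMediaRecorder directly. In the plugin it is wired through VoiceRecorderService rather than used like this, and the directory and sub-directory strings are just example values accepted by getDirectory(directory:).

// Illustrative sketch (not part of the package diff), assuming direct use of the recorder.
let recorder = CustomMediaRecorder()
recorder.onInterruptionBegan = { print("recording interrupted") }
recorder.onInterruptionEnded = { print("interruption ended; caller may resumeRecording()") }

let options = RecordOptions(directory: "LIBRARY", subDirectory: "voice-notes")
if recorder.startRecording(recordOptions: options) {
    // ... later, when the user stops recording:
    recorder.stopRecording { success in
        if success {
            // A single segment stays .aac; merged interrupted segments end up as .m4a.
            let output = recorder.getOutputFile()
            print("saved to \(output.path)")
        }
    }
}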