react-native-audio-api 0.5.5 → 0.6.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/RNAudioAPI.podspec +1 -1
- package/android/src/main/cpp/audioapi/android/core/AudioPlayer.cpp +0 -20
- package/android/src/main/cpp/audioapi/android/core/AudioPlayer.h +0 -2
- package/android/src/main/java/com/swmansion/audioapi/AudioAPIPackage.kt +13 -0
- package/android/src/main/java/com/swmansion/audioapi/AudioManagerModule.kt +59 -0
- package/android/src/oldarch/NativeAudioManagerModuleSpec.java +99 -0
- package/common/cpp/audioapi/AudioAPIModuleInstaller.h +30 -6
- package/common/cpp/audioapi/HostObjects/OfflineAudioContextHostObject.h +70 -0
- package/common/cpp/audioapi/core/AudioContext.cpp +1 -12
- package/common/cpp/audioapi/core/AudioContext.h +0 -1
- package/common/cpp/audioapi/core/OfflineAudioContext.cpp +117 -0
- package/common/cpp/audioapi/core/OfflineAudioContext.h +40 -0
- package/common/cpp/audioapi/core/sources/AudioBufferSourceNode.cpp +3 -3
- package/common/cpp/audioapi/core/sources/AudioScheduledSourceNode.cpp +28 -2
- package/common/cpp/audioapi/core/utils/AudioNodeDestructor.cpp +53 -0
- package/common/cpp/audioapi/core/utils/AudioNodeDestructor.h +33 -0
- package/common/cpp/audioapi/core/utils/AudioNodeManager.cpp +13 -10
- package/common/cpp/audioapi/core/utils/AudioNodeManager.h +3 -0
- package/common/cpp/audioapi/libs/signalsmith-stretch/fft-accelerate.h +326 -0
- package/common/cpp/audioapi/libs/signalsmith-stretch/fft.h +1257 -413
- package/common/cpp/audioapi/libs/signalsmith-stretch/signalsmith-stretch.h +398 -232
- package/common/cpp/audioapi/libs/signalsmith-stretch/stft.h +625 -0
- package/ios/audioapi/ios/AudioAPIModule.mm +2 -3
- package/ios/audioapi/ios/AudioManagerModule.h +18 -0
- package/ios/audioapi/ios/AudioManagerModule.mm +92 -0
- package/ios/audioapi/ios/core/AudioPlayer.h +4 -12
- package/ios/audioapi/ios/core/AudioPlayer.m +26 -108
- package/ios/audioapi/ios/core/IOSAudioPlayer.h +1 -3
- package/ios/audioapi/ios/core/IOSAudioPlayer.mm +4 -28
- package/ios/audioapi/ios/system/AudioEngine.h +23 -0
- package/ios/audioapi/ios/system/AudioEngine.mm +137 -0
- package/ios/audioapi/ios/system/AudioSessionManager.h +22 -0
- package/ios/audioapi/ios/system/AudioSessionManager.mm +183 -0
- package/ios/audioapi/ios/system/LockScreenManager.h +23 -0
- package/ios/audioapi/ios/system/LockScreenManager.mm +299 -0
- package/ios/audioapi/ios/system/NotificationManager.h +16 -0
- package/ios/audioapi/ios/system/NotificationManager.mm +151 -0
- package/lib/module/api.js +3 -1
- package/lib/module/api.js.map +1 -1
- package/lib/module/api.web.js +1 -0
- package/lib/module/api.web.js.map +1 -1
- package/lib/module/core/AudioContext.js +2 -1
- package/lib/module/core/AudioContext.js.map +1 -1
- package/lib/module/core/OfflineAudioContext.js +57 -0
- package/lib/module/core/OfflineAudioContext.js.map +1 -0
- package/lib/module/specs/NativeAudioManagerModule.js +31 -0
- package/lib/module/specs/NativeAudioManagerModule.js.map +1 -0
- package/lib/module/specs/index.js +6 -0
- package/lib/module/specs/index.js.map +1 -0
- package/lib/module/system/AudioManager.js +66 -0
- package/lib/module/system/AudioManager.js.map +1 -0
- package/lib/module/system/index.js +4 -0
- package/lib/module/system/index.js.map +1 -0
- package/lib/module/system/types.js +2 -0
- package/lib/module/system/types.js.map +1 -0
- package/lib/module/web-core/OfflineAudioContext.js +90 -0
- package/lib/module/web-core/OfflineAudioContext.js.map +1 -0
- package/lib/typescript/api.d.ts +4 -1
- package/lib/typescript/api.d.ts.map +1 -1
- package/lib/typescript/api.web.d.ts +1 -0
- package/lib/typescript/api.web.d.ts.map +1 -1
- package/lib/typescript/core/AudioContext.d.ts.map +1 -1
- package/lib/typescript/core/OfflineAudioContext.d.ts +14 -0
- package/lib/typescript/core/OfflineAudioContext.d.ts.map +1 -0
- package/lib/typescript/interfaces.d.ts +6 -0
- package/lib/typescript/interfaces.d.ts.map +1 -1
- package/lib/typescript/specs/NativeAudioManagerModule.d.ts +13 -0
- package/lib/typescript/specs/NativeAudioManagerModule.d.ts.map +1 -0
- package/lib/typescript/specs/index.d.ts +4 -0
- package/lib/typescript/specs/index.d.ts.map +1 -0
- package/lib/typescript/system/AudioManager.d.ts +12 -0
- package/lib/typescript/system/AudioManager.d.ts.map +1 -0
- package/lib/typescript/system/index.d.ts +2 -0
- package/lib/typescript/system/index.d.ts.map +1 -0
- package/lib/typescript/system/types.d.ts +28 -0
- package/lib/typescript/system/types.d.ts.map +1 -0
- package/lib/typescript/types.d.ts +5 -0
- package/lib/typescript/types.d.ts.map +1 -1
- package/lib/typescript/web-core/OfflineAudioContext.d.ts +34 -0
- package/lib/typescript/web-core/OfflineAudioContext.d.ts.map +1 -0
- package/package.json +2 -2
- package/src/api.ts +12 -2
- package/src/api.web.ts +1 -0
- package/src/core/AudioContext.ts +6 -1
- package/src/core/OfflineAudioContext.ts +94 -0
- package/src/interfaces.ts +11 -0
- package/src/specs/NativeAudioManagerModule.ts +51 -0
- package/src/specs/index.ts +6 -0
- package/src/system/AudioManager.ts +122 -0
- package/src/system/index.ts +1 -0
- package/src/system/types.ts +68 -0
- package/src/types.ts +6 -0
- package/src/web-core/OfflineAudioContext.tsx +163 -0
- package/common/cpp/audioapi/libs/signalsmith-stretch/delay.h +0 -715
- package/common/cpp/audioapi/libs/signalsmith-stretch/perf.h +0 -82
- package/common/cpp/audioapi/libs/signalsmith-stretch/spectral.h +0 -493
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { ContextState, PeriodicWaveConstraints, OfflineAudioContextOptions, AudioBufferSourceNodeOptions } from '../types';
|
|
2
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
3
|
+
import AnalyserNode from './AnalyserNode';
|
|
4
|
+
import AudioDestinationNode from './AudioDestinationNode';
|
|
5
|
+
import AudioBuffer from './AudioBuffer';
|
|
6
|
+
import AudioBufferSourceNode from './AudioBufferSourceNode';
|
|
7
|
+
import BiquadFilterNode from './BiquadFilterNode';
|
|
8
|
+
import GainNode from './GainNode';
|
|
9
|
+
import OscillatorNode from './OscillatorNode';
|
|
10
|
+
import PeriodicWave from './PeriodicWave';
|
|
11
|
+
import StereoPannerNode from './StereoPannerNode';
|
|
12
|
+
export default class OfflineAudioContext implements BaseAudioContext {
|
|
13
|
+
readonly context: globalThis.OfflineAudioContext;
|
|
14
|
+
readonly destination: AudioDestinationNode;
|
|
15
|
+
readonly sampleRate: number;
|
|
16
|
+
constructor(options: OfflineAudioContextOptions);
|
|
17
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
18
|
+
get currentTime(): number;
|
|
19
|
+
get state(): ContextState;
|
|
20
|
+
createOscillator(): OscillatorNode;
|
|
21
|
+
createGain(): GainNode;
|
|
22
|
+
createStereoPanner(): StereoPannerNode;
|
|
23
|
+
createBiquadFilter(): BiquadFilterNode;
|
|
24
|
+
createBufferSource(options?: AudioBufferSourceNodeOptions): Promise<AudioBufferSourceNode>;
|
|
25
|
+
createBuffer(numOfChannels: number, length: number, sampleRate: number): AudioBuffer;
|
|
26
|
+
createPeriodicWave(real: Float32Array, imag: Float32Array, constraints?: PeriodicWaveConstraints): PeriodicWave;
|
|
27
|
+
createAnalyser(): AnalyserNode;
|
|
28
|
+
decodeAudioDataSource(source: string): Promise<AudioBuffer>;
|
|
29
|
+
decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer>;
|
|
30
|
+
startRendering(): Promise<AudioBuffer>;
|
|
31
|
+
resume(): Promise<void>;
|
|
32
|
+
suspend(suspendTime: number): Promise<void>;
|
|
33
|
+
}
|
|
34
|
+
//# sourceMappingURL=OfflineAudioContext.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"OfflineAudioContext.d.ts","sourceRoot":"","sources":["../../../src/web-core/OfflineAudioContext.tsx"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,uBAAuB,EACvB,0BAA0B,EAC1B,4BAA4B,EAC7B,MAAM,UAAU,CAAC;AAElB,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAClD,OAAO,YAAY,MAAM,gBAAgB,CAAC;AAC1C,OAAO,oBAAoB,MAAM,wBAAwB,CAAC;AAC1D,OAAO,WAAW,MAAM,eAAe,CAAC;AACxC,OAAO,qBAAqB,MAAM,yBAAyB,CAAC;AAC5D,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAClD,OAAO,QAAQ,MAAM,YAAY,CAAC;AAClC,OAAO,cAAc,MAAM,kBAAkB,CAAC;AAC9C,OAAO,YAAY,MAAM,gBAAgB,CAAC;AAC1C,OAAO,gBAAgB,MAAM,oBAAoB,CAAC;AAIlD,MAAM,CAAC,OAAO,OAAO,mBAAoB,YAAW,gBAAgB;IAClE,QAAQ,CAAC,OAAO,EAAE,UAAU,CAAC,mBAAmB,CAAC;IAEjD,QAAQ,CAAC,WAAW,EAAE,oBAAoB,CAAC;IAC3C,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;gBAEhB,OAAO,EAAE,0BAA0B;gBACnC,gBAAgB,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM;IAsBxE,IAAW,WAAW,IAAI,MAAM,CAE/B;IAED,IAAW,KAAK,IAAI,YAAY,CAE/B;IAED,gBAAgB,IAAI,cAAc;IAIlC,UAAU,IAAI,QAAQ;IAItB,kBAAkB,IAAI,gBAAgB;IAItC,kBAAkB,IAAI,gBAAgB;IAIhC,kBAAkB,CACtB,OAAO,CAAC,EAAE,4BAA4B,GACrC,OAAO,CAAC,qBAAqB,CAAC;IAgBjC,YAAY,CACV,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,UAAU,EAAE,MAAM,GACjB,WAAW;IAwBd,kBAAkB,CAChB,IAAI,EAAE,YAAY,EAClB,IAAI,EAAE,YAAY,EAClB,WAAW,CAAC,EAAE,uBAAuB,GACpC,YAAY;IAYf,cAAc,IAAI,YAAY;IAIxB,qBAAqB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC;IAQ3D,eAAe,CAAC,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAI/D,cAAc,IAAI,OAAO,CAAC,WAAW,CAAC;IAItC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAIvB,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAGlD"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "react-native-audio-api",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.6.0-rc.0",
|
|
4
4
|
"description": "react-native-audio-api provides system for controlling audio in React Native environment compatible with Web Audio API specification",
|
|
5
5
|
"bin": {
|
|
6
6
|
"setup-rn-audio-api-web": "./scripts/setup-rn-audio-api-web.js"
|
|
@@ -48,7 +48,7 @@
|
|
|
48
48
|
"format:js": "prettier --write --list-different src",
|
|
49
49
|
"format:android:cpp": "find android/src/ -path android/src/main/cpp/audioapi/android/libs -iname \"*.h\" -o -iname \"*.cpp\" | xargs clang-format -i",
|
|
50
50
|
"format:android:kotlin": "ktlint -F 'android/src/main/java/**/*.kt'",
|
|
51
|
-
"format:ios": "find ios/ -iname \"*.h\" -o -iname \"*.m\" -o -iname \"*.mm\" -o -iname \"*.cpp\" | xargs clang-format -i",
|
|
51
|
+
"format:ios": "find ios/ \\( -iname \"*.h\" -o -iname \"*.m\" -o -iname \"*.mm\" -o -iname \"*.cpp\" \\) -not -path \"ios/audioapi/ios/*\" | xargs clang-format -i",
|
|
52
52
|
"format:common": "find common/cpp/ -path 'common/cpp/audioapi/libs' -prune -iname \"*.h\" -o -iname \"*.cpp\" | xargs clang-format -i",
|
|
53
53
|
"build": "bob build",
|
|
54
54
|
"prepack": "cp ../../README.md ./README.md",
|
package/src/api.ts
CHANGED
|
@@ -1,13 +1,21 @@
|
|
|
1
1
|
import NativeAudioAPIModule from './specs/NativeAudioAPIModule';
|
|
2
|
-
import type { IAudioContext } from './interfaces';
|
|
2
|
+
import type { IAudioContext, IOfflineAudioContext } from './interfaces';
|
|
3
3
|
|
|
4
4
|
/* eslint-disable no-var */
|
|
5
5
|
declare global {
|
|
6
6
|
var createAudioContext: (sampleRate?: number) => IAudioContext;
|
|
7
|
+
var createOfflineAudioContext: (
|
|
8
|
+
numberOfChannels: number,
|
|
9
|
+
length: number,
|
|
10
|
+
sampleRate: number
|
|
11
|
+
) => IOfflineAudioContext;
|
|
7
12
|
}
|
|
8
13
|
/* eslint-disable no-var */
|
|
9
14
|
|
|
10
|
-
if (
|
|
15
|
+
if (
|
|
16
|
+
global.createAudioContext == null ||
|
|
17
|
+
global.createOfflineAudioContext == null
|
|
18
|
+
) {
|
|
11
19
|
if (!NativeAudioAPIModule) {
|
|
12
20
|
throw new Error(
|
|
13
21
|
`Failed to install react-native-audio-api: The native module could not be found.`
|
|
@@ -20,6 +28,7 @@ if (global.createAudioContext == null) {
|
|
|
20
28
|
export { default as AudioBuffer } from './core/AudioBuffer';
|
|
21
29
|
export { default as AudioBufferSourceNode } from './core/AudioBufferSourceNode';
|
|
22
30
|
export { default as AudioContext } from './core/AudioContext';
|
|
31
|
+
export { default as OfflineAudioContext } from './core/OfflineAudioContext';
|
|
23
32
|
export { default as AudioDestinationNode } from './core/AudioDestinationNode';
|
|
24
33
|
export { default as AudioNode } from './core/AudioNode';
|
|
25
34
|
export { default as AnalyserNode } from './core/AnalyserNode';
|
|
@@ -30,6 +39,7 @@ export { default as BiquadFilterNode } from './core/BiquadFilterNode';
|
|
|
30
39
|
export { default as GainNode } from './core/GainNode';
|
|
31
40
|
export { default as OscillatorNode } from './core/OscillatorNode';
|
|
32
41
|
export { default as StereoPannerNode } from './core/StereoPannerNode';
|
|
42
|
+
export { default as AudioManager } from './system';
|
|
33
43
|
|
|
34
44
|
export {
|
|
35
45
|
OscillatorType,
|
package/src/api.web.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
export { default as AudioBuffer } from './web-core/AudioBuffer';
|
|
2
2
|
export { default as AudioBufferSourceNode } from './web-core/AudioBufferSourceNode';
|
|
3
3
|
export { default as AudioContext } from './web-core/AudioContext';
|
|
4
|
+
export { default as OfflineAudioContext } from './web-core/OfflineAudioContext';
|
|
4
5
|
export { default as AudioDestinationNode } from './web-core/AudioDestinationNode';
|
|
5
6
|
export { default as AudioNode } from './web-core/AudioNode';
|
|
6
7
|
export { default as AnalyserNode } from './web-core/AnalyserNode';
|
package/src/core/AudioContext.ts
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { IAudioContext } from '../interfaces';
|
|
2
2
|
import BaseAudioContext from './BaseAudioContext';
|
|
3
|
+
import AudioManager from '../system';
|
|
3
4
|
import { AudioContextOptions } from '../types';
|
|
4
5
|
import { NotSupportedError } from '../errors';
|
|
5
6
|
|
|
@@ -11,7 +12,11 @@ export default class AudioContext extends BaseAudioContext {
|
|
|
11
12
|
);
|
|
12
13
|
}
|
|
13
14
|
|
|
14
|
-
super(
|
|
15
|
+
super(
|
|
16
|
+
global.createAudioContext(
|
|
17
|
+
options?.sampleRate || AudioManager.getDevicePreferredSampleRate()
|
|
18
|
+
)
|
|
19
|
+
);
|
|
15
20
|
}
|
|
16
21
|
|
|
17
22
|
async close(): Promise<undefined> {
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import { IOfflineAudioContext } from '../interfaces';
|
|
2
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
3
|
+
import { OfflineAudioContextOptions } from '../types';
|
|
4
|
+
import { InvalidStateError, NotSupportedError } from '../errors';
|
|
5
|
+
import AudioBuffer from './AudioBuffer';
|
|
6
|
+
|
|
7
|
+
export default class OfflineAudioContext extends BaseAudioContext {
|
|
8
|
+
private isSuspended: boolean;
|
|
9
|
+
private isRendering: boolean;
|
|
10
|
+
private duration: number;
|
|
11
|
+
|
|
12
|
+
constructor(options: OfflineAudioContextOptions);
|
|
13
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
14
|
+
constructor(
|
|
15
|
+
arg0: OfflineAudioContextOptions | number,
|
|
16
|
+
arg1?: number,
|
|
17
|
+
arg2?: number
|
|
18
|
+
) {
|
|
19
|
+
if (typeof arg0 === 'object') {
|
|
20
|
+
const { numberOfChannels, length, sampleRate } = arg0;
|
|
21
|
+
super(
|
|
22
|
+
global.createOfflineAudioContext(numberOfChannels, length, sampleRate)
|
|
23
|
+
);
|
|
24
|
+
|
|
25
|
+
this.duration = length / sampleRate;
|
|
26
|
+
} else if (
|
|
27
|
+
typeof arg0 === 'number' &&
|
|
28
|
+
typeof arg1 === 'number' &&
|
|
29
|
+
typeof arg2 === 'number'
|
|
30
|
+
) {
|
|
31
|
+
super(global.createOfflineAudioContext(arg0, arg1, arg2));
|
|
32
|
+
this.duration = arg1 / arg2;
|
|
33
|
+
} else {
|
|
34
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
this.isSuspended = false;
|
|
38
|
+
this.isRendering = false;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
async resume(): Promise<undefined> {
|
|
42
|
+
if (!this.isRendering) {
|
|
43
|
+
throw new InvalidStateError(
|
|
44
|
+
'Cannot resume an OfflineAudioContext while rendering'
|
|
45
|
+
);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
if (!this.isSuspended) {
|
|
49
|
+
throw new InvalidStateError(
|
|
50
|
+
'Cannot resume an OfflineAudioContext that is not suspended'
|
|
51
|
+
);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
this.isSuspended = false;
|
|
55
|
+
|
|
56
|
+
await (this.context as IOfflineAudioContext).resume();
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
async suspend(suspendTime: number): Promise<undefined> {
|
|
60
|
+
if (suspendTime < 0) {
|
|
61
|
+
throw new InvalidStateError('suspendTime must be a non-negative number');
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
if (suspendTime < this.context.currentTime) {
|
|
65
|
+
throw new InvalidStateError(
|
|
66
|
+
`suspendTime must be greater than the current time: ${suspendTime}`
|
|
67
|
+
);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (suspendTime > this.duration) {
|
|
71
|
+
throw new InvalidStateError(
|
|
72
|
+
`suspendTime must be less than the duration of the context: ${suspendTime}`
|
|
73
|
+
);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
this.isSuspended = true;
|
|
77
|
+
|
|
78
|
+
await (this.context as IOfflineAudioContext).suspend(suspendTime);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
async startRendering(): Promise<AudioBuffer> {
|
|
82
|
+
if (this.isRendering) {
|
|
83
|
+
throw new InvalidStateError('OfflineAudioContext is already rendering');
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
this.isRendering = true;
|
|
87
|
+
|
|
88
|
+
const audioBuffer = await (
|
|
89
|
+
this.context as IOfflineAudioContext
|
|
90
|
+
).startRendering();
|
|
91
|
+
|
|
92
|
+
return new AudioBuffer(audioBuffer);
|
|
93
|
+
}
|
|
94
|
+
}
|
package/src/interfaces.ts
CHANGED
|
@@ -9,6 +9,11 @@ import {
|
|
|
9
9
|
|
|
10
10
|
export interface AudioAPIInstaller {
|
|
11
11
|
createAudioContext: (sampleRate?: number) => IAudioContext;
|
|
12
|
+
createOfflineAudioContext: (
|
|
13
|
+
numberOfChannels: number,
|
|
14
|
+
length: number,
|
|
15
|
+
sampleRate: number
|
|
16
|
+
) => IAudioContext;
|
|
12
17
|
}
|
|
13
18
|
|
|
14
19
|
export interface IBaseAudioContext {
|
|
@@ -43,6 +48,12 @@ export interface IAudioContext extends IBaseAudioContext {
|
|
|
43
48
|
suspend(): Promise<void>;
|
|
44
49
|
}
|
|
45
50
|
|
|
51
|
+
export interface IOfflineAudioContext extends IBaseAudioContext {
|
|
52
|
+
resume(): Promise<void>;
|
|
53
|
+
suspend(suspendTime: number): Promise<void>;
|
|
54
|
+
startRendering(): Promise<IAudioBuffer>;
|
|
55
|
+
}
|
|
56
|
+
|
|
46
57
|
export interface IAudioNode {
|
|
47
58
|
readonly context: BaseAudioContext;
|
|
48
59
|
readonly numberOfInputs: number;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import {
|
|
2
|
+
NativeModules,
|
|
3
|
+
DeviceEventEmitter,
|
|
4
|
+
NativeEventEmitter,
|
|
5
|
+
Platform,
|
|
6
|
+
} from 'react-native';
|
|
7
|
+
|
|
8
|
+
const NativeAudioManagerModule = NativeModules.AudioManagerModule;
|
|
9
|
+
const eventEmitter = Platform.select({
|
|
10
|
+
ios: new NativeEventEmitter(NativeModules.AudioManagerModule),
|
|
11
|
+
android: DeviceEventEmitter,
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
if (!NativeAudioManagerModule || !eventEmitter) {
|
|
15
|
+
throw new Error(
|
|
16
|
+
`Failed to install react-native-audio-api: The native module could not be found.`
|
|
17
|
+
);
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
const AudioManagerModule = {
|
|
21
|
+
setLockScreenInfo(info: {
|
|
22
|
+
[key: string]: string | boolean | number | undefined;
|
|
23
|
+
}): void {
|
|
24
|
+
NativeAudioManagerModule.setLockScreenInfo(info);
|
|
25
|
+
},
|
|
26
|
+
resetLockScreenInfo(): void {
|
|
27
|
+
NativeAudioManagerModule.resetLockScreenInfo();
|
|
28
|
+
},
|
|
29
|
+
enableRemoteCommand(name: string, enabled: boolean): void {
|
|
30
|
+
NativeAudioManagerModule.enableRemoteCommand(name, enabled);
|
|
31
|
+
},
|
|
32
|
+
// audio session
|
|
33
|
+
setAudioSessionOptions(
|
|
34
|
+
category: string,
|
|
35
|
+
mode: string,
|
|
36
|
+
options: Array<string>,
|
|
37
|
+
active: boolean
|
|
38
|
+
): void {
|
|
39
|
+
NativeAudioManagerModule.setAudioSessionOptions(
|
|
40
|
+
category,
|
|
41
|
+
mode,
|
|
42
|
+
options,
|
|
43
|
+
active
|
|
44
|
+
);
|
|
45
|
+
},
|
|
46
|
+
getDevicePreferredSampleRate(): number {
|
|
47
|
+
return NativeAudioManagerModule.getDevicePreferredSampleRate();
|
|
48
|
+
},
|
|
49
|
+
};
|
|
50
|
+
|
|
51
|
+
export { eventEmitter, AudioManagerModule };
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
import { SessionOptions, LockScreenInfo, RemoteControl } from './types';
|
|
2
|
+
import { AudioManagerModule, eventEmitter } from '../specs';
|
|
3
|
+
import { EmitterSubscription } from 'react-native';
|
|
4
|
+
|
|
5
|
+
class AudioManager {
|
|
6
|
+
setLockScreenInfo(info: LockScreenInfo) {
|
|
7
|
+
AudioManagerModule.setLockScreenInfo(info);
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
resetLockScreenInfo() {
|
|
11
|
+
AudioManagerModule.resetLockScreenInfo();
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
setAudioSessionOptions(options: SessionOptions) {
|
|
15
|
+
AudioManagerModule.setAudioSessionOptions(
|
|
16
|
+
options.iosCategory ?? '',
|
|
17
|
+
options.iosMode ?? '',
|
|
18
|
+
options.iosOptions ?? [],
|
|
19
|
+
options.active ?? true
|
|
20
|
+
);
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
getDevicePreferredSampleRate(): number {
|
|
24
|
+
return AudioManagerModule.getDevicePreferredSampleRate();
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
enableRemoteCommand(
|
|
28
|
+
name: RemoteControl,
|
|
29
|
+
enabled: boolean,
|
|
30
|
+
callback?: (value?: number) => void
|
|
31
|
+
): EmitterSubscription | null {
|
|
32
|
+
AudioManagerModule.enableRemoteCommand(name, enabled);
|
|
33
|
+
|
|
34
|
+
let subscription = null;
|
|
35
|
+
|
|
36
|
+
if (enabled && callback) {
|
|
37
|
+
switch (name) {
|
|
38
|
+
case 'play':
|
|
39
|
+
subscription = eventEmitter!.addListener('onRemotePlay', callback);
|
|
40
|
+
break;
|
|
41
|
+
|
|
42
|
+
case 'pause':
|
|
43
|
+
subscription = eventEmitter!.addListener('onRemotePause', callback);
|
|
44
|
+
break;
|
|
45
|
+
|
|
46
|
+
case 'stop':
|
|
47
|
+
subscription = eventEmitter!.addListener('onRemoteStop', callback);
|
|
48
|
+
break;
|
|
49
|
+
|
|
50
|
+
case 'togglePlayPause':
|
|
51
|
+
subscription = eventEmitter!.addListener(
|
|
52
|
+
'onRemoteTogglePlayPause',
|
|
53
|
+
callback
|
|
54
|
+
);
|
|
55
|
+
break;
|
|
56
|
+
|
|
57
|
+
case 'changePlaybackRate':
|
|
58
|
+
subscription = eventEmitter!.addListener(
|
|
59
|
+
'onRemoteChangePlaybackRate',
|
|
60
|
+
callback
|
|
61
|
+
);
|
|
62
|
+
break;
|
|
63
|
+
|
|
64
|
+
case 'nextTrack':
|
|
65
|
+
subscription = eventEmitter!.addListener(
|
|
66
|
+
'onRemoteNextTrack',
|
|
67
|
+
callback
|
|
68
|
+
);
|
|
69
|
+
break;
|
|
70
|
+
|
|
71
|
+
case 'previousTrack':
|
|
72
|
+
subscription = eventEmitter!.addListener(
|
|
73
|
+
'onRemotePreviousTrack',
|
|
74
|
+
callback
|
|
75
|
+
);
|
|
76
|
+
break;
|
|
77
|
+
|
|
78
|
+
case 'skipForward':
|
|
79
|
+
subscription = eventEmitter!.addListener(
|
|
80
|
+
'onRemoteSkipForward',
|
|
81
|
+
callback
|
|
82
|
+
);
|
|
83
|
+
break;
|
|
84
|
+
|
|
85
|
+
case 'skipBackward':
|
|
86
|
+
subscription = eventEmitter!.addListener(
|
|
87
|
+
'onRemoteSkipBackward',
|
|
88
|
+
callback
|
|
89
|
+
);
|
|
90
|
+
break;
|
|
91
|
+
|
|
92
|
+
case 'seekForward':
|
|
93
|
+
subscription = eventEmitter!.addListener(
|
|
94
|
+
'onRemoteSeekForward',
|
|
95
|
+
callback
|
|
96
|
+
);
|
|
97
|
+
break;
|
|
98
|
+
|
|
99
|
+
case 'seekBackward':
|
|
100
|
+
subscription = eventEmitter!.addListener(
|
|
101
|
+
'onRemoteSeekBackward',
|
|
102
|
+
callback
|
|
103
|
+
);
|
|
104
|
+
break;
|
|
105
|
+
|
|
106
|
+
case 'changePlaybackPosition':
|
|
107
|
+
subscription = eventEmitter!.addListener(
|
|
108
|
+
'onRemoteChangePlaybackPosition',
|
|
109
|
+
callback
|
|
110
|
+
);
|
|
111
|
+
break;
|
|
112
|
+
|
|
113
|
+
default:
|
|
114
|
+
console.error('Unsupported RemoteControl action:', name);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
return subscription;
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
export default new AudioManager();
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { default } from './AudioManager';
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
// AVAudioSession category (iOS only) — how the app's audio interacts with
// the rest of the system. NOTE(review): assumed to mirror
// AVAudioSession.Category constants on the native side — confirm against
// AudioSessionManager.mm.
export type IOSCategory =
  | 'record'
  | 'ambient'
  | 'playback'
  | 'multiRoute'
  | 'soloAmbient'
  | 'playAndRecord';

// AVAudioSession mode (iOS only) — specializes the category for a use
// case (chat, measurement, movie playback, ...).
export type IOSMode =
  | 'default'
  | 'gameChat'
  | 'videoChat'
  | 'voiceChat'
  | 'measurement'
  | 'voicePrompt'
  | 'spokenAudio'
  | 'moviePlayback'
  | 'videoRecording';

// AVAudioSession category options (iOS only) — mixing, ducking and
// routing flags applied alongside the category.
export type IOSOption =
  | 'duckOthers'
  | 'allowAirPlay'
  | 'mixWithOthers'
  | 'allowBluetooth'
  | 'defaultToSpeaker'
  | 'allowBluetoothA2DP'
  | 'overrideMutedMicrophoneInterruption'
  | 'interruptSpokenAudioAndMixWithOthers';

// Options accepted by AudioManager.setAudioSessionOptions(). The ios*
// fields are ignored on Android; `active` toggles session activation.
export interface SessionOptions {
  iosMode?: IOSMode;
  iosOptions?: IOSOption[];
  iosCategory?: IOSCategory;
  active?: boolean;
}

// Playback state shown on the lock screen / media notification.
export type MediaState = 'state_playing' | 'state_paused' | 'state_stopped';

// Index signature so extra metadata keys can be passed through to the
// native lock-screen API untyped.
interface BaseLockScreenInfo {
  [key: string]: string | boolean | number | undefined;
}

// Now-playing metadata for AudioManager.setLockScreenInfo(). `duration`
// and `elapsedTime` are in seconds; `artwork` is a URL/URI string —
// TODO confirm accepted formats against the native implementations.
export interface LockScreenInfo extends BaseLockScreenInfo {
  title?: string;
  artwork?: string;
  artist?: string;
  album?: string;
  genre?: string;
  duration?: number;
  isLiveStream?: boolean;
  state?: MediaState;
  speed?: number;
  elapsedTime?: number;
}

// Remote (lock-screen / headset) command names accepted by
// AudioManager.enableRemoteCommand(); each maps to an onRemote* event.
export type RemoteControl =
  | 'play'
  | 'pause'
  | 'stop'
  | 'togglePlayPause'
  | 'changePlaybackRate'
  | 'nextTrack'
  | 'previousTrack'
  | 'skipForward'
  | 'skipBackward'
  | 'seekForward'
  | 'seekBackward'
  | 'changePlaybackPosition';
|
package/src/types.ts
CHANGED
|
@@ -29,6 +29,12 @@ export interface AudioContextOptions {
|
|
|
29
29
|
sampleRate: number;
|
|
30
30
|
}
|
|
31
31
|
|
|
32
|
+
export interface OfflineAudioContextOptions {
|
|
33
|
+
numberOfChannels: number;
|
|
34
|
+
length: number;
|
|
35
|
+
sampleRate: number;
|
|
36
|
+
}
|
|
37
|
+
|
|
32
38
|
export type WindowType = 'blackman' | 'hann';
|
|
33
39
|
|
|
34
40
|
export interface AudioBufferSourceNodeOptions {
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ContextState,
|
|
3
|
+
PeriodicWaveConstraints,
|
|
4
|
+
OfflineAudioContextOptions,
|
|
5
|
+
AudioBufferSourceNodeOptions,
|
|
6
|
+
} from '../types';
|
|
7
|
+
import { InvalidAccessError, NotSupportedError } from '../errors';
|
|
8
|
+
import BaseAudioContext from './BaseAudioContext';
|
|
9
|
+
import AnalyserNode from './AnalyserNode';
|
|
10
|
+
import AudioDestinationNode from './AudioDestinationNode';
|
|
11
|
+
import AudioBuffer from './AudioBuffer';
|
|
12
|
+
import AudioBufferSourceNode from './AudioBufferSourceNode';
|
|
13
|
+
import BiquadFilterNode from './BiquadFilterNode';
|
|
14
|
+
import GainNode from './GainNode';
|
|
15
|
+
import OscillatorNode from './OscillatorNode';
|
|
16
|
+
import PeriodicWave from './PeriodicWave';
|
|
17
|
+
import StereoPannerNode from './StereoPannerNode';
|
|
18
|
+
|
|
19
|
+
import { globalWasmPromise, globalTag } from './custom/LoadCustomWasm';
|
|
20
|
+
|
|
21
|
+
export default class OfflineAudioContext implements BaseAudioContext {
|
|
22
|
+
readonly context: globalThis.OfflineAudioContext;
|
|
23
|
+
|
|
24
|
+
readonly destination: AudioDestinationNode;
|
|
25
|
+
readonly sampleRate: number;
|
|
26
|
+
|
|
27
|
+
constructor(options: OfflineAudioContextOptions);
|
|
28
|
+
constructor(numberOfChannels: number, length: number, sampleRate: number);
|
|
29
|
+
constructor(
|
|
30
|
+
arg0: OfflineAudioContextOptions | number,
|
|
31
|
+
arg1?: number,
|
|
32
|
+
arg2?: number
|
|
33
|
+
) {
|
|
34
|
+
if (typeof arg0 === 'object') {
|
|
35
|
+
this.context = new window.OfflineAudioContext(arg0);
|
|
36
|
+
} else if (
|
|
37
|
+
typeof arg0 === 'number' &&
|
|
38
|
+
typeof arg1 === 'number' &&
|
|
39
|
+
typeof arg2 === 'number'
|
|
40
|
+
) {
|
|
41
|
+
this.context = new window.OfflineAudioContext(arg0, arg1, arg2);
|
|
42
|
+
} else {
|
|
43
|
+
throw new NotSupportedError('Invalid constructor arguments');
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
this.sampleRate = this.context.sampleRate;
|
|
47
|
+
this.destination = new AudioDestinationNode(this, this.context.destination);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
public get currentTime(): number {
|
|
51
|
+
return this.context.currentTime;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
public get state(): ContextState {
|
|
55
|
+
return this.context.state as ContextState;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
createOscillator(): OscillatorNode {
|
|
59
|
+
return new OscillatorNode(this, this.context.createOscillator());
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
createGain(): GainNode {
|
|
63
|
+
return new GainNode(this, this.context.createGain());
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
createStereoPanner(): StereoPannerNode {
|
|
67
|
+
return new StereoPannerNode(this, this.context.createStereoPanner());
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
createBiquadFilter(): BiquadFilterNode {
|
|
71
|
+
return new BiquadFilterNode(this, this.context.createBiquadFilter());
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async createBufferSource(
|
|
75
|
+
options?: AudioBufferSourceNodeOptions
|
|
76
|
+
): Promise<AudioBufferSourceNode> {
|
|
77
|
+
if (!options || !options.pitchCorrection) {
|
|
78
|
+
return new AudioBufferSourceNode(
|
|
79
|
+
this,
|
|
80
|
+
this.context.createBufferSource(),
|
|
81
|
+
false
|
|
82
|
+
);
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
await globalWasmPromise;
|
|
86
|
+
|
|
87
|
+
const wasmStretch = await window[globalTag](this.context);
|
|
88
|
+
|
|
89
|
+
return new AudioBufferSourceNode(this, wasmStretch, true);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
createBuffer(
|
|
93
|
+
numOfChannels: number,
|
|
94
|
+
length: number,
|
|
95
|
+
sampleRate: number
|
|
96
|
+
): AudioBuffer {
|
|
97
|
+
if (numOfChannels < 1 || numOfChannels >= 32) {
|
|
98
|
+
throw new NotSupportedError(
|
|
99
|
+
`The number of channels provided (${numOfChannels}) is outside the range [1, 32]`
|
|
100
|
+
);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
if (length <= 0) {
|
|
104
|
+
throw new NotSupportedError(
|
|
105
|
+
`The number of frames provided (${length}) is less than or equal to the minimum bound (0)`
|
|
106
|
+
);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
if (sampleRate < 8000 || sampleRate > 96000) {
|
|
110
|
+
throw new NotSupportedError(
|
|
111
|
+
`The sample rate provided (${sampleRate}) is outside the range [8000, 96000]`
|
|
112
|
+
);
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
return new AudioBuffer(
|
|
116
|
+
this.context.createBuffer(numOfChannels, length, sampleRate)
|
|
117
|
+
);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
createPeriodicWave(
|
|
121
|
+
real: Float32Array,
|
|
122
|
+
imag: Float32Array,
|
|
123
|
+
constraints?: PeriodicWaveConstraints
|
|
124
|
+
): PeriodicWave {
|
|
125
|
+
if (real.length !== imag.length) {
|
|
126
|
+
throw new InvalidAccessError(
|
|
127
|
+
`The lengths of the real (${real.length}) and imaginary (${imag.length}) arrays must match.`
|
|
128
|
+
);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
return new PeriodicWave(
|
|
132
|
+
this.context.createPeriodicWave(real, imag, constraints)
|
|
133
|
+
);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
createAnalyser(): AnalyserNode {
|
|
137
|
+
return new AnalyserNode(this, this.context.createAnalyser());
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
async decodeAudioDataSource(source: string): Promise<AudioBuffer> {
|
|
141
|
+
const arrayBuffer = await fetch(source).then((response) =>
|
|
142
|
+
response.arrayBuffer()
|
|
143
|
+
);
|
|
144
|
+
|
|
145
|
+
return this.decodeAudioData(arrayBuffer);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
async decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer> {
|
|
149
|
+
return new AudioBuffer(await this.context.decodeAudioData(arrayBuffer));
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
async startRendering(): Promise<AudioBuffer> {
|
|
153
|
+
return new AudioBuffer(await this.context.startRendering());
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
async resume(): Promise<void> {
|
|
157
|
+
await this.context.resume();
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
async suspend(suspendTime: number): Promise<void> {
|
|
161
|
+
await this.context.suspend(suspendTime);
|
|
162
|
+
}
|
|
163
|
+
}
|