@idealyst/audio 1.2.48
- package/package.json +69 -0
- package/src/constants.ts +161 -0
- package/src/context/AudioContext.native.ts +84 -0
- package/src/context/AudioContext.web.ts +97 -0
- package/src/context/index.native.ts +1 -0
- package/src/context/index.ts +1 -0
- package/src/hooks/index.ts +3 -0
- package/src/hooks/useAudio.ts +129 -0
- package/src/hooks/usePlayer.ts +247 -0
- package/src/hooks/useRecorder.ts +176 -0
- package/src/index.native.ts +114 -0
- package/src/index.ts +114 -0
- package/src/index.web.ts +8 -0
- package/src/playback/Player.native.ts +517 -0
- package/src/playback/Player.web.ts +518 -0
- package/src/playback/index.native.ts +1 -0
- package/src/playback/index.ts +1 -0
- package/src/recording/Recorder.native.ts +330 -0
- package/src/recording/Recorder.web.ts +399 -0
- package/src/recording/index.native.ts +1 -0
- package/src/recording/index.ts +1 -0
- package/src/session/AudioSession.native.ts +204 -0
- package/src/session/AudioSession.web.ts +69 -0
- package/src/session/index.native.ts +5 -0
- package/src/session/index.ts +1 -0
- package/src/types.ts +470 -0
- package/src/utils.ts +379 -0
package/package.json
ADDED
@@ -0,0 +1,69 @@
+{
+  "name": "@idealyst/audio",
+  "version": "1.2.48",
+  "description": "Unified cross-platform audio for React and React Native - recording, playback, and PCM streaming",
+  "documentation": "https://github.com/IdealystIO/idealyst-framework/tree/main/packages/audio#readme",
+  "readme": "README.md",
+  "main": "src/index.ts",
+  "module": "src/index.ts",
+  "types": "src/index.ts",
+  "react-native": "src/index.native.ts",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/IdealystIO/idealyst-framework.git",
+    "directory": "packages/audio"
+  },
+  "author": "Idealyst <contact@idealyst.io>",
+  "license": "MIT",
+  "publishConfig": {
+    "access": "public"
+  },
+  "exports": {
+    ".": {
+      "react-native": "./src/index.native.ts",
+      "browser": "./src/index.web.ts",
+      "import": "./src/index.ts",
+      "require": "./src/index.ts",
+      "types": "./src/index.ts"
+    }
+  },
+  "scripts": {
+    "prepublishOnly": "echo 'Publishing TypeScript source directly'",
+    "publish:npm": "npm publish"
+  },
+  "peerDependencies": {
+    "react": ">=16.8.0",
+    "react-native": ">=0.60.0",
+    "react-native-audio-api": ">=0.4.0"
+  },
+  "peerDependenciesMeta": {
+    "react-native": {
+      "optional": true
+    },
+    "react-native-audio-api": {
+      "optional": true
+    }
+  },
+  "devDependencies": {
+    "@types/react": "^19.1.0",
+    "@types/react-native": "^0.73.0",
+    "react-native-audio-api": "^0.4.0",
+    "typescript": "^5.0.0"
+  },
+  "files": [
+    "src",
+    "README.md"
+  ],
+  "keywords": [
+    "react",
+    "react-native",
+    "audio",
+    "recording",
+    "playback",
+    "pcm",
+    "streaming",
+    "microphone",
+    "cross-platform",
+    "web-audio-api"
+  ]
+}
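For orientation, a hedged consumer-side sketch of how the conditional exports above are meant to resolve. The hook names come from the src/hooks entries in the file list; whether the root index re-exports them is an assumption.

// Hypothetical usage; assumes src/index.ts re-exports the hooks under src/hooks.
import { useAudio, useRecorder, usePlayer } from '@idealyst/audio';
// Metro / React Native resolves the "react-native" condition to src/index.native.ts,
// browser-targeting bundlers pick "browser" (src/index.web.ts),
// and Node tooling falls back to "import"/"require" (src/index.ts).
// The package publishes TypeScript source directly, so the consuming bundler
// generally needs to transpile .ts/.tsx from node_modules.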
package/src/constants.ts
ADDED
@@ -0,0 +1,161 @@
+import type {
+  AudioConfig,
+  AudioLevel,
+  RecorderStatus,
+  PlayerStatus,
+  AudioSessionState,
+  AudioProfiles,
+  SessionPresets,
+} from './types';
+
+// ============================================
+// AUDIO CONFIGURATION DEFAULTS
+// ============================================
+
+export const DEFAULT_AUDIO_CONFIG: AudioConfig = {
+  sampleRate: 16000,
+  channels: 1,
+  bitDepth: 16,
+};
+
+/**
+ * Pre-configured audio profiles for common use cases.
+ */
+export const AUDIO_PROFILES: AudioProfiles = {
+  /** Optimized for speech/voice (AI assistants, TTS, STT) */
+  speech: {
+    sampleRate: 16000,
+    channels: 1,
+    bitDepth: 16,
+  },
+
+  /** Higher quality for music playback */
+  highQuality: {
+    sampleRate: 44100,
+    channels: 2,
+    bitDepth: 16,
+  },
+
+  /** Professional audio quality */
+  studio: {
+    sampleRate: 48000,
+    channels: 2,
+    bitDepth: 16,
+  },
+
+  /** Low bandwidth (phone calls, basic voice) */
+  phone: {
+    sampleRate: 8000,
+    channels: 1,
+    bitDepth: 16,
+  },
+};
+
+// ============================================
+// AUDIO SESSION DEFAULTS
+// ============================================
+
+export const DEFAULT_SESSION_STATE: AudioSessionState = {
+  isActive: false,
+  category: 'soloAmbient',
+  mode: 'default',
+  categoryOptions: [],
+};
+
+/**
+ * Pre-configured audio session presets for common use cases.
+ */
+export const SESSION_PRESETS: SessionPresets = {
+  /** For apps that only play audio (music players, podcasts) */
+  playback: {
+    category: 'playback',
+    mode: 'default',
+    categoryOptions: [],
+    active: true,
+  },
+
+  /** For apps that only record audio */
+  record: {
+    category: 'record',
+    mode: 'default',
+    categoryOptions: [],
+    active: true,
+  },
+
+  /** For voice chat / VoIP applications */
+  voiceChat: {
+    category: 'playAndRecord',
+    mode: 'voiceChat',
+    categoryOptions: ['defaultToSpeaker', 'allowBluetooth', 'allowBluetoothA2DP'],
+    active: true,
+  },
+
+  /** For apps that mix with other audio (games with sound effects) */
+  ambient: {
+    category: 'ambient',
+    mode: 'default',
+    categoryOptions: ['mixWithOthers'],
+    active: true,
+  },
+
+  /** Default for simultaneous playback and recording (AI assistants) */
+  default: {
+    category: 'playAndRecord',
+    mode: 'default',
+    categoryOptions: ['defaultToSpeaker', 'allowBluetooth', 'allowBluetoothA2DP', 'mixWithOthers'],
+    active: true,
+  },
+};
+
+// ============================================
+// RECORDER DEFAULTS
+// ============================================
+
+export const DEFAULT_AUDIO_LEVEL: AudioLevel = {
+  current: 0,
+  peak: 0,
+  rms: 0,
+  db: -Infinity,
+};
+
+export const DEFAULT_RECORDER_STATUS: RecorderStatus = {
+  state: 'idle',
+  isRecording: false,
+  isPaused: false,
+  permission: 'undetermined',
+  duration: 0,
+  level: DEFAULT_AUDIO_LEVEL,
+  config: DEFAULT_AUDIO_CONFIG,
+};
+
+export const DEFAULT_LEVEL_UPDATE_INTERVAL = 100;
+export const DEFAULT_BUFFER_SIZE = 4096;
+
+// ============================================
+// PLAYER DEFAULTS
+// ============================================
+
+export const DEFAULT_PLAYER_STATUS: PlayerStatus = {
+  state: 'idle',
+  isPlaying: false,
+  isPaused: false,
+  duration: 0,
+  position: 0,
+  buffered: 0,
+  volume: 1.0,
+  muted: false,
+};
+
+export const DEFAULT_VOLUME = 1.0;
+export const DEFAULT_POSITION_UPDATE_INTERVAL = 100;
+export const MIN_BUFFER_THRESHOLD_MS = 100;
+
+// ============================================
+// UTILITY CONSTANTS
+// ============================================
+
+export const BIT_DEPTH_MAX_VALUES = {
+  8: 128,
+  16: 32768,
+  32: 1.0,
+} as const;
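As a quick illustration of how the BIT_DEPTH_MAX_VALUES table above is meant to be used, here is a minimal sketch (not the package's own utils.ts, whose helpers are not shown here) that normalizes a signed PCM sample and expresses it in dBFS. Importing from the package root assumes the constants are re-exported by index.ts.

import { BIT_DEPTH_MAX_VALUES } from '@idealyst/audio';

// Divide a raw integer sample by the per-bit-depth maximum (32-bit float samples
// are already normalized, hence the divisor of 1.0 in the table).
function sampleToFloat(sample: number, bitDepth: 8 | 16 | 32 = 16): number {
  return sample / BIT_DEPTH_MAX_VALUES[bitDepth];
}

// Convert a normalized amplitude to decibels full scale.
function floatToDb(value: number): number {
  return value === 0 ? -Infinity : 20 * Math.log10(Math.abs(value));
}

floatToDb(sampleToFloat(16384)); // ≈ -6 dBFS for a half-scale 16-bit sample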
package/src/context/AudioContext.native.ts
ADDED
@@ -0,0 +1,84 @@
+/**
+ * Native Audio Context Manager
+ *
+ * Manages a shared AudioContext using react-native-audio-api for both
+ * recording and playback on React Native.
+ */
+
+import { AudioContext as RNAudioContext } from 'react-native-audio-api';
+import type { IAudioContext } from '../types';
+
+class NativeAudioContextManager implements IAudioContext {
+  private context: RNAudioContext | null = null;
+  private _isInitialized = false;
+
+  get sampleRate(): number {
+    return this.context?.sampleRate ?? 44100;
+  }
+
+  get currentTime(): number {
+    return this.context?.currentTime ?? 0;
+  }
+
+  get isInitialized(): boolean {
+    return this._isInitialized && this.context !== null;
+  }
+
+  async initialize(): Promise<void> {
+    if (this.context) {
+      return;
+    }
+
+    // Create the react-native-audio-api AudioContext
+    this.context = new RNAudioContext();
+    this._isInitialized = true;
+  }
+
+  getContext(): RNAudioContext | null {
+    return this.context;
+  }
+
+  async suspend(): Promise<void> {
+    // react-native-audio-api may not support suspend directly
+    // This is a no-op for now
+  }
+
+  async resume(): Promise<void> {
+    // react-native-audio-api may not support resume directly
+    // This is a no-op for now
+  }
+
+  async close(): Promise<void> {
+    if (this.context) {
+      // Note: react-native-audio-api may not have a close method
+      // We just null out the reference
+      this.context = null;
+      this._isInitialized = false;
+    }
+  }
+}
+
+// Singleton instance
+let instance: NativeAudioContextManager | null = null;
+
+/**
+ * Get the shared AudioContext manager instance.
+ */
+export function getAudioContext(): IAudioContext {
+  if (!instance) {
+    instance = new NativeAudioContextManager();
+  }
+  return instance;
+}
+
+/**
+ * Dispose the shared AudioContext.
+ */
+export async function disposeAudioContext(): Promise<void> {
+  if (instance) {
+    await instance.close();
+    instance = null;
+  }
+}
+
+export { NativeAudioContextManager };
package/src/context/AudioContext.web.ts
ADDED
@@ -0,0 +1,97 @@
+/**
+ * Web Audio Context Manager
+ *
+ * Manages a shared AudioContext for both recording and playback on web.
+ */
+
+import type { IAudioContext } from '../types';
+
+class WebAudioContextManager implements IAudioContext {
+  private context: AudioContext | null = null;
+  private _isInitialized = false;
+
+  get sampleRate(): number {
+    return this.context?.sampleRate ?? 44100;
+  }
+
+  get currentTime(): number {
+    return this.context?.currentTime ?? 0;
+  }
+
+  get isInitialized(): boolean {
+    return this._isInitialized && this.context !== null;
+  }
+
+  async initialize(): Promise<void> {
+    if (this.context) {
+      // Already initialized, just resume if suspended
+      if (this.context.state === 'suspended') {
+        await this.context.resume();
+      }
+      return;
+    }
+
+    // Create the AudioContext
+    const AudioContextClass = window.AudioContext || (window as any).webkitAudioContext;
+    if (!AudioContextClass) {
+      throw new Error('Web Audio API is not supported in this browser');
+    }
+
+    this.context = new AudioContextClass();
+    this._isInitialized = true;
+
+    // Resume if needed (some browsers require user interaction)
+    if (this.context.state === 'suspended') {
+      await this.context.resume();
+    }
+  }
+
+  getContext(): AudioContext | null {
+    return this.context;
+  }
+
+  async suspend(): Promise<void> {
+    if (this.context && this.context.state === 'running') {
+      await this.context.suspend();
+    }
+  }
+
+  async resume(): Promise<void> {
+    if (this.context && this.context.state === 'suspended') {
+      await this.context.resume();
+    }
+  }
+
+  async close(): Promise<void> {
+    if (this.context) {
+      await this.context.close();
+      this.context = null;
+      this._isInitialized = false;
+    }
+  }
+}
+
+// Singleton instance
+let instance: WebAudioContextManager | null = null;
+
+/**
+ * Get the shared AudioContext manager instance.
+ */
+export function getAudioContext(): IAudioContext {
+  if (!instance) {
+    instance = new WebAudioContextManager();
+  }
+  return instance;
+}
+
+/**
+ * Dispose the shared AudioContext.
+ */
+export async function disposeAudioContext(): Promise<void> {
+  if (instance) {
+    await instance.close();
+    instance = null;
+  }
+}
+
+export { WebAudioContextManager };
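Both platform variants expose the same singleton surface. A minimal usage sketch, assuming getAudioContext and disposeAudioContext are re-exported through the package entry points:

import { getAudioContext, disposeAudioContext } from '@idealyst/audio';

async function warmUpAudio(): Promise<void> {
  const audio = getAudioContext();   // lazily creates the singleton manager
  await audio.initialize();          // on web, call this from a user gesture so resume() succeeds
  console.log('sample rate:', audio.sampleRate, 'clock:', audio.currentTime);
}

async function tearDownAudio(): Promise<void> {
  await disposeAudioContext();       // closes the web AudioContext / drops the native reference
}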
package/src/context/index.native.ts
ADDED
@@ -0,0 +1 @@
+export { getAudioContext, disposeAudioContext, NativeAudioContextManager } from './AudioContext.native';
package/src/context/index.ts
ADDED
@@ -0,0 +1 @@
+export { getAudioContext, disposeAudioContext, WebAudioContextManager } from './AudioContext.web';
package/src/hooks/useAudio.ts
ADDED
@@ -0,0 +1,129 @@
+/**
+ * useAudio Hook
+ *
+ * Provides access to the shared audio context and session management.
+ */
+
+import { useState, useEffect, useCallback, useRef } from 'react';
+import type {
+  UseAudioOptions,
+  UseAudioResult,
+  AudioSessionState,
+  AudioSessionConfig,
+} from '../types';
+import { DEFAULT_SESSION_STATE, SESSION_PRESETS } from '../constants';
+import { getAudioContext } from '../context';
+import { getAudioSessionManager } from '../session';
+
+export function useAudio(options: UseAudioOptions = {}): UseAudioResult {
+  const { session, initializeOnMount = true } = options;
+
+  const [isInitialized, setIsInitialized] = useState(false);
+  const [sessionState, setSessionState] = useState<AudioSessionState>(DEFAULT_SESSION_STATE);
+  const [outputs, setOutputs] = useState<string[]>([]);
+
+  const audioContextRef = useRef(getAudioContext());
+  const sessionManagerRef = useRef(getAudioSessionManager());
+  const mountedRef = useRef(true);
+
+  // Initialize audio context and session
+  const initialize = useCallback(async () => {
+    try {
+      const audioContext = audioContextRef.current;
+      const sessionManager = sessionManagerRef.current;
+
+      // Initialize audio context
+      if (!audioContext.isInitialized) {
+        await audioContext.initialize();
+      }
+
+      // Configure session if provided
+      if (session) {
+        await sessionManager.configure(session);
+      } else {
+        // Use default preset
+        await sessionManager.configure(SESSION_PRESETS.default);
+      }
+
+      // Activate session
+      await sessionManager.activate();
+
+      if (mountedRef.current) {
+        setIsInitialized(true);
+        setSessionState(sessionManager.state);
+        setOutputs(sessionManager.getCurrentOutputs());
+      }
+    } catch (error) {
+      console.error('Failed to initialize audio:', error);
+      throw error;
+    }
+  }, [session]);
+
+  // Configure audio session
+  const configureSession = useCallback(async (config: Partial<AudioSessionConfig>) => {
+    const sessionManager = sessionManagerRef.current;
+    await sessionManager.configure(config);
+    if (mountedRef.current) {
+      setSessionState(sessionManager.state);
+    }
+  }, []);
+
+  // Suspend audio context
+  const suspend = useCallback(async () => {
+    const audioContext = audioContextRef.current;
+    await audioContext.suspend();
+  }, []);
+
+  // Resume audio context
+  const resume = useCallback(async () => {
+    const audioContext = audioContextRef.current;
+    await audioContext.resume();
+  }, []);
+
+  // Setup on mount
+  useEffect(() => {
+    mountedRef.current = true;
+
+    if (initializeOnMount) {
+      initialize().catch(console.error);
+    }
+
+    // Subscribe to session changes
+    const sessionManager = sessionManagerRef.current;
+
+    const unsubInterruption = sessionManager.onInterruption((interruption) => {
+      if (mountedRef.current) {
+        // Handle interruption (e.g., phone call)
+        if (interruption.type === 'began') {
+          // Audio was interrupted
+          console.log('Audio session interrupted');
+        } else if (interruption.type === 'ended' && interruption.shouldResume) {
+          // Resume audio if appropriate
+          console.log('Audio session interruption ended, should resume');
+        }
+      }
+    });
+
+    const unsubRouteChange = sessionManager.onRouteChange((change) => {
+      if (mountedRef.current) {
+        setOutputs(change.currentOutputs);
+      }
+    });
+
+    return () => {
+      mountedRef.current = false;
+      unsubInterruption();
+      unsubRouteChange();
+    };
+  }, [initializeOnMount, initialize]);
+
+  return {
+    isInitialized,
+    sessionState,
+    outputs,
+    initialize,
+    configureSession,
+    suspend,
+    resume,
+  };
+}
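To round out the hook, a hedged component-level sketch: SESSION_PRESETS comes from constants.ts, and importing both names from the package root assumes they are re-exported there.

import React, { useEffect } from 'react';
import { useAudio, SESSION_PRESETS } from '@idealyst/audio';

// Wraps an app subtree and brings the shared audio context plus a
// voice-chat session up on mount (initializeOnMount defaults to true).
export function AudioBootstrapper({ children }: { children: React.ReactNode }) {
  const { isInitialized, sessionState, outputs } = useAudio({
    session: SESSION_PRESETS.voiceChat,
  });

  useEffect(() => {
    if (isInitialized) {
      console.log(`Audio session "${sessionState.category}" active; outputs:`, outputs);
    }
  }, [isInitialized, sessionState, outputs]);

  return <>{children}</>;
}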