stream-chat-expo 7.1.2 → 7.2.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -10,7 +10,7 @@
10
10
  [![NPM](https://img.shields.io/npm/v/stream-chat-react-native.svg)](https://www.npmjs.com/package/stream-chat-react-native)
11
11
  [![Build Status](https://github.com/GetStream/stream-chat-react-native/actions/workflows/release.yml/badge.svg)](https://github.com/GetStream/stream-chat-react-native/actions)
12
12
  [![Component Reference](https://img.shields.io/badge/docs-component%20reference-blue.svg)](https://getstream.io/chat/docs/sdk/reactnative)
13
- ![JS Bundle Size](https://img.shields.io/badge/js_bundle_size-459%20KB-blue)
13
+ ![JS Bundle Size](https://img.shields.io/badge/js_bundle_size-450%20KB-blue)
14
14
 
15
15
  <img align="right" src="https://getstream.imgix.net/images/ios-chat-tutorial/iphone_chat_art@3x.png?auto=format,enhance" width="50%" />
16
16
 
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "stream-chat-expo",
3
3
  "description": "The official Expo SDK for Stream Chat, a service for building chat applications",
4
- "version": "7.1.2",
4
+ "version": "7.2.0-beta.1",
5
5
  "author": {
6
6
  "company": "Stream.io Inc",
7
7
  "name": "Stream.io Inc"
@@ -11,7 +11,7 @@
11
11
  "types": "types/index.d.ts",
12
12
  "dependencies": {
13
13
  "mime": "^4.0.7",
14
- "stream-chat-react-native-core": "7.1.2"
14
+ "stream-chat-react-native-core": "7.2.0-beta.1"
15
15
  },
16
16
  "peerDependencies": {
17
17
  "expo": ">=51.0.0",
@@ -24,12 +24,16 @@
24
24
  "expo-image-picker": "*",
25
25
  "expo-media-library": "*",
26
26
  "expo-sharing": "*",
27
- "expo-video": "*"
27
+ "expo-video": "*",
28
+ "expo-audio": "*"
28
29
  },
29
30
  "peerDependenciesMeta": {
30
31
  "expo-av": {
31
32
  "optional": true
32
33
  },
34
+ "expo-audio": {
35
+ "optional": true
36
+ },
33
37
  "expo-video": {
34
38
  "optional": true
35
39
  },
@@ -57,7 +61,8 @@
57
61
  },
58
62
  "devDependencies": {
59
63
  "expo": "^53.0.12",
60
- "expo-image-manipulator": "^12.0.5"
64
+ "expo-image-manipulator": "^12.0.5",
65
+ "expo-audio": "~0.4.6"
61
66
  },
62
67
  "scripts": {
63
68
  "prepack": " cp ../../README.md .",
@@ -1,3 +1,5 @@
1
+ import { Platform } from 'react-native';
2
+
1
3
  import {
2
4
  AndroidAudioEncoder,
3
5
  AndroidOutputFormat,
@@ -5,10 +7,22 @@ import {
5
7
  IOSAudioQuality,
6
8
  IOSOutputFormat,
7
9
  ExpoRecordingOptions as RecordingOptions,
10
+ RecordingStatus,
8
11
  } from 'stream-chat-react-native-core';
9
12
 
10
13
  import { AudioComponent, RecordingObject } from './AudioVideo';
11
14
 
15
+ let ExpoAudioComponent;
16
+ let ExpoRecordingComponent;
17
+
18
+ try {
19
+ const { AudioModule } = require('expo-audio');
20
+ ExpoAudioComponent = AudioModule;
21
+ ExpoRecordingComponent = AudioModule.AudioRecorder;
22
+ } catch (e) {
23
+ // do nothing
24
+ }
25
+
12
26
  const sleep = (ms: number) =>
13
27
  new Promise<void>((resolve) => {
14
28
  setTimeout(() => {
@@ -16,7 +30,7 @@ const sleep = (ms: number) =>
16
30
  }, ms);
17
31
  });
18
32
 
19
- class _Audio {
33
+ class _AudioExpoAV {
20
34
  recording: typeof RecordingObject | null = null;
21
35
  audioRecordingConfiguration: AudioRecordingConfiguration = {
22
36
  mode: {
@@ -105,8 +119,233 @@ class _Audio {
105
119
  };
106
120
  }
107
121
 
122
+ class _AudioExpoAudio {
123
+ recording: typeof RecordingObject | null = null;
124
+ audioRecordingConfiguration: AudioRecordingConfiguration = {
125
+ mode: {
126
+ allowsRecordingIOS: true,
127
+ playsInSilentModeIOS: true,
128
+ },
129
+ options: {
130
+ android: {
131
+ audioEncoder: AndroidAudioEncoder.AAC,
132
+ extension: '.aac',
133
+ outputFormat: AndroidOutputFormat.AAC_ADTS,
134
+ },
135
+ ios: {
136
+ audioQuality: IOSAudioQuality.HIGH,
137
+ bitRate: 128000,
138
+ extension: '.aac',
139
+ numberOfChannels: 2,
140
+ outputFormat: IOSOutputFormat.MPEG4AAC,
141
+ sampleRate: 44100,
142
+ },
143
+ isMeteringEnabled: true,
144
+ web: {},
145
+ },
146
+ };
147
+
148
+ startRecording = async (recordingOptions: RecordingOptions, onRecordingStatusUpdate) => {
149
+ try {
150
+ const permissions = await ExpoAudioComponent.getRecordingPermissionsAsync();
151
+ const permissionsStatus = permissions.status;
152
+ let permissionsGranted = permissions.granted;
153
+
154
+ // If permissions have not been determined yet, ask the user for permissions.
155
+ if (permissionsStatus === 'undetermined') {
156
+ const newPermissions = await ExpoAudioComponent.requestRecordingPermissionsAsync();
157
+ permissionsGranted = newPermissions.granted;
158
+ }
159
+
160
+ // If they are explicitly denied after this, exit early by throwing an error
161
+ // that will be caught in the catch block below (as a single source of not
162
+ // starting the player). The player would error itself anyway if we did not do
163
+ // this, but there's no reason to run the asynchronous calls when we know
164
+ // immediately that the player will not be run.
165
+ if (!permissionsGranted) {
166
+ throw new Error('Missing audio recording permission.');
167
+ }
168
+ await ExpoAudioComponent.setAudioModeAsync(
169
+ expoAvToExpoAudioModeAdapter(this.audioRecordingConfiguration.mode),
170
+ );
171
+ const options = {
172
+ ...recordingOptions,
173
+ ...this.audioRecordingConfiguration.options,
174
+ };
175
+
176
+ this.recording = new ExpoAudioRecordingAdapter(options);
177
+ await this.recording.createAsync(
178
+ Platform.OS === 'android' ? 100 : 60,
179
+ onRecordingStatusUpdate,
180
+ );
181
+ return { accessGranted: true, recording: this.recording };
182
+ } catch (error) {
183
+ console.error('Failed to start recording', error);
184
+ this.recording = null;
185
+ return { accessGranted: false, recording: null };
186
+ }
187
+ };
188
+ stopRecording = async () => {
189
+ try {
190
+ if (this.recording) {
191
+ await this.recording.stopAndUnloadAsync();
192
+ }
193
+ this.recording = null;
194
+ } catch (error) {
195
+ console.log('Error stopping recording', error);
196
+ }
197
+ };
198
+ }
199
+
200
+ class ExpoAudioRecordingAdapter {
201
+ private recording;
202
+ private recordingStateInterval;
203
+ private uri;
204
+ private options;
205
+
206
+ constructor(options: RecordingOptions) {
207
+ // Currently, expo-audio has a bug where isMeteringEnabled is not respected
208
+ // whenever we pass it to the Recording class constructor - but rather it is
209
+ // only respected whenever you pass it to prepareToRecordAsync. That in turn
210
+ // however, means that all other audio related configuration will be overwritten
211
+ // and forgotten. So, we snapshot the configuration whenever we create an instance
212
+ // of a recorder and pass it to both places. Furthermore, the type of the options
213
+ // in prepareToRecordAsync is wrong - it's supposed to be the flattened config;
214
+ // otherwise none of the quality properties get respected either (only the top level
215
+ // ones).
216
+ this.options = flattenExpoAudioRecordingOptions(options);
217
+ this.recording = new ExpoRecordingComponent(this.options);
218
+ this.uri = null;
219
+ }
220
+
221
+ createAsync = async (
222
+ progressUpdateInterval: number = 500,
223
+ onRecordingStatusUpdate: (status: RecordingStatus) => void,
224
+ ) => {
225
+ this.recordingStateInterval = setInterval(() => {
226
+ const status = this.recording.getStatus();
227
+ onRecordingStatusUpdate(status);
228
+ }, progressUpdateInterval);
229
+ this.uri = null;
230
+ await this.recording.prepareToRecordAsync(this.options);
231
+ this.recording.record();
232
+ };
233
+
234
+ stopAndUnloadAsync = async () => {
235
+ clearInterval(this.recordingStateInterval);
236
+ await this.recording.stop();
237
+ this.uri = this.recording.uri;
238
+ this.recording.release();
239
+ };
240
+
241
+ getURI = () => this.uri;
242
+ }
243
+
108
244
  export const overrideAudioRecordingConfiguration = (
109
245
  audioRecordingConfiguration: AudioRecordingConfiguration,
110
246
  ) => audioRecordingConfiguration;
111
247
 
112
- export const Audio = AudioComponent ? new _Audio() : null;
248
+ const flattenExpoAudioRecordingOptions = (
249
+ options: RecordingOptions & {
250
+ bitRate?: number;
251
+ extension?: string;
252
+ numberOfChannels?: number;
253
+ sampleRate?: number;
254
+ },
255
+ ) => {
256
+ let commonOptions = {
257
+ bitRate: options.bitRate,
258
+ extension: options.extension,
259
+ isMeteringEnabled: options.isMeteringEnabled ?? false,
260
+ numberOfChannels: options.numberOfChannels,
261
+ sampleRate: options.sampleRate,
262
+ };
263
+
264
+ if (Platform.OS === 'ios') {
265
+ commonOptions = {
266
+ ...commonOptions,
267
+ ...options.ios,
268
+ };
269
+ } else if (Platform.OS === 'android') {
270
+ const audioEncoder = options.android.audioEncoder;
271
+ const audioEncoderConfig = audioEncoder
272
+ ? { audioEncoder: expoAvToExpoAudioAndroidEncoderAdapter(audioEncoder) }
273
+ : {};
274
+ const outputFormat = options.android.outputFormat;
275
+ const outputFormatConfig = outputFormat
276
+ ? { outputFormat: expoAvToExpoAudioAndroidOutputAdapter(outputFormat) }
277
+ : {};
278
+ commonOptions = {
279
+ ...commonOptions,
280
+ ...options.android,
281
+ ...audioEncoderConfig,
282
+ ...outputFormatConfig,
283
+ };
284
+ }
285
+ return commonOptions;
286
+ };
287
+
288
+ const expoAvToExpoAudioModeAdapter = (mode: AudioRecordingConfiguration['mode']) => {
289
+ const {
290
+ allowsRecordingIOS,
291
+ interruptionModeAndroid,
292
+ interruptionModeIOS,
293
+ playsInSilentModeIOS,
294
+ playThroughEarpieceAndroid,
295
+ staysActiveInBackground,
296
+ } = mode;
297
+
298
+ return {
299
+ allowsRecording: allowsRecordingIOS,
300
+ interruptionMode: interruptionModeIOS,
301
+ interruptionModeAndroid,
302
+ playsInSilentMode: playsInSilentModeIOS,
303
+ shouldPlayInBackground: staysActiveInBackground,
304
+ shouldRouteThroughEarpiece: playThroughEarpieceAndroid,
305
+ };
306
+ };
307
+
308
+ const expoAvToExpoAudioAndroidEncoderAdapter = (
309
+ audioEncoder: AudioRecordingConfiguration['options']['android']['audioEncoder'],
310
+ ) => {
311
+ const encoderMap = {
312
+ 0: 'default',
313
+ 1: 'amr_nb',
314
+ 2: 'amr_wb',
315
+ 3: 'aac',
316
+ 4: 'he_aac',
317
+ 5: 'aac_eld',
318
+ };
319
+
320
+ return Object.keys(encoderMap).includes(audioEncoder.toString())
321
+ ? encoderMap[audioEncoder]
322
+ : 'default';
323
+ };
324
+
325
+ const expoAvToExpoAudioAndroidOutputAdapter = (
326
+ outputFormat: AudioRecordingConfiguration['options']['android']['outputFormat'],
327
+ ) => {
328
+ const outputFormatMap = {
329
+ 0: 'default',
330
+ 1: '3gp',
331
+ 2: 'mpeg4',
332
+ 3: 'amrnb',
333
+ 4: 'amrwb',
334
+ 5: 'default',
335
+ 6: 'aac_adts',
336
+ 7: 'default',
337
+ 8: 'mpeg2ts',
338
+ 9: 'webm',
339
+ };
340
+
341
+ return Object.keys(outputFormatMap).includes(outputFormat.toString())
342
+ ? outputFormatMap[outputFormat]
343
+ : 'default';
344
+ };
345
+
346
+ // Always try to prioritize expo-audio if it's there.
347
+ export const Audio = ExpoRecordingComponent
348
+ ? new _AudioExpoAudio()
349
+ : AudioComponent
350
+ ? new _AudioExpoAV()
351
+ : null;
@@ -1,18 +1,177 @@
1
+ import type { PlaybackStatus, SoundReturnType } from 'stream-chat-react-native-core';
2
+
1
3
  import { AudioComponent } from './AudioVideo';
2
4
 
5
+ let ExpoAudioComponent;
6
+ let expoCreateSoundPlayer;
7
+
8
+ try {
9
+ const { createAudioPlayer, AudioModule } = require('expo-audio');
10
+ ExpoAudioComponent = AudioModule;
11
+ expoCreateSoundPlayer = createAudioPlayer;
12
+ } catch (e) {
13
+ // do nothing
14
+ }
15
+
3
16
  export const Sound = {
4
- initializeSound: AudioComponent
17
+ // Always try to prioritize expo-audio if it's there.
18
+ initializeSound: ExpoAudioComponent
5
19
  ? async (source, initialStatus, onPlaybackStatusUpdate: (playbackStatus) => void) => {
6
- await AudioComponent.setAudioModeAsync({
7
- playsInSilentModeIOS: true,
20
+ await ExpoAudioComponent.setAudioModeAsync({
21
+ playsInSilentMode: true,
8
22
  });
9
- const { sound } = await AudioComponent.Sound.createAsync(
10
- source,
11
- initialStatus,
12
- onPlaybackStatusUpdate,
13
- );
23
+ const sound = new ExpoAudioSoundAdapter(onPlaybackStatusUpdate);
24
+ await sound.loadAsync(source, initialStatus);
14
25
  return sound;
15
26
  }
16
- : null,
27
+ : AudioComponent
28
+ ? async (source, initialStatus, onPlaybackStatusUpdate: (playbackStatus) => void) => {
29
+ await AudioComponent.setAudioModeAsync({
30
+ playsInSilentModeIOS: true,
31
+ });
32
+ const { sound } = await AudioComponent.Sound.createAsync(
33
+ source,
34
+ initialStatus,
35
+ onPlaybackStatusUpdate,
36
+ );
37
+ return sound;
38
+ }
39
+ : null,
17
40
  Player: null,
18
41
  };
42
+
43
+ type ExpoAudioPlaybackStatus = {
44
+ currentTime: number;
45
+ didJustFinish: boolean;
46
+ duration: number;
47
+ id: number;
48
+ isBuffering: boolean;
49
+ isLoaded: boolean;
50
+ loop: boolean;
51
+ mute: boolean;
52
+ playbackRate: number;
53
+ playbackState: string;
54
+ playing: boolean;
55
+ reasonForWaitingToPlay: string;
56
+ shouldCorrectPitch: boolean;
57
+ timeControlStatus: string;
58
+ };
59
+
60
+ class ExpoAudioSoundAdapter {
61
+ private player;
62
+ private statusEventListener;
63
+ private initialPitchCorrectionQuality;
64
+ private initialShouldCorrectPitch;
65
+ private onPlaybackStatusUpdate;
66
+
67
+ constructor(onPlaybackStatusUpdate: (playbackStatus: PlaybackStatus) => void) {
68
+ this.onPlaybackStatusUpdate = (playbackStatus: ExpoAudioPlaybackStatus) => {
69
+ onPlaybackStatusUpdate(expoAudioToExpoAvStatusAdapter(playbackStatus));
70
+ if (playbackStatus.didJustFinish) {
71
+ this.unsubscribeStatusEventListener();
72
+ }
73
+ };
74
+ }
75
+
76
+ subscribeStatusEventListener = () => {
77
+ if (this.statusEventListener) {
78
+ this.unsubscribeStatusEventListener();
79
+ }
80
+ this.statusEventListener = this.player.addListener(
81
+ 'playbackStatusUpdate',
82
+ this.onPlaybackStatusUpdate,
83
+ );
84
+ };
85
+
86
+ unsubscribeStatusEventListener = () => {
87
+ if (this.statusEventListener) {
88
+ this.statusEventListener.remove();
89
+ this.statusEventListener = null;
90
+ }
91
+ };
92
+
93
+ // eslint-disable-next-line require-await
94
+ loadAsync = async (source, initialStatus) => {
95
+ this.player = expoCreateSoundPlayer?.(source, initialStatus.progressUpdateIntervalMillis);
96
+ this.initialShouldCorrectPitch = initialStatus.shouldCorrectPitch;
97
+ this.initialPitchCorrectionQuality = initialStatus.pitchCorrectionQuality;
98
+ };
99
+
100
+ // eslint-disable-next-line require-await
101
+ stopAsync: SoundReturnType['stopAsync'] = async () => {
102
+ this.unsubscribeStatusEventListener();
103
+ this.player.seekTo(0);
104
+ this.player.pause();
105
+ };
106
+
107
+ // eslint-disable-next-line require-await
108
+ unloadAsync: SoundReturnType['unloadAsync'] = async () => {
109
+ this.unsubscribeStatusEventListener();
110
+ this.player.release();
111
+ };
112
+
113
+ // eslint-disable-next-line require-await
114
+ playAsync: SoundReturnType['playAsync'] = async () => {
115
+ this.subscribeStatusEventListener();
116
+ this.player.play();
117
+ };
118
+
119
+ // eslint-disable-next-line require-await
120
+ pauseAsync: SoundReturnType['pauseAsync'] = async () => {
121
+ this.unsubscribeStatusEventListener();
122
+ this.player.pause();
123
+ };
124
+
125
+ // eslint-disable-next-line require-await
126
+ replayAsync: SoundReturnType['replayAsync'] = async () => {
127
+ this.subscribeStatusEventListener();
128
+ this.player.seekTo(0);
129
+ };
130
+
131
+ // eslint-disable-next-line require-await
132
+ setPositionAsync: SoundReturnType['setPositionAsync'] = async (milliseconds) => {
133
+ const seconds = milliseconds / 1000;
134
+ this.player.seekTo(seconds);
135
+ };
136
+
137
+ // eslint-disable-next-line require-await
138
+ setRateAsync: SoundReturnType['setRateAsync'] = async (
139
+ rate,
140
+ shouldCorrectPitch = this.initialShouldCorrectPitch,
141
+ pitchCorrectionQuality = this.initialPitchCorrectionQuality,
142
+ ) => {
143
+ // On Android, pitch correction sets the playback speed to 1f every time
144
+ // as seen here: https://github.com/expo/expo/blob/f9d82c5af6d472c257b14c2657938db1be4a1b2c/packages/expo-audio/android/src/main/java/expo/modules/audio/AudioModule.kt#L409
145
+ // Pitch correction is set to true whenever the pitchCorrectionQuality parameter is set,
146
+ // so there isn't much we can do about it for now.
147
+ // This is wrong and will likely be fixed within the library.
148
+ if (shouldCorrectPitch && pitchCorrectionQuality) {
149
+ this.player.setPlaybackRate(rate, pitchCorrectionQuality);
150
+ return;
151
+ }
152
+ this.player.setPlaybackRate(rate);
153
+ };
154
+ }
155
+
156
+ const expoAudioToExpoAvStatusAdapter = (
157
+ playbackStatus: ExpoAudioPlaybackStatus,
158
+ ): PlaybackStatus => {
159
+ const { currentTime, didJustFinish, duration, isBuffering, isLoaded, loop, mute, playing } =
160
+ playbackStatus;
161
+
162
+ return {
163
+ currentPosition: undefined, // not present in the expo-av api, breaks things if set
164
+ didJustFinish,
165
+ duration: undefined, // not present in the expo-av api, breaks things if set
166
+ durationMillis: duration * 1000,
167
+ error: null, // TODO: check how we can see if there is an error
168
+ isBuffering,
169
+ isLoaded,
170
+ isLooping: loop,
171
+ isMuted: mute,
172
+ isPlaying: playing,
173
+ isSeeking: false, // we don't use this anywhere, so just defaulting to a safe value since nothing similar exists in expo-audio
174
+ positionMillis: currentTime * 1000,
175
+ shouldPlay: undefined, // we cannot determine whether the audio should be playing or not
176
+ };
177
+ };