quickblox-react-ui-kit 0.5.0-beta.2 → 0.5.0-beta.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,133 @@
+ declare module 'media-recorder-js' {
+   interface QBMediaRecorderConstructorProps {
+     /** Preferred MIME type */
+     mimeType?: string
+     workerPath?: string
+     /**
+      * The minimum number of milliseconds of data to return
+      * in a single Blob, firing the 'ondataavailable' callback
+      * (not needed with 'audio/wav' or 'audio/mp3')
+      *
+      * @default 1000
+      */
+     timeslice?: number
+     /**
+      * What to do with a muted input MediaStreamTrack,
+      * e.g. insert black frames/zero audio volume in the recording
+      * or ignore it altogether
+      *
+      * @default true
+      */
+     ignoreMutedMedia?: boolean
+     /** Recording start event handler */
+     onstart?: VoidFunction
+     /** Recording stop event handler */
+     onstop?: (file: Blob) => void
+     /** Recording pause event handler */
+     onpause?: VoidFunction
+     /** Recording resume event handler */
+     onresume?: VoidFunction
+     /** Error event handler */
+     onerror?: (error: unknown) => void
+     /**
+      * `dataavailable` event handler.
+      * The Blob of recorded data is contained in this event (the callback
+      * is not supported when recording as 'audio/wav' or 'audio/mp3')
+      */
+     ondataavailable?: (event: { data: Blob }) => void
+   }
+
+   class QBMediaRecorder {
+     constructor(config: QBMediaRecorderConstructorProps)
+
+     /**
+      * Switches recording Blob objects to the specified
+      * MIME type if `MediaRecorder` supports it.
+      */
+     toggleMimeType(mimeType: string): void
+
+     /**
+      * Returns the current `MediaRecorder` state
+      */
+     getState(): 'inactive' | 'recording' | 'paused'
+
+     /**
+      * Starts recording a stream.
+      * Fires the `onstart` callback.
+      */
+     start(stream: MediaStream): void
+
+     /**
+      * Stops recording a stream
+      *
+      * @fires the `onstop` callback, passing it the recorded Blob
+      */
+     stop(): void
+
+     /** Pauses stream recording */
+     pause(): void
+
+     /** Resumes stream recording */
+     resume(): void
+
+     /**
+      * Changes the recording source
+      */
+     change(stream: MediaStream): void
+
+     /**
+      * Creates a file from the recorded blob and downloads it as a file.
+      * This method calls `stop` if recording is in progress.
+      *
+      * @param {string} filename Name of the video file to be downloaded
+      * (defaults to `Date.now()`)
+      */
+     download(filename?: string): void
+
+     _getBlobRecorded(): Blob
+
+     callbacks: Pick<
+       QBMediaRecorderConstructorProps,
+       | 'onstart'
+       | 'onstop'
+       | 'onpause'
+       | 'onresume'
+       | 'ondataavailable'
+       | 'onerror'
+     >
+
+     /**
+      * Checks whether recording is possible in the current environment.
+      * Checks `MediaRecorder`, `MediaRecorder.isTypeSupported` and `Blob`.
+      */
+     static isAvailable(): boolean
+
+     /**
+      * Checks if the AudioContext API is available.
+      * Checks `window.AudioContext` or `window.webkitAudioContext`.
+      */
+     static isAudioContext(): boolean
+     /**
+      * The `QBMediaRecorder.isTypeSupported()` static method returns
+      * a Boolean which is true if the specified MIME type is one
+      * the user agent should be able to record successfully.
+      * @param mimeType The MIME media type to check.
+      *
+      * @returns true if the `MediaRecorder` implementation is capable of
+      * recording `Blob` objects for the specified MIME type. Recording may
+      * still fail if there are insufficient resources to support the
+      * recording and encoding process. If the value is false, the user
+      * agent is incapable of recording the specified format.
+      */
+
+     static isTypeSupported(mimeType: string): boolean
+
+     /**
+      * Returns the supported MIME types
+      * @param type video or audio (defaults to 'video')
+      */
+     static getSupportedMimeTypes(type?: 'audio' | 'video'): string[]
+   }
+
+   export default QBMediaRecorder
+ }
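
The declaration above is the full public surface of QBMediaRecorder that the UI Kit relies on. As a quick orientation, the following minimal usage sketch is derived only from that declaration; the stream acquisition and the timeout are illustrative, not part of the package:

    import QBMediaRecorder from 'media-recorder-js';

    // Record the microphone for a few seconds and resolve with the final Blob.
    // Based solely on the typings above; option handling inside the library may differ.
    async function recordOnce(durationMs = 5000): Promise<Blob> {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });

      return new Promise<Blob>((resolve) => {
        const recorder = new QBMediaRecorder({
          mimeType: 'audio/webm',          // preferred type; the library may fall back
          timeslice: 1000,                 // fire ondataavailable roughly every second
          ignoreMutedMedia: true,
          onstop: (file) => resolve(file), // recorded Blob is delivered here
        });

        recorder.start(stream);
        setTimeout(() => recorder.stop(), durationMs);
      });
    }
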
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "quickblox-react-ui-kit",
-   "version": "0.5.0-beta.2",
+   "version": "0.5.0-beta.4",
    "main": "dist/index-ui.js",
    "types": "dist/index-ui.d.ts",
    "license": "MIT",
@@ -13,6 +13,7 @@
    "qb-ai-rephrase": "^0.1.2",
    "qb-ai-translate": "^0.1.2",
    "quickblox": "^2.19.2",
+   "media-recorder-js": "^2.1.0",
    "react": "^19.0.0",
    "react-dom": "^19.0.0",
    "react-router-dom": "^6.11.1",
@@ -4,4 +4,3 @@ export type FunctionResult<T> = {
    result: T | T[] | boolean;
    error: any;
  };
-
@@ -32,7 +32,6 @@ import {
    PaginatedDTOResult,
    RemoteDataSource,
  } from '../source/remote/RemoteDataSource';
- import { Stubs } from '../Stubs';
 
  export default class UsersRepository {
    private localDataSource: ILocalDataSource;
@@ -205,8 +204,9 @@ export default class UsersRepository {
      let dto: RemoteUserDTO = new RemoteUserDTO();
 
      dto.id = usersIds[i]?.toString() || '-1';
-     // eslint-disable-next-line no-await-in-loop
+
      try {
+       // eslint-disable-next-line no-await-in-loop
        dto = await this.remoteDataSource.getUser(dto);
      } catch (e) {
        dto.name = 'Unknown';
@@ -13,21 +13,32 @@ $dialog-information-container-dialog-information-height: 64px;
    justify-content: space-between;
    align-self: stretch;
    flex-shrink: 0;
+   font: var(--title-title-large);
    position: relative;
    border-bottom: 1px solid var(--divider);
    height: $dialog-information-container-dialog-information-height;
+
+   .dialog-header__icon {
+     svg {
+       width: 24px;
+       height: 24px;
+       fill: var(--main-text);
+     }
+   }
+ }
+ .header-dialog-info-icon {
+   width: 24px;
+   height: 24px;
+   fill: var(--secondary-elements);
  }
+
  .header-dialog-info-headline {
    color: var(--main-text, #0b1b0f);
    text-align: left;
    font: var(--title-title-large);
    position: relative;
  }
- .header-dialog-info-icon {
-   width: 24px;
-   height: 24px;
-   fill: var(--secondary-elements);
- }
+
 
  .dialog-information-profile,
  .dialog-information-profile * {
@@ -108,7 +119,6 @@ $dialog-information-container-dialog-information-height: 64px;
    box-sizing: border-box;
  }
  .dialog-info-action-wrapper-settings {
-   background: var(--color-background, #ffffff);
    padding: 16px;
    display: flex;
    flex-direction: row;
@@ -397,8 +397,8 @@ const DialogInfo = ({
    >
      <EditDialog
        disableActions={disableAction}
-       nameDialog={dialogViewModel?.entity.name || dialog?.name}
-       typeDialog={dialogViewModel?.entity.type || dialog?.type}
+       nameDialog={dialogViewModel?.entity?.name || dialog?.name}
+       typeDialog={dialogViewModel?.entity?.type || dialog?.type}
        ulrIcon={getUrlAvatar(dialogViewModel?.entity || dialog)}
        typeAddEditDialog={TypeOpenDialog.edit}
        clickUpdatedHandler={getDialogUpdatedInfoHandler}
@@ -20,6 +20,15 @@ $members-container-header-height: 64px;
    border-width: 0 0 1px 0;
 
    height: $members-container-header-height;
+
+   .dialog-header__icon {
+     svg {
+       width: 24px;
+       height: 24px;
+       fill: var(--main-text);
+       padding: 0;
+     }
+   }
  }
  .members-container-header-left {
    display: flex;
@@ -157,6 +157,27 @@ export default function useDialogListViewModel(
      });
    }
 
+   // eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
+   function informDataSources(item: DialogEntity) {
+     const updateCurrentDialogInDataSourceUseCase: UpdateCurrentDialogInDataSourceUseCase =
+       new UpdateCurrentDialogInDataSourceUseCase(
+         new DialogsRepository(
+           currentContext.storage.LOCAL_DATA_SOURCE,
+           remoteDataSourceMock,
+         ),
+         item as GroupDialogEntity,
+         QBConfig,
+       );
+
+     updateCurrentDialogInDataSourceUseCase.execute().catch((e) => {
+       console.log(
+         'Error updateCurrentDialogInDataSourceUseCase: ',
+         stringifyError(e),
+       );
+       throw new Error(stringifyError(e));
+     });
+   }
+
    const dialogUpdateHandler = (dialogInfo: DialogEventInfo) => {
      console.log('call dialogUpdateHandler in useDialogListView:', dialogInfo);
      if (
@@ -256,7 +277,15 @@ export default function useDialogListViewModel(
      setDialogs((prevDialogs) => {
        const newDialogs = prevDialogs.map((dialog) => {
          if (dialog.id === dialogInfo.dialogInfo?.id) {
-           return dialogInfo.dialogInfo as PublicDialogEntity;
+           const updatedDialogInfo = {
+             ...dialogInfo.dialogInfo,
+             unreadMessageCount:
+               dialog.id === newDialog?.id
+                 ? 0
+                 : dialogInfo.dialogInfo.unreadMessageCount,
+           };
+
+           return updatedDialogInfo as PublicDialogEntity;
          }
 
          return dialog;
@@ -270,12 +299,6 @@ export default function useDialogListViewModel(
            new Date(a.updatedAt).getTime();
        });
 
-       // const sortedData = [...newDialogs].sort((a, b) => {
-       //   return (
-       //     new Date(b.lastMessage.dateSent).getTime() - new Date(a.lastMessage.dateSent).getTime()
-       //   );
-       // });
-
        return sortedData;
      });
    }
@@ -534,27 +557,6 @@ export default function useDialogListViewModel(
      return Promise.resolve(resultEnity);
    };
 
-   // eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function
-   function informDataSources(item: DialogEntity) {
-     const updateCurrentDialogInDataSourceUseCase: UpdateCurrentDialogInDataSourceUseCase =
-       new UpdateCurrentDialogInDataSourceUseCase(
-         new DialogsRepository(
-           currentContext.storage.LOCAL_DATA_SOURCE,
-           remoteDataSourceMock,
-         ),
-         item as GroupDialogEntity,
-         QBConfig,
-       );
-
-     updateCurrentDialogInDataSourceUseCase.execute().catch((e) => {
-       console.log(
-         'Error updateCurrentDialogInDataSourceUseCase: ',
-         stringifyError(e),
-       );
-       throw new Error(stringifyError(e));
-     });
-   }
-
    return {
      get entity(): DialogEntity {
        return newDialog as DialogEntity;
@@ -206,6 +206,14 @@
    border-width: 0 0 1px 0;
  }
 
+ .dialog-preview-container-context-menu {
+   svg {
+     fill: var(--tertiary-elements);
+     width: 24px;
+     height: 24px;
+   }
+ }
+
 
 
 
@@ -318,7 +318,9 @@ const PreviewDialog = ({
        }}
        placement="left"
      >
-       <MoreSvg />
+       <div className="dialog-preview-container-context-menu">
+         <MoreSvg className="" />
+       </div>
      </Dropdown>
    }
    onClick={() => {
@@ -432,32 +432,34 @@ const QuickBloxUIKitDesktopLayout = ({
        />
      ) : null
    }
-   renderDialogList={(handleSelectDialog) =>
-     // eslint-disable-next-line no-nested-ternary
-     dialogsViewModel?.loading ? (
-       <div
-         className="dialog-list__loader-container"
-         style={{
-           display: 'flex',
-           flexDirection: 'row',
-           alignItems: 'center',
-           justifyContent: 'center',
-         }}
-       >
-         <Loader size="md" className="dialog-list__loader" />
-       </div>
-     ) : searchedDialogs.length > 0 ? (
-       searchedDialogs.map((dlg, index) =>
-         renderDialogItem(dlg, index, handleSelectDialog),
-       )
-     ) : (
-       <Placeholder
-         icon={<ChatSvg />}
-         text="There are no dialogs."
-         className="dialog-empty-chat-placeholder"
-       />
-     )
-   }
+   renderDialogList={(handleSelectDialog) => (
+     <>
+       {dialogsViewModel?.loading && (
+         <div
+           className="dialog-list__loader-container"
+           style={{
+             display: 'flex',
+             flexDirection: 'row',
+             alignItems: 'center',
+             justifyContent: 'center',
+           }}
+         >
+           <Loader size="md" className="dialog-list__loader" />
+         </div>
+       )}
+       {searchedDialogs.length > 0 ? (
+         searchedDialogs.map((dlg, index) =>
+           renderDialogItem(dlg, index, handleSelectDialog),
+         )
+       ) : (
+         <Placeholder
+           icon={<ChatSvg />}
+           text="There are no dialogs."
+           className="dialog-empty-chat-placeholder"
+         />
+       )}
+     </>
+   )}
    />
  ) : null
}
@@ -77,7 +77,7 @@
 
  &--danger {
    background-color: var(--error);
-   color: var(--secondary-background);
+   color: var(--system);
 
    &:focus {
      outline: 2px solid var(--error-secondary);
@@ -5,18 +5,18 @@
    border-radius: 4px;
    padding: 4px;
    justify-content: center;
-   width: 18px;
-   height: 18px;
+   //width: 18px;
+   //height: 18px;
  }
 
  .checkbox-field > input {
-   position: absolute;
+   //position: fixed;
    z-index: -1;
    opacity: 0;
  }
 
  .checkbox-field .icon-checkmark {
-   position: absolute;
+   //position: fixed;
    width: 18px;
    height: 18px;
    fill: var(--main-elements);
@@ -24,7 +24,7 @@
  }
 
  .checkbox-field .icon-check {
-   position: absolute;
+   //position: fixed;
    width: 18px;
    height: 18px;
    fill: var(--secondary-elements);
@@ -19,7 +19,9 @@ export default function CheckBox({
      'disabled': disabled
    })}>
      {checked ? <CheckOnSvg className="icon-checkmark" /> : <CheckOffSvg className="icon-check" />}
-     <input type="checkbox" onChange={() => onChange?.(!checked)} disabled={disabled} />
+     <input type="checkbox"
+       onChange={() => onChange?.(!checked)}
+       disabled={disabled} />
    </label>
  );
}
@@ -54,7 +54,7 @@
  &__title {
    color: var(--main-text, #0b1b0f);
    text-align: left;
-   font: var(--title-title-medium);
+   font: var(--title-title-large);
    overflow: hidden;
    white-space: nowrap;
    text-overflow: ellipsis;
@@ -49,7 +49,7 @@ const VoiceRecordingProgress = ({
      stopTimer();
    }, longRec * 1000);
 
-   if (timerId === undefined) {
+   if (!timerId) {
      timerId = setInterval(() => {
        setCurrentTime(Date.now());
      }, 1000);
@@ -28,5 +28,9 @@
    line-height: 24px;
    letter-spacing: 0.15px;
    color: var(--main-text);
+   white-space: nowrap;
+   overflow: hidden;
+   text-overflow: ellipsis;
+   width: 180px;
  }
}
@@ -3,6 +3,7 @@ import '../Presentation/Views/Dialog/Dialog.scss';
  import '../Presentation/Views/Dialog/DialogHeader/DialogInfoIcon/DialogInfoIcon.scss';
  import { Tone } from 'qb-ai-rephrase/src/Tone';
  import { toast } from 'react-toastify';
+ import QBMediaRecorder from 'media-recorder-js';
  import useQbInitializedDataContext from '../Presentation/providers/QuickBloxUIKitProvider/useQbInitializedDataContext';
  import { DialogEntity } from '../Domain/entity/DialogEntity';
  import { DialogListViewModel } from '../Presentation/Views/DialogList/DialogListViewModel';
@@ -41,7 +42,6 @@ import { UsersListViewModel } from '../Presentation/Views/DialogInfo/UsersList/U
 
  interface QuickBloxUIKitReturn {
    constants: {
-     mimeType: string;
      messagePerPage: number;
      maxFileSize?: number;
      maxWidthToResizing: string;
@@ -65,9 +65,9 @@ interface QuickBloxUIKitReturn {
    showDialogInformation: boolean;
    needDialogInformation: boolean;
    isRecording: boolean;
-   stream?: MediaStream;
+   stream?: MediaStream | null;
    permission: boolean;
-   resultAudioBlob?: Blob;
+   resultAudioBlob?: Blob | null;
    audioChunks: Blob[];
    fileToSend?: File | null;
    messageText: string;
@@ -78,7 +78,6 @@ interface QuickBloxUIKitReturn {
    defaultAITranslateWidget?: AIMessageWidget;
    defaultAIAssistWidget?: AIMessageWidget;
    rephraseTones: Tone[];
-   mimeType: string;
    messagePerPage: number;
    enableForwarding: boolean;
    enableReplying: boolean;
@@ -146,7 +145,7 @@ export default function useQuickBloxUIKit({
    uikitHeightOffset = '0px',
  }: QuickBloxUIKitProps): QuickBloxUIKitReturn {
    // 103
-   const mimeType = 'audio/webm;codecs=opus'; // audio/ogg audio/mpeg audio/webm audio/x-wav audio/mp4
+   // const mimeType = 'audio/webm;codecs=opus'; // audio/ogg audio/mpeg audio/webm audio/x-wav audio/mp4
    const messagePerPage = 47;
 
    const currentContext = useQbInitializedDataContext();
@@ -372,10 +371,14 @@ export default function useQuickBloxUIKit({
    const [fileToSend, setFileToSend] = useState<File | null>(null);
    const [isRecording, setIsRecording] = useState(false);
    const [permission, setPermission] = useState(false);
-   const [stream, setStream] = useState<MediaStream>();
-   const mediaRecorder = useRef<MediaRecorder>();
-   const [resultAudioBlob, setResultAudioBlob] = useState<Blob>();
-   const [audioChunks, setAudioChunks] = useState<Array<Blob>>([]);
+   //
+   const [stream, setStream] = useState<MediaStream | null>(null);
+   const mediaRecorder = useRef<QBMediaRecorder | null>(null);
+   const [resultAudioBlob, setResultAudioBlob] = useState<Blob | null>(null);
+   const [audioChunks, setAudioChunks] = useState<Blob[]>([]);
+   const [peerConnection, setPeerConnection] =
+     useState<RTCPeerConnection | null>(null);
+   //
    const newModal = useModal();
    const [dialogToLeave, setDialogToLeave] = useState<DialogEntity>();
    const [showDialogList, setShowDialogList] = useState<boolean>(true);
@@ -487,191 +490,190 @@ export default function useQuickBloxUIKit({
      });
    };
 
-   const getMicrophonePermission = async () => {
-     if (window) {
-       try {
-         const mediaStream = await navigator.mediaDevices.getUserMedia({
-           audio: true,
-           video: false,
-         });
-
-         setPermission(true);
-         setStream(mediaStream);
-       } catch (err) {
-         showErrorMessage(
-           `The MediaRecorder API throws exception ${stringifyError(err)} .`,
-         );
-       }
-     } else {
-       showErrorMessage(
-         'The MediaRecorder API is not supported in your browser.',
-       );
-     }
-   };
-
-   // eslint-disable-next-line @typescript-eslint/no-unused-vars,@typescript-eslint/require-await
-   const startRecording = async () => {
-     if (!stream) return;
-
-     // Browser detection
+   // Detect browser and set MIME type
+   const detectBrowserAndMimeType = () => {
      const userAgent = navigator.userAgent.toLowerCase();
      const isChrome =
        /chrome/.test(userAgent) && !/edge|opr|brave/.test(userAgent);
      const isSafari = /^((?!chrome|android).)*safari/.test(userAgent);
      const isFirefox = /firefox/.test(userAgent);
-     const isOther = !isChrome && !isSafari && !isFirefox;
+     const isIOS = /iphone|ipad|ipod/.test(userAgent);
 
-     // eslint-disable-next-line no-nested-ternary
      console.log(
-       'Browser is:',
-       // eslint-disable-next-line no-nested-ternary
-       isChrome
-         ? 'Chrome'
-         : // eslint-disable-next-line no-nested-ternary
-         isSafari
-         ? 'Safari'
-         : isFirefox
-         ? 'Firefox'
-         : 'Other',
+       `Browser detected: ${
+         // eslint-disable-next-line no-nested-ternary
+         isChrome
+           ? 'Chrome'
+           : // eslint-disable-next-line no-nested-ternary
+           isSafari
+           ? 'Safari'
+           : // eslint-disable-next-line no-nested-ternary
+           isFirefox
+           ? 'Firefox'
+           : isIOS
+           ? 'iOS'
+           : 'Other'
+       }`,
      );
 
      const mimeTypes = {
        chrome: ['audio/webm;codecs=opus', 'audio/webm'],
-       safari: ['audio/wav', 'audio/aac'],
+       safari: ['audio/mp4', 'audio/mp4;codecs=mp4a', 'audio/aac', 'audio/wav'],
+       ios: ['audio/mp4', 'audio/mp4;codecs=mp4a', 'audio/aac'],
        firefox: ['audio/ogg', 'audio/webm'],
-       other: ['audio/mp3', 'audio/wav', 'audio/webm'],
+       other: ['audio/webm', 'audio/mp4', 'audio/wav'],
      };
 
-     let mimeContent = 'audio/webm;codecs=opus';
-
-     if (isChrome) {
-       mimeContent =
-         mimeTypes.chrome.find((type) => MediaRecorder.isTypeSupported(type)) ||
-         'audio/webm';
-     } else if (isSafari) {
-       mimeContent =
-         mimeTypes.safari.find((type) => MediaRecorder.isTypeSupported(type)) ||
-         'audio/wav';
-     } else if (isFirefox) {
-       mimeContent =
-         mimeTypes.firefox.find((type) => MediaRecorder.isTypeSupported(type)) ||
-         'audio/webm';
-     } else if (isOther) {
-       mimeContent =
-         mimeTypes.other.find((type) => MediaRecorder.isTypeSupported(type)) ||
-         'audio/wav';
-     }
+     // eslint-disable-next-line no-nested-ternary
+     const targetMimeTypes = isIOS
+       ? mimeTypes.ios
+       : // eslint-disable-next-line no-nested-ternary
+       isSafari
+       ? mimeTypes.safari
+       : // eslint-disable-next-line no-nested-ternary
+       isChrome
+       ? mimeTypes.chrome
+       : isFirefox
+       ? mimeTypes.firefox
+       : mimeTypes.other;
+
+     return (
+       targetMimeTypes.find((type) => QBMediaRecorder.isTypeSupported(type)) ||
+       'audio/wav'
+     );
+   };
 
-     console.log(`Selected MIME-type: ${mimeContent}`);
+   // Request microphone access and set up WebRTC
+   const getMicrophonePermission = async () => {
+     if (!window) {
+       showErrorMessage(
+         'The MediaRecorder API is not supported in your browser.',
+       );
+
+       return;
+     }
 
      try {
-       const media = new MediaRecorder(stream, { mimeType: mimeContent });
+       const mediaStream = await navigator.mediaDevices.getUserMedia({
+         audio: true,
+       });
 
-       mediaRecorder.current = media;
-       mediaRecorder.current.start();
+       // Create WebRTC peer connection
+       const pc = new RTCPeerConnection();
 
-       const localAudioChunks: Blob[] = [];
+       mediaStream
+         .getTracks()
+         .forEach((track) => pc.addTrack(track, mediaStream));
 
-       mediaRecorder.current.ondataavailable = (event) => {
-         if (event.data.size > 0) {
-           localAudioChunks.push(event.data);
-         }
+       pc.ontrack = (event) => {
+         setStream(event.streams[0]);
        };
 
-       setAudioChunks(localAudioChunks);
+       setPeerConnection(pc);
+       setStream(mediaStream);
+       setPermission(true);
+       console.log('Microphone access granted, WebRTC connection established.');
+     } catch (err) {
+       showErrorMessage(
+         `The MediaRecorder API throws exception ${stringifyError(err)}.`,
+       );
+     }
+   };
+
+   // Start recording using QBMediaRecorder
+   const startWebRTCRecording = async () => {
+     try {
+       const audioContext = new AudioContext();
+       const source = audioContext.createMediaStreamSource(stream!);
+       const destination = audioContext.createMediaStreamDestination();
+       source.connect(destination);
+
+       const recorder = new QBMediaRecorder({
+         mimeType: 'audio/mp4',
+         timeslice: 1000, // Chunks of 1 second
+         ignoreMutedMedia: true,
+         onstart: () => console.log('Recording started in startWebRTCRecording'),
+         onstop: (file) => {
+           console.log('Final audio file:', file);
+           setResultAudioBlob(file);
+           setAudioChunks([]); // Clear recorded chunks
+         },
+         ondataavailable: (event) => {
+           if (event.data.size > 0) {
+             setAudioChunks((prevChunks) => [...prevChunks, event.data]);
+           }
+         },
+       });
+
+       mediaRecorder.current = recorder;
+       recorder.start(destination.stream);
+       setIsRecording(true);
+
+       console.log('WebRTC recording started.');
      } catch (error) {
-       console.error('Error creating MediaRecorder:', error);
+       console.error('Error starting WebRTC recording:', error);
      }
    };
-   // previous version - startRecording:
-   // const startRecording = async () => {
-   //   if (!stream) return;
-   //   const mimeTypes = [
-   //     'audio/aac',
-   //     'audio/mp4',
-   //     'audio/mpeg',
-   //     'audio/ogg',
-   //     'audio/wav',
-   //     'audio/webm',
-   //     'audio/3gpp',
-   //     'audio/flac',
-   //     'audio/x-aiff',
-   //     'audio/x-m4a',
-   //   ];
-   //
-   //   console.log('MIME TYPES: ');
-   //   mimeTypes.forEach((mType) => {
-   //     if (MediaRecorder.isTypeSupported(mimeType)) {
-   //       console.log(`${mType} is supported`);
-   //     } else {
-   //       console.log(`${mType} is not supported`);
-   //     }
-   //   });
-   //   // audio/mp4;codecs=mp4a audio/webm;codecs=opus audio/webm;codecs=vp9,opus
-   //   const mimeContent = window.MediaRecorder.isTypeSupported('audio/mp4')
-   //     ? 'audio/mp4;codecs=mp4a'
-   //     : 'audio/webm;codecs=opus';
-   //
-   //   const media = new MediaRecorder(stream, { mimeType: mimeContent });
-   //
-   //   mediaRecorder.current = media;
-   //   mediaRecorder.current.start();
-   //
-   //   const localAudioChunks: any[] = [];
-   //
-   //   mediaRecorder.current.ondataavailable = (event) => {
-   //     if (typeof event.data === 'undefined') return;
-   //     if (event.data.size === 0) return;
-   //     localAudioChunks.push(event.data);
-   //   };
-   //
-   //   setAudioChunks(localAudioChunks);
-   // };
+   // eslint-disable-next-line @typescript-eslint/require-await
+   const startRecording = async () => {
+     if (!stream) return;
 
-   // eslint-disable-next-line @typescript-eslint/no-unused-vars
-   const stopRecording = () => {
-     if (!mediaRecorder.current) return;
-     mediaRecorder.current.stop();
+     // Detect browser type
+     const userAgent = navigator.userAgent.toLowerCase();
+     const isSafari = /^((?!chrome|android).)*safari/.test(userAgent);
 
-     mediaRecorder.current.onstop = () => {
-       // eslint-disable-next-line @typescript-eslint/no-unused-vars
-       const mimeContent = window.MediaRecorder.isTypeSupported(
-         'audio/mp4;codecs=mp4a',
-       )
-         ? 'audio/mp4;codecs=mp4a'
-         : 'audio/webm;codecs=opus';
-       // const audioBlob = new Blob(audioChunks, { type: mimeContent }); // mimeType
-       // const mp4Blob = new Blob(recordedChunks, { type: 'video/mp4' });
+     if (isSafari || !window.MediaRecorder) {
+       console.log('Safari detected, using WebRTC.');
+       await startWebRTCRecording();
+       return;
+     }
 
-       // const audioBlob = new Blob(audioChunks, { type: 'video/mp4' }); // mimeType
-       // const audioBlob = new Blob(audioChunks, { type: 'audio/mp4' }); // mimeType
-       const audioBlob = new Blob(audioChunks, { type: 'audio/mp4' });
+     console.log('Using QBMediaRecorder.');
+     const mimeType = detectBrowserAndMimeType();
 
-       setResultAudioBlob(audioBlob);
+     console.log(`Selected MIME-type: ${mimeType}`);
 
-       setAudioChunks([]);
-       //
-       stream?.getAudioTracks().forEach((track) => {
-         track.stop();
-       });
-       setPermission(false);
-       //
-     };
+     const recorder = new QBMediaRecorder({
+       mimeType,
+       timeslice: 1000, // Chunks of 1 second
+       ignoreMutedMedia: true,
+       onstart: () => console.log('Recording started in startRecording'),
+       onstop: (file) => {
+         console.log('Final audio file:', file);
+         setResultAudioBlob(file);
+         setAudioChunks([]); // Clear recorded chunks
+       },
+       ondataavailable: (event) => {
+         if (event.data.size > 0) {
+           setAudioChunks((prev) => [...prev, event.data]);
+         }
+       },
+       onerror: (error) => console.error('Recording error:', error),
+     });
+
+     mediaRecorder.current = recorder;
+     recorder.start(stream);
    };
 
-   const blobToFile = (theBlob: Blob, fileName: string): File => {
-     const b: any = theBlob;
+   // Stop recording
+   const stopRecording = () => {
+     if (!mediaRecorder.current) return;
 
-     // A Blob() is almost a File() - it's just missing the two properties below which we will add
-     b.lastModifiedDate = new Date();
-     b.name = fileName;
+     mediaRecorder.current.stop();
 
-     // Cast to a File() type
-     const resultFile = theBlob as unknown as File;
+     // Stop WebRTC stream
+     if (peerConnection) {
+       peerConnection.close();
+       setPeerConnection(null);
+     }
+   };
 
-     return resultFile;
+   // Convert Blob to File
+   const blobToFile = (blob: Blob, fileName: string): File => {
+     return new File([blob], fileName, { type: blob.type });
    };
 
+
    function sendTextMessageActions(textToSend: string) {
      if (isOnline) {
        // closeReplyMessageFlowHandler
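
In the new flow above, getMicrophonePermission() acquires the microphone stream (and an RTCPeerConnection), startRecording() picks a MIME type via detectBrowserAndMimeType() and records with QBMediaRecorder, and startWebRTCRecording() is the Safari/no-MediaRecorder fallback that first routes the stream through an AudioContext. A standalone sketch of that fallback routing, assuming the same QBMediaRecorder API (it mirrors, but is not, the hook code itself):

    import QBMediaRecorder from 'media-recorder-js';

    // Pipe the mic stream through an AudioContext destination, then record
    // the destination's stream - the approach startWebRTCRecording uses above.
    async function recordViaAudioContext(onDone: (file: Blob) => void) {
      const mic = await navigator.mediaDevices.getUserMedia({ audio: true });

      const audioContext = new AudioContext();
      const source = audioContext.createMediaStreamSource(mic);
      const destination = audioContext.createMediaStreamDestination();
      source.connect(destination); // mic -> AudioContext -> destination.stream

      const recorder = new QBMediaRecorder({
        mimeType: 'audio/mp4', // the type the diff selects for this path
        timeslice: 1000,
        onstop: onDone,        // final Blob arrives when stop() is called
      });

      recorder.start(destination.stream);

      return recorder; // caller invokes recorder.stop() to finish the recording
    }
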
@@ -1081,7 +1083,6 @@ export default function useQuickBloxUIKit({
    // 972
    return {
      constants: {
-       mimeType,
        messagePerPage,
        maxFileSize,
        maxWidthToResizing,
@@ -1116,7 +1117,6 @@ export default function useQuickBloxUIKit({
      defaultAIRephraseWidget,
      defaultAITranslateWidget,
      defaultAIAssistWidget,
-     mimeType,
      messagePerPage,
      maxTokensForAIRephrase,
      rephraseTones,
@@ -26,7 +26,8 @@
    },
    "include": [
      "src",
-     "global.d.ts"
+     "global.d.ts",
+     "media-recorder-js.d.ts",
    ],
    "exclude": [
      "node_modules",
package/tsconfig.json CHANGED
@@ -27,7 +27,8 @@
    },
    "include": [
      "src",
-     "global.d.ts"
+     "global.d.ts",
+     "media-recorder-js.d.ts"
    ],
    "exclude": ["node_modules", "dist"]
  }
@@ -14,6 +14,8 @@ module.exports = {
      extensions: ['.tsx', '.ts', '.js'],
      alias: {
        path: require.resolve("path-browserify"), // path-browserify support
+       './errors': path.resolve(__dirname, 'node_modules/media-recorder-js/src/errors.js'),
+       './mimeTypes': path.resolve(__dirname, 'node_modules/media-recorder-js/src/mimeTypes.js'),
      },
    },
    module: {
@@ -16,6 +16,10 @@ module.exports = {
    },
    resolve: {
      extensions: ['.tsx', '.ts', '.js'],
+     alias: {
+       './errors': path.resolve(__dirname, 'node_modules/media-recorder-js/src/errors.js'),
+       './mimeTypes': path.resolve(__dirname, 'node_modules/media-recorder-js/src/mimeTypes.js'),
+     },
      modules: [path.resolve(__dirname, 'src'), 'node_modules'],
    },
    externals: {
@@ -16,6 +16,10 @@ module.exports = {
    },
    resolve: {
      extensions: ['.tsx', '.ts', '.js'],
+     alias: {
+       './errors': path.resolve(__dirname, 'node_modules/media-recorder-js/src/errors.js'),
+       './mimeTypes': path.resolve(__dirname, 'node_modules/media-recorder-js/src/mimeTypes.js'),
+     },
      modules: [path.resolve(__dirname, 'src'), 'node_modules'],
    },
    externals: {
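
A note on the webpack changes above: the './errors' and './mimeTypes' aliases are presumably needed because media-recorder-js ships sources whose internal relative imports do not resolve from the consuming bundle (this rationale is an assumption; the diff itself does not state it). Since resolve.alias matches request strings regardless of which module issued them, any other module importing './errors' or './mimeTypes' would be remapped as well, which is worth keeping in mind. Reduced to its essentials, the addition looks like this:

    // webpack config sketch (CommonJS, as in the configs above)
    const path = require('path');

    module.exports = {
      resolve: {
        extensions: ['.tsx', '.ts', '.js'],
        alias: {
          // Point the library's internal relative imports at its source files.
          './errors': path.resolve(__dirname, 'node_modules/media-recorder-js/src/errors.js'),
          './mimeTypes': path.resolve(__dirname, 'node_modules/media-recorder-js/src/mimeTypes.js'),
        },
      },
    };
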