@micromag/screen-conversation 0.4.71 → 0.4.74

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/es/index.d.ts +2 -2
  2. package/es/index.js +549 -403
  3. package/package.json +18 -17
package/es/index.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import * as react_jsx_runtime from 'react/jsx-runtime';
2
2
  import { ForwardedRef } from 'react';
3
- import { HeadingElement, BackgroundElement, Header, Footer, Conversation, Transitions } from '@micromag/core';
3
+ import { HeadingElement, BackgroundElement, Header, Footer, Conversation, Transitions, MediaElement } from '@micromag/core';
4
4
 
5
5
  interface ConversationScreenProps {
6
6
  title?: HeadingElement | null;
@@ -15,7 +15,7 @@ interface ConversationScreenProps {
15
15
  type?: string | null;
16
16
  conversation?: Conversation | null;
17
17
  transitions?: Transitions | null;
18
- mediaRef?: ForwardedRef<HTMLMediaElement> | null;
18
+ mediaRef?: ForwardedRef<MediaElement> | null;
19
19
  className?: string | null;
20
20
  }
21
21
  declare function ConversationScreen({ title, timing: timingMode, readingSpeed, spacing, background, header, footer, current, preload, type, conversation, transitions, mediaRef: customMediaRef, className, }: ConversationScreenProps): react_jsx_runtime.JSX.Element;
package/es/index.js CHANGED
@@ -1,10 +1,6 @@
1
1
  import { FormattedMessage, defineMessage } from 'react-intl';
2
- import _objectSpread from '@babel/runtime/helpers/objectSpread2';
3
- import _defineProperty from '@babel/runtime/helpers/defineProperty';
4
- import _toConsumableArray from '@babel/runtime/helpers/toConsumableArray';
5
- import _slicedToArray from '@babel/runtime/helpers/slicedToArray';
6
2
  import classNames from 'classnames';
7
- import { useState, useMemo, useEffect, useRef, useCallback } from 'react';
3
+ import { useState, useEffect, useRef, useCallback, useMemo } from 'react';
8
4
  import { v1 } from 'uuid';
9
5
  import { PlayIcon, PauseIcon, Button, ScreenElement, Transitions } from '@micromag/core/components';
10
6
  import { useScreenSize, useViewerContext, useViewerWebView, usePlaybackContext, usePlaybackMediaRef, useScreenRenderContext } from '@micromag/core/contexts';
@@ -17,77 +13,137 @@ import Header from '@micromag/element-header';
17
13
  import Heading from '@micromag/element-heading';
18
14
  import Layout from '@micromag/element-layout';
19
15
  import Scroll from '@micromag/element-scroll';
16
+ import { c } from 'react/compiler-runtime';
20
17
  import Text from '@micromag/element-text';
21
18
  import Visual from '@micromag/element-visual';
22
19
  import Audio from '@micromag/element-audio';
23
- import { jsxs, jsx } from 'react/jsx-runtime';
20
+ import { jsx, jsxs } from 'react/jsx-runtime';
24
21
 
25
22
  var styles = {"container":"micromag-screen-conversation-container","background":"micromag-screen-conversation-background","content":"micromag-screen-conversation-content","empty":"micromag-screen-conversation-empty","title":"micromag-screen-conversation-title","footer":"micromag-screen-conversation-footer","disabled":"micromag-screen-conversation-disabled","layout":"micromag-screen-conversation-layout","messageContainer":"micromag-screen-conversation-messageContainer","message":"micromag-screen-conversation-message","normalLeft":"micromag-screen-conversation-normalLeft","inBetweenLeft":"micromag-screen-conversation-inBetweenLeft","normalRight":"micromag-screen-conversation-normalRight","inBetweenRight":"micromag-screen-conversation-inBetweenRight","last":"micromag-screen-conversation-last","messageBody":"micromag-screen-conversation-messageBody","speakerDetails":"micromag-screen-conversation-speakerDetails","right":"micromag-screen-conversation-right","avatarContainer":"micromag-screen-conversation-avatarContainer","avatar":"micromag-screen-conversation-avatar","imageContainer":"micromag-screen-conversation-imageContainer","withAnimation":"micromag-screen-conversation-withAnimation","loadingContainer":"micromag-screen-conversation-loadingContainer","loading":"micromag-screen-conversation-loading","dot":"micromag-screen-conversation-dot","loadingSpeakerName":"micromag-screen-conversation-loadingSpeakerName","audioMessageContainer":"micromag-screen-conversation-audioMessageContainer","playButtonContainer":"micromag-screen-conversation-playButtonContainer","button":"micromag-screen-conversation-button","audioMessage":"micromag-screen-conversation-audioMessage","icon":"micromag-screen-conversation-icon"};
26
23
 
27
- function ConversationAudioAttachment(_ref) {
28
- var _ref$audio = _ref.audio,
29
- audio = _ref$audio === void 0 ? null : _ref$audio,
30
- _ref$audioEventsChann = _ref.audioEventsChannelName,
31
- audioEventsChannelName = _ref$audioEventsChann === void 0 ? null : _ref$audioEventsChann,
32
- _ref$messageId = _ref.messageId,
33
- messageId = _ref$messageId === void 0 ? null : _ref$messageId,
34
- _ref$nextAudioMessage = _ref.nextAudioMessageId,
35
- nextAudioMessageId = _ref$nextAudioMessage === void 0 ? null : _ref$nextAudioMessage,
36
- _ref$className = _ref.className,
37
- className = _ref$className === void 0 ? null : _ref$className;
38
- var _useState = useState(true),
39
- _useState2 = _slicedToArray(_useState, 2),
40
- paused = _useState2[0],
41
- setPaused = _useState2[1];
42
- var audioEventsChannel = useMemo(function () {
43
- return new BroadcastChannel(audioEventsChannelName);
44
- }, [audioEventsChannelName]);
45
- useEffect(function () {
46
- var onMessage = function onMessage(message) {
47
- var _message$data = message.data,
48
- type = _message$data.type,
49
- id = _message$data.id;
50
- switch (type) {
51
- case 'play':
52
- setPaused(id !== messageId);
53
- break;
54
- }
24
+ function ConversationAudioAttachment(t0) {
25
+ const $ = c(29);
26
+ const {
27
+ audio: t1,
28
+ audioEventsChannelName: t2,
29
+ messageId: t3,
30
+ nextAudioMessageId: t4,
31
+ className: t5
32
+ } = t0;
33
+ const audio = t1 === undefined ? null : t1;
34
+ const audioEventsChannelName = t2 === undefined ? null : t2;
35
+ const messageId = t3 === undefined ? null : t3;
36
+ const nextAudioMessageId = t4 === undefined ? null : t4;
37
+ const className = t5 === undefined ? null : t5;
38
+ const [paused, setPaused] = useState(true);
39
+ let t6;
40
+ if ($[0] !== audioEventsChannelName) {
41
+ t6 = new BroadcastChannel(audioEventsChannelName);
42
+ $[0] = audioEventsChannelName;
43
+ $[1] = t6;
44
+ } else {
45
+ t6 = $[1];
46
+ }
47
+ const audioEventsChannel = t6;
48
+ let t7;
49
+ if ($[2] !== audioEventsChannel || $[3] !== messageId) {
50
+ t7 = () => {
51
+ const onMessage = message => {
52
+ const {
53
+ type,
54
+ id
55
+ } = message.data;
56
+ bb25: switch (type) {
57
+ case "play":
58
+ {
59
+ setPaused(id !== messageId);
60
+ break bb25;
61
+ }
62
+ }
63
+ };
64
+ audioEventsChannel.addEventListener("message", onMessage);
65
+ return () => audioEventsChannel.removeEventListener("message", onMessage);
55
66
  };
56
- audioEventsChannel.addEventListener('message', onMessage);
57
- return function () {
58
- return audioEventsChannel.removeEventListener('message', onMessage);
67
+ $[2] = audioEventsChannel;
68
+ $[3] = messageId;
69
+ $[4] = t7;
70
+ } else {
71
+ t7 = $[4];
72
+ }
73
+ let t8;
74
+ if ($[5] !== audioEventsChannel) {
75
+ t8 = [audioEventsChannel];
76
+ $[5] = audioEventsChannel;
77
+ $[6] = t8;
78
+ } else {
79
+ t8 = $[6];
80
+ }
81
+ useEffect(t7, t8);
82
+ let t9;
83
+ if ($[7] !== audioEventsChannel || $[8] !== messageId || $[9] !== paused) {
84
+ t9 = () => {
85
+ if (paused) {
86
+ setPaused(false);
87
+ audioEventsChannel.postMessage({
88
+ type: "play",
89
+ id: messageId
90
+ });
91
+ } else {
92
+ setPaused(true);
93
+ }
59
94
  };
60
- }, [audioEventsChannel]);
61
- var togglePaused = function togglePaused() {
62
- if (paused) {
63
- setPaused(false);
64
- audioEventsChannel.postMessage({
65
- type: 'play',
66
- id: messageId
67
- });
68
- } else {
95
+ $[7] = audioEventsChannel;
96
+ $[8] = messageId;
97
+ $[9] = paused;
98
+ $[10] = t9;
99
+ } else {
100
+ t9 = $[10];
101
+ }
102
+ const togglePaused = t9;
103
+ let t10;
104
+ if ($[11] !== audioEventsChannel || $[12] !== nextAudioMessageId) {
105
+ t10 = () => {
106
+ if (nextAudioMessageId) {
107
+ audioEventsChannel.postMessage({
108
+ type: "play",
109
+ id: nextAudioMessageId
110
+ });
111
+ }
69
112
  setPaused(true);
70
- }
71
- };
72
- var onAudioEnded = function onAudioEnded() {
73
- if (nextAudioMessageId) {
74
- audioEventsChannel.postMessage({
75
- type: 'play',
76
- id: nextAudioMessageId
77
- });
78
- }
79
- setPaused(true);
80
- };
81
- return /*#__PURE__*/jsxs("div", {
82
- className: classNames([styles.audioMessageContainer, className]),
83
- children: [/*#__PURE__*/jsx("div", {
113
+ };
114
+ $[11] = audioEventsChannel;
115
+ $[12] = nextAudioMessageId;
116
+ $[13] = t10;
117
+ } else {
118
+ t10 = $[13];
119
+ }
120
+ const onAudioEnded = t10;
121
+ let t11;
122
+ if ($[14] !== className) {
123
+ t11 = classNames([styles.audioMessageContainer, className]);
124
+ $[14] = className;
125
+ $[15] = t11;
126
+ } else {
127
+ t11 = $[15];
128
+ }
129
+ let t12;
130
+ if ($[16] !== paused) {
131
+ t12 = /*#__PURE__*/jsx("div", {
84
132
  className: styles.playButtonContainer,
85
133
  children: paused ? /*#__PURE__*/jsx(PlayIcon, {
86
134
  className: styles.icon
87
135
  }) : /*#__PURE__*/jsx(PauseIcon, {
88
136
  className: styles.icon
89
137
  })
90
- }), /*#__PURE__*/jsx(Audio, {
138
+ });
139
+ $[16] = paused;
140
+ $[17] = t12;
141
+ } else {
142
+ t12 = $[17];
143
+ }
144
+ let t13;
145
+ if ($[18] !== audio || $[19] !== onAudioEnded || $[20] !== paused) {
146
+ t13 = /*#__PURE__*/jsx(Audio, {
91
147
  withWave: true,
92
148
  autoWaveHeight: true,
93
149
  className: styles.audioMessage,
@@ -95,256 +151,375 @@ function ConversationAudioAttachment(_ref) {
95
151
  paused: paused,
96
152
  updateInterval: 100,
97
153
  onEnded: onAudioEnded
98
- }), /*#__PURE__*/jsx(Button, {
154
+ });
155
+ $[18] = audio;
156
+ $[19] = onAudioEnded;
157
+ $[20] = paused;
158
+ $[21] = t13;
159
+ } else {
160
+ t13 = $[21];
161
+ }
162
+ let t14;
163
+ if ($[22] !== togglePaused) {
164
+ t14 = /*#__PURE__*/jsx(Button, {
99
165
  onClick: togglePaused,
100
166
  className: styles.button
101
- })]
102
- });
167
+ });
168
+ $[22] = togglePaused;
169
+ $[23] = t14;
170
+ } else {
171
+ t14 = $[23];
172
+ }
173
+ let t15;
174
+ if ($[24] !== t11 || $[25] !== t12 || $[26] !== t13 || $[27] !== t14) {
175
+ t15 = /*#__PURE__*/jsxs("div", {
176
+ className: t11,
177
+ children: [t12, t13, t14]
178
+ });
179
+ $[24] = t11;
180
+ $[25] = t12;
181
+ $[26] = t13;
182
+ $[27] = t14;
183
+ $[28] = t15;
184
+ } else {
185
+ t15 = $[28];
186
+ }
187
+ return t15;
103
188
  }
104
189
 
105
- function ConversationMessage(_ref) {
106
- var _ref$message = _ref.message,
107
- message = _ref$message === void 0 ? null : _ref$message,
108
- _ref$messageId = _ref.messageId,
109
- messageId = _ref$messageId === void 0 ? null : _ref$messageId,
110
- _ref$previousMessage = _ref.previousMessage,
111
- previousMessage = _ref$previousMessage === void 0 ? null : _ref$previousMessage,
112
- _ref$nextMessage = _ref.nextMessage,
113
- nextMessage = _ref$nextMessage === void 0 ? null : _ref$nextMessage,
114
- _ref$nextAudioMessage = _ref.nextAudioMessageId,
115
- nextAudioMessageId = _ref$nextAudioMessage === void 0 ? null : _ref$nextAudioMessage,
116
- _ref$nextMessageState = _ref.nextMessageState,
117
- nextMessageState = _ref$nextMessageState === void 0 ? null : _ref$nextMessageState,
118
- _ref$currentSpeaker = _ref.currentSpeaker,
119
- currentSpeaker = _ref$currentSpeaker === void 0 ? null : _ref$currentSpeaker,
120
- _ref$conversationTimi = _ref.conversationTiming,
121
- conversationTiming = _ref$conversationTimi === void 0 ? null : _ref$conversationTimi,
122
- _ref$typingTiming = _ref.typingTiming,
123
- typingTiming = _ref$typingTiming === void 0 ? null : _ref$typingTiming,
124
- _ref$onChange = _ref.onChange,
125
- onChange = _ref$onChange === void 0 ? null : _ref$onChange,
126
- _ref$withAnimation = _ref.withAnimation,
127
- withAnimation = _ref$withAnimation === void 0 ? false : _ref$withAnimation,
128
- _ref$active = _ref.active,
129
- active = _ref$active === void 0 ? false : _ref$active,
130
- _ref$isPlaying = _ref.isPlaying,
131
- isPlaying = _ref$isPlaying === void 0 ? false : _ref$isPlaying,
132
- _ref$messageStyle = _ref.messageStyle,
133
- messageStyle = _ref$messageStyle === void 0 ? null : _ref$messageStyle,
134
- _ref$speakerStyle = _ref.speakerStyle,
135
- speakerStyle = _ref$speakerStyle === void 0 ? null : _ref$speakerStyle,
136
- _ref$className = _ref.className,
137
- className = _ref$className === void 0 ? null : _ref$className,
138
- _ref$audioEventsChann = _ref.audioEventsChannelName,
139
- audioEventsChannelName = _ref$audioEventsChann === void 0 ? null : _ref$audioEventsChann;
140
- var _ref2 = message || {},
141
- messageBody = _ref2.message,
142
- _ref2$image = _ref2.image,
143
- image = _ref2$image === void 0 ? null : _ref2$image,
144
- audio = _ref2.audio,
145
- _ref2$putAudioBeforeT = _ref2.putAudioBeforeText,
146
- putAudioBeforeText = _ref2$putAudioBeforeT === void 0 ? false : _ref2$putAudioBeforeT;
147
- var _ref3 = currentSpeaker || {},
148
- _ref3$avatar = _ref3.avatar,
149
- avatar = _ref3$avatar === void 0 ? null : _ref3$avatar,
150
- speakerName = _ref3.name,
151
- _ref3$side = _ref3.side,
152
- side = _ref3$side === void 0 ? 'left' : _ref3$side,
153
- currentSpeakerId = _ref3.id,
154
- color = _ref3.color;
155
- var _ref4 = avatar || {},
156
- _ref4$url = _ref4.url,
157
- avatarUrl = _ref4$url === void 0 ? null : _ref4$url;
158
- var right = side === 'right';
159
- var isPrevSpeakerTheSame = previousMessage !== null && previousMessage.speaker === currentSpeakerId;
160
- var isNextSpeakerTheSame = nextMessage !== null && nextMessage.speaker === currentSpeakerId;
161
-
162
- // Timing
163
- var _useState = useState(withAnimation ? 'pause' : 'send'),
164
- _useState2 = _slicedToArray(_useState, 2),
165
- messageState = _useState2[0],
166
- setMessageState = _useState2[1];
167
- var pauseBeforeTyping = conversationTiming;
168
- var typingDuration = typingTiming;
169
- useEffect(function () {
170
- if (!withAnimation || !isPlaying) {
171
- return function () {};
172
- }
173
- var timeout = null;
174
- if (messageState === 'pause') {
175
- timeout = setTimeout(function () {
176
- return setMessageState('typing');
177
- }, pauseBeforeTyping);
178
- } else if (messageState === 'typing') {
179
- timeout = setTimeout(function () {
180
- return setMessageState('send');
181
- }, typingDuration);
182
- }
183
- return function () {
184
- if (timeout !== null) {
185
- clearTimeout(timeout);
190
+ function ConversationMessage(t0) {
191
+ const $ = c(41);
192
+ const {
193
+ message: t1,
194
+ messageId: t2,
195
+ previousMessage: t3,
196
+ nextMessage: t4,
197
+ nextAudioMessageId: t5,
198
+ nextMessageState: t6,
199
+ currentSpeaker: t7,
200
+ conversationTiming: t8,
201
+ typingTiming: t9,
202
+ onChange: t10,
203
+ withAnimation: t11,
204
+ active: t12,
205
+ isPlaying: t13,
206
+ messageStyle: t14,
207
+ speakerStyle: t15,
208
+ className: t16,
209
+ audioEventsChannelName: t17
210
+ } = t0;
211
+ const message = t1 === undefined ? null : t1;
212
+ const messageId = t2 === undefined ? null : t2;
213
+ const previousMessage = t3 === undefined ? null : t3;
214
+ const nextMessage = t4 === undefined ? null : t4;
215
+ const nextAudioMessageId = t5 === undefined ? null : t5;
216
+ const nextMessageState = t6 === undefined ? null : t6;
217
+ const currentSpeaker = t7 === undefined ? null : t7;
218
+ const conversationTiming = t8 === undefined ? null : t8;
219
+ const typingTiming = t9 === undefined ? null : t9;
220
+ const onChange = t10 === undefined ? null : t10;
221
+ const withAnimation = t11 === undefined ? false : t11;
222
+ const active = t12 === undefined ? false : t12;
223
+ const isPlaying = t13 === undefined ? false : t13;
224
+ const messageStyle = t14 === undefined ? null : t14;
225
+ const speakerStyle = t15 === undefined ? null : t15;
226
+ const className = t16 === undefined ? null : t16;
227
+ const audioEventsChannelName = t17 === undefined ? null : t17;
228
+ let t18;
229
+ if ($[0] !== message) {
230
+ t18 = message || {};
231
+ $[0] = message;
232
+ $[1] = t18;
233
+ } else {
234
+ t18 = $[1];
235
+ }
236
+ const {
237
+ message: messageBody,
238
+ image: t19,
239
+ audio,
240
+ putAudioBeforeText: t20
241
+ } = t18;
242
+ const image = t19 === undefined ? null : t19;
243
+ const putAudioBeforeText = t20 === undefined ? false : t20;
244
+ let t21;
245
+ if ($[2] !== currentSpeaker) {
246
+ t21 = currentSpeaker || {};
247
+ $[2] = currentSpeaker;
248
+ $[3] = t21;
249
+ } else {
250
+ t21 = $[3];
251
+ }
252
+ const {
253
+ avatar: t22,
254
+ name: speakerName,
255
+ side: t23,
256
+ id: currentSpeakerId,
257
+ color
258
+ } = t21;
259
+ const avatar = t22 === undefined ? null : t22;
260
+ const side = t23 === undefined ? "left" : t23;
261
+ let t24;
262
+ if ($[4] !== avatar) {
263
+ t24 = avatar || {};
264
+ $[4] = avatar;
265
+ $[5] = t24;
266
+ } else {
267
+ t24 = $[5];
268
+ }
269
+ const {
270
+ url: t25
271
+ } = t24;
272
+ const avatarUrl = t25 === undefined ? null : t25;
273
+ const right = side === "right";
274
+ const isPrevSpeakerTheSame = previousMessage !== null && previousMessage.speaker === currentSpeakerId;
275
+ const isNextSpeakerTheSame = nextMessage !== null && nextMessage.speaker === currentSpeakerId;
276
+ const [messageState, setMessageState] = useState(withAnimation ? "pause" : "send");
277
+ const pauseBeforeTyping = conversationTiming;
278
+ const typingDuration = typingTiming;
279
+ let t26;
280
+ let t27;
281
+ if ($[6] !== isPlaying || $[7] !== messageState || $[8] !== pauseBeforeTyping || $[9] !== typingDuration || $[10] !== withAnimation) {
282
+ t26 = () => {
283
+ if (!withAnimation || !isPlaying) {
284
+ return _temp;
285
+ }
286
+ let timeout = null;
287
+ if (messageState === "pause") {
288
+ timeout = setTimeout(() => setMessageState("typing"), pauseBeforeTyping);
289
+ } else {
290
+ if (messageState === "typing") {
291
+ timeout = setTimeout(() => setMessageState("send"), typingDuration);
292
+ }
186
293
  }
294
+ return () => {
295
+ if (timeout !== null) {
296
+ clearTimeout(timeout);
297
+ }
298
+ };
187
299
  };
188
- }, [withAnimation, isPlaying, messageState, setMessageState, pauseBeforeTyping, typingDuration]);
189
- useEffect(function () {
190
- if (messageState !== 'pause' && onChange !== null) {
191
- onChange(messageState);
192
- }
193
- }, [messageState]);
194
- var betweenStyle = isNextSpeakerTheSame && nextMessageState;
195
- return messageState !== 'pause' ? /*#__PURE__*/jsx("div", {
196
- className: classNames([styles.messageContainer, className, _defineProperty(_defineProperty({}, styles.withAnimation, withAnimation === true), styles.right, right)]),
197
- children: messageState === 'typing' ? /*#__PURE__*/jsxs("div", {
198
- className: styles.loadingContainer,
199
- children: [/*#__PURE__*/jsxs("div", {
200
- className: styles.loading,
201
- children: [/*#__PURE__*/jsx("div", {
202
- className: styles.dot
203
- }), /*#__PURE__*/jsx("div", {
204
- className: styles.dot
300
+ t27 = [withAnimation, isPlaying, messageState, setMessageState, pauseBeforeTyping, typingDuration];
301
+ $[6] = isPlaying;
302
+ $[7] = messageState;
303
+ $[8] = pauseBeforeTyping;
304
+ $[9] = typingDuration;
305
+ $[10] = withAnimation;
306
+ $[11] = t26;
307
+ $[12] = t27;
308
+ } else {
309
+ t26 = $[11];
310
+ t27 = $[12];
311
+ }
312
+ useEffect(t26, t27);
313
+ let t28;
314
+ if ($[13] !== messageState || $[14] !== onChange) {
315
+ t28 = () => {
316
+ if (messageState !== "pause" && onChange !== null) {
317
+ onChange(messageState);
318
+ }
319
+ };
320
+ $[13] = messageState;
321
+ $[14] = onChange;
322
+ $[15] = t28;
323
+ } else {
324
+ t28 = $[15];
325
+ }
326
+ let t29;
327
+ if ($[16] !== messageState) {
328
+ t29 = [messageState];
329
+ $[16] = messageState;
330
+ $[17] = t29;
331
+ } else {
332
+ t29 = $[17];
333
+ }
334
+ useEffect(t28, t29);
335
+ const betweenStyle = isNextSpeakerTheSame && nextMessageState;
336
+ let t30;
337
+ if ($[18] !== active || $[19] !== audio || $[20] !== audioEventsChannelName || $[21] !== avatarUrl || $[22] !== betweenStyle || $[23] !== className || $[24] !== color || $[25] !== image || $[26] !== isNextSpeakerTheSame || $[27] !== isPlaying || $[28] !== isPrevSpeakerTheSame || $[29] !== messageBody || $[30] !== messageId || $[31] !== messageState || $[32] !== messageStyle || $[33] !== nextAudioMessageId || $[34] !== putAudioBeforeText || $[35] !== right || $[36] !== side || $[37] !== speakerName || $[38] !== speakerStyle || $[39] !== withAnimation) {
338
+ t30 = messageState !== "pause" ? /*#__PURE__*/jsx("div", {
339
+ className: classNames([styles.messageContainer, className, {
340
+ [styles.withAnimation]: withAnimation === true,
341
+ [styles.right]: right
342
+ }]),
343
+ children: messageState === "typing" ? /*#__PURE__*/jsxs("div", {
344
+ className: styles.loadingContainer,
345
+ children: [/*#__PURE__*/jsxs("div", {
346
+ className: styles.loading,
347
+ children: [/*#__PURE__*/jsx("div", {
348
+ className: styles.dot
349
+ }), /*#__PURE__*/jsx("div", {
350
+ className: styles.dot
351
+ }), /*#__PURE__*/jsx("div", {
352
+ className: styles.dot
353
+ })]
205
354
  }), /*#__PURE__*/jsx("div", {
206
- className: styles.dot
355
+ className: styles.loadingSpeakerName,
356
+ children: speakerName
207
357
  })]
208
- }), /*#__PURE__*/jsx("div", {
209
- className: styles.loadingSpeakerName,
210
- children: speakerName
211
- })]
212
- }) : /*#__PURE__*/jsxs("div", {
213
- className: classNames([styles.message, _defineProperty(_defineProperty(_defineProperty(_defineProperty(_defineProperty(_defineProperty({}, styles.normalRight, right), styles.nextTheSame, isNextSpeakerTheSame === true && isPrevSpeakerTheSame), styles.inBetweenRight, betweenStyle && right), styles.normalLeft, !right), styles.inBetweenLeft, betweenStyle && !right), styles.last, isNextSpeakerTheSame === false)]),
214
- style: _objectSpread({}, getStyleFromColor(color)),
215
- children: [!isPrevSpeakerTheSame ? /*#__PURE__*/jsxs("div", {
216
- className: classNames([styles.speakerDetails, _defineProperty({}, styles.right, side === 'right')]),
217
- children: [avatarUrl !== null ? /*#__PURE__*/jsx("div", {
218
- className: classNames([styles.avatarContainer, _defineProperty({}, styles.right, side === 'right')]),
219
- children: /*#__PURE__*/jsx("img", {
220
- className: styles.avatar,
221
- src: avatarUrl,
222
- alt: speakerName,
223
- loading: "lazy"
224
- })
225
- }) : null, /*#__PURE__*/jsx(Text, {
226
- body: speakerName,
227
- textStyle: speakerStyle
358
+ }) : /*#__PURE__*/jsxs("div", {
359
+ className: classNames([styles.message, {
360
+ [styles.normalRight]: right,
361
+ [styles.nextTheSame]: isNextSpeakerTheSame === true && isPrevSpeakerTheSame,
362
+ [styles.inBetweenRight]: betweenStyle && right,
363
+ [styles.normalLeft]: !right,
364
+ [styles.inBetweenLeft]: betweenStyle && !right,
365
+ [styles.last]: isNextSpeakerTheSame === false
366
+ }]),
367
+ style: {
368
+ ...getStyleFromColor(color)
369
+ },
370
+ children: [!isPrevSpeakerTheSame ? /*#__PURE__*/jsxs("div", {
371
+ className: classNames([styles.speakerDetails, {
372
+ [styles.right]: side === "right"
373
+ }]),
374
+ children: [avatarUrl !== null ? /*#__PURE__*/jsx("div", {
375
+ className: classNames([styles.avatarContainer, {
376
+ [styles.right]: side === "right"
377
+ }]),
378
+ children: /*#__PURE__*/jsx("img", {
379
+ className: styles.avatar,
380
+ src: avatarUrl,
381
+ alt: speakerName,
382
+ loading: "lazy"
383
+ })
384
+ }) : null, /*#__PURE__*/jsx(Text, {
385
+ body: speakerName,
386
+ textStyle: speakerStyle
387
+ })]
388
+ }) : null, /*#__PURE__*/jsxs("div", {
389
+ className: styles.messageBody,
390
+ children: [image !== null ? /*#__PURE__*/jsx("div", {
391
+ className: styles.imageContainer,
392
+ children: /*#__PURE__*/jsx(Visual, {
393
+ media: image,
394
+ width: "100%",
395
+ playing: isPlaying,
396
+ active: active
397
+ })
398
+ }) : null, audio && putAudioBeforeText ? /*#__PURE__*/jsx(ConversationAudioAttachment, {
399
+ audio: audio,
400
+ messageId: messageId,
401
+ nextAudioMessageId: nextAudioMessageId,
402
+ audioEventsChannelName: audioEventsChannelName,
403
+ className: classNames(styles.audioAttachment, styles.beforeText)
404
+ }) : null, /*#__PURE__*/jsx(Text, {
405
+ className: styles.messageText,
406
+ body: messageBody,
407
+ textStyle: messageStyle
408
+ }), audio && !putAudioBeforeText ? /*#__PURE__*/jsx(ConversationAudioAttachment, {
409
+ audio: audio,
410
+ messageId: messageId,
411
+ nextAudioMessageId: nextAudioMessageId,
412
+ audioEventsChannelName: audioEventsChannelName,
413
+ className: classNames(styles.audioAttachment, styles.afterText)
414
+ }) : null]
228
415
  })]
229
- }) : null, /*#__PURE__*/jsxs("div", {
230
- className: styles.messageBody,
231
- children: [image !== null ? /*#__PURE__*/jsx("div", {
232
- className: styles.imageContainer,
233
- children: /*#__PURE__*/jsx(Visual, {
234
- media: image,
235
- width: "100%",
236
- playing: isPlaying,
237
- active: active
238
- })
239
- }) : null, audio && putAudioBeforeText ? /*#__PURE__*/jsx(ConversationAudioAttachment, {
240
- audio: audio,
241
- messageId: messageId,
242
- nextAudioMessageId: nextAudioMessageId,
243
- audioEventsChannelName: audioEventsChannelName,
244
- className: classNames(styles.audioAttachment, styles.beforeText)
245
- }) : null, /*#__PURE__*/jsx(Text, {
246
- className: styles.messageText,
247
- body: messageBody,
248
- textStyle: messageStyle
249
- }), audio && !putAudioBeforeText ? /*#__PURE__*/jsx(ConversationAudioAttachment, {
250
- audio: audio,
251
- messageId: messageId,
252
- nextAudioMessageId: nextAudioMessageId,
253
- audioEventsChannelName: audioEventsChannelName,
254
- className: classNames(styles.audioAttachment, styles.afterText)
255
- }) : null]
256
- })]
257
- })
258
- }) : null;
416
+ })
417
+ }) : null;
418
+ $[18] = active;
419
+ $[19] = audio;
420
+ $[20] = audioEventsChannelName;
421
+ $[21] = avatarUrl;
422
+ $[22] = betweenStyle;
423
+ $[23] = className;
424
+ $[24] = color;
425
+ $[25] = image;
426
+ $[26] = isNextSpeakerTheSame;
427
+ $[27] = isPlaying;
428
+ $[28] = isPrevSpeakerTheSame;
429
+ $[29] = messageBody;
430
+ $[30] = messageId;
431
+ $[31] = messageState;
432
+ $[32] = messageStyle;
433
+ $[33] = nextAudioMessageId;
434
+ $[34] = putAudioBeforeText;
435
+ $[35] = right;
436
+ $[36] = side;
437
+ $[37] = speakerName;
438
+ $[38] = speakerStyle;
439
+ $[39] = withAnimation;
440
+ $[40] = t30;
441
+ } else {
442
+ t30 = $[40];
443
+ }
444
+ return t30;
259
445
  }
446
+ function _temp() {}
260
447
 
261
- function ConversationScreen(_ref) {
262
- var _ref$title = _ref.title,
263
- title = _ref$title === void 0 ? null : _ref$title,
264
- _ref$timing = _ref.timing,
265
- timingMode = _ref$timing === void 0 ? 'sequence' : _ref$timing,
266
- _ref$readingSpeed = _ref.readingSpeed,
267
- readingSpeed = _ref$readingSpeed === void 0 ? 255 : _ref$readingSpeed,
268
- _ref$spacing = _ref.spacing,
269
- spacing = _ref$spacing === void 0 ? 20 : _ref$spacing,
270
- _ref$background = _ref.background,
271
- background = _ref$background === void 0 ? null : _ref$background,
272
- _ref$header = _ref.header,
273
- header = _ref$header === void 0 ? null : _ref$header,
274
- _ref$footer = _ref.footer,
275
- footer = _ref$footer === void 0 ? null : _ref$footer,
276
- _ref$current = _ref.current,
277
- current = _ref$current === void 0 ? true : _ref$current,
278
- _ref$preload = _ref.preload,
279
- preload = _ref$preload === void 0 ? true : _ref$preload,
280
- _ref$type = _ref.type,
281
- type = _ref$type === void 0 ? null : _ref$type,
282
- _ref$conversation = _ref.conversation,
283
- conversation = _ref$conversation === void 0 ? null : _ref$conversation,
284
- _ref$transitions = _ref.transitions,
285
- transitions = _ref$transitions === void 0 ? null : _ref$transitions,
286
- _ref$mediaRef = _ref.mediaRef,
287
- customMediaRef = _ref$mediaRef === void 0 ? null : _ref$mediaRef,
288
- _ref$className = _ref.className,
289
- className = _ref$className === void 0 ? null : _ref$className;
290
- var _useScreenSize = useScreenSize(),
291
- width = _useScreenSize.width,
292
- height = _useScreenSize.height,
293
- resolution = _useScreenSize.resolution;
294
- var _useViewerContext = useViewerContext(),
295
- viewerTopHeight = _useViewerContext.topHeight,
296
- viewerBottomHeight = _useViewerContext.bottomHeight,
297
- viewerBottomSidesWidth = _useViewerContext.bottomSidesWidth;
298
- var _useViewerWebView = useViewerWebView(),
299
- openWebView = _useViewerWebView.open;
300
- var trackScreenEvent = useTrackScreenEvent(type);
301
- var _usePlaybackContext = usePlaybackContext(),
302
- muted = _usePlaybackContext.muted;
303
- var _usePlaybackMediaRef = usePlaybackMediaRef(current, true),
304
- mediaRef = _usePlaybackMediaRef.ref,
305
- _usePlaybackMediaRef$ = _usePlaybackMediaRef.isCurrent,
306
- isCurrentMedia = _usePlaybackMediaRef$ === void 0 ? false : _usePlaybackMediaRef$;
307
- var audioEventsChannel = new BroadcastChannel("conversation_".concat(v1(), "_audioEvents"));
308
- var _useScreenRenderConte = useScreenRenderContext(),
309
- isView = _useScreenRenderConte.isView,
310
- isPreview = _useScreenRenderConte.isPreview,
311
- isPlaceholder = _useScreenRenderConte.isPlaceholder,
312
- isEdit = _useScreenRenderConte.isEdit,
313
- isStatic = _useScreenRenderConte.isStatic,
314
- isCapture = _useScreenRenderConte.isCapture;
315
- var backgroundPlaying = current && (isView || isEdit) && (isCurrentMedia || !isView);
316
- var mediaShouldLoad = current || preload;
317
- var withAnimation = isView && !isStatic && timingMode === 'sequence';
318
- var _ref2 = conversation || {},
319
- _ref2$speakers = _ref2.speakers,
320
- speakers = _ref2$speakers === void 0 ? null : _ref2$speakers,
321
- _ref2$messages = _ref2.messages,
322
- messages = _ref2$messages === void 0 ? [] : _ref2$messages,
323
- messageStyle = _ref2.messageStyle,
324
- speakerStyle = _ref2.speakerStyle;
325
- var _useState = useState([]),
326
- _useState2 = _slicedToArray(_useState, 2),
327
- conversationState = _useState2[0],
328
- setConversationState = _useState2[1];
329
- var chatBottomRef = useRef(null);
330
- var hasHeader = isHeaderFilled(header);
331
- var hasFooter = isFooterFilled(footer);
332
- var footerProps = getFooterProps(footer, {
333
- isView: isView,
334
- current: current,
335
- openWebView: openWebView,
336
- isPreview: isPreview
448
+ /* eslint-disable react/jsx-props-no-spreading */
449
+ function ConversationScreen({
450
+ // layout,
451
+ title = null,
452
+ timing: timingMode = 'sequence',
453
+ readingSpeed = 255,
454
+ spacing = 20,
455
+ background = null,
456
+ header = null,
457
+ footer = null,
458
+ current = true,
459
+ preload = true,
460
+ type = null,
461
+ conversation = null,
462
+ transitions = null,
463
+ mediaRef: customMediaRef = null,
464
+ className = null
465
+ }) {
466
+ const {
467
+ width,
468
+ height,
469
+ resolution
470
+ } = useScreenSize();
471
+ const {
472
+ topHeight: viewerTopHeight,
473
+ bottomHeight: viewerBottomHeight,
474
+ bottomSidesWidth: viewerBottomSidesWidth
475
+ } = useViewerContext();
476
+ const {
477
+ open: openWebView
478
+ } = useViewerWebView();
479
+ const trackScreenEvent = useTrackScreenEvent(type);
480
+ const {
481
+ muted
482
+ } = usePlaybackContext();
483
+ const {
484
+ ref: mediaRef,
485
+ isCurrent: isCurrentMedia = false
486
+ } = usePlaybackMediaRef(current, true);
487
+ const audioEventsChannel = new BroadcastChannel(`conversation_${v1()}_audioEvents`);
488
+ const {
489
+ isView,
490
+ isPreview,
491
+ isPlaceholder,
492
+ isEdit,
493
+ isStatic,
494
+ isCapture
495
+ } = useScreenRenderContext();
496
+ const backgroundPlaying = current && (isView || isEdit) && (isCurrentMedia || !isView);
497
+ const mediaShouldLoad = current || preload;
498
+ const withAnimation = isView && !isStatic && timingMode === 'sequence';
499
+ const {
500
+ speakers = null,
501
+ messages = [],
502
+ messageStyle,
503
+ speakerStyle
504
+ } = conversation || {};
505
+ const [conversationState, setConversationState] = useState([]);
506
+ const chatBottomRef = useRef(null);
507
+ const hasHeader = isHeaderFilled(header);
508
+ const hasFooter = isFooterFilled(footer);
509
+ const footerProps = getFooterProps(footer, {
510
+ isView,
511
+ current,
512
+ openWebView,
513
+ isPreview
337
514
  });
338
- var hasTitle = isTextFilled(title);
339
- var _useDimensionObserver = useDimensionObserver(),
340
- contentRef = _useDimensionObserver.ref,
341
- scrollHeight = _useDimensionObserver.height;
342
- var scrollRef = useRef(null);
343
- var _useState3 = useState(false),
344
- _useState4 = _slicedToArray(_useState3, 2),
345
- scrolledBottom = _useState4[0],
346
- setScrolledBottom = _useState4[1];
347
- useEffect(function () {
515
+ const hasTitle = isTextFilled(title);
516
+ const {
517
+ ref: contentRef,
518
+ height: scrollHeight
519
+ } = useDimensionObserver();
520
+ const scrollRef = useRef(null);
521
+ const [scrolledBottom, setScrolledBottom] = useState(false);
522
+ useEffect(() => {
348
523
  if (withAnimation && scrollRef.current !== null && scrolledBottom) {
349
524
  scrollRef.current.scrollTo({
350
525
  top: scrollHeight,
@@ -352,9 +527,9 @@ function ConversationScreen(_ref) {
352
527
  });
353
528
  }
354
529
  }, [scrollHeight, withAnimation]);
355
- var animationFinished = messages.length === conversationState.length;
356
- var conversationStateChange = useCallback(function (state) {
357
- var newConversationState = _toConsumableArray(conversationState);
530
+ const animationFinished = messages.length === conversationState.length;
531
+ const conversationStateChange = useCallback(state => {
532
+ const newConversationState = [...conversationState];
358
533
  if (state === 'send') {
359
534
  newConversationState.push(true);
360
535
  setConversationState(newConversationState);
@@ -362,40 +537,36 @@ function ConversationScreen(_ref) {
362
537
  }, [conversationState, setConversationState]);
363
538
 
364
539
  // sequence timings
365
- var defaultHesitationDelay = 1500;
366
- var imageReadDelay = 5000; // 5 seconds
367
- var millisecondsPerWord = 60 * 1000 / readingSpeed;
368
- var filteredMessages = (messages || []).filter(function (m) {
369
- return m !== null;
370
- });
371
- var timings = filteredMessages.map(function (messageParams, messageIndex) {
540
+ const defaultHesitationDelay = 1500;
541
+ const imageReadDelay = 5000; // 5 seconds
542
+ const millisecondsPerWord = 60 * 1000 / readingSpeed;
543
+ const filteredMessages = (messages || []).filter(m => m !== null);
544
+ const timings = filteredMessages.map((messageParams, messageIndex) => {
372
545
  if (messageIndex === 0) {
373
546
  return 0;
374
547
  }
375
- var _ref3 = messageParams || {},
376
- _ref3$timing = _ref3.timing,
377
- timing = _ref3$timing === void 0 ? null : _ref3$timing,
378
- _ref3$message = _ref3.message,
379
- message = _ref3$message === void 0 ? null : _ref3$message,
380
- image = _ref3.image,
381
- audio = _ref3.audio,
382
- timingOverrides = _ref3.timingOverrides;
548
+ const {
549
+ timing = null,
550
+ message = null,
551
+ image,
552
+ audio,
553
+ timingOverrides
554
+ } = messageParams || {};
383
555
  if (timing !== null) {
384
556
  return timing;
385
557
  }
386
- if (timingOverrides !== null && timingOverrides !== void 0 && timingOverrides.enabled && Number.isFinite(timingOverrides === null || timingOverrides === void 0 ? void 0 : timingOverrides.writingDuration)) {
558
+ if (timingOverrides?.enabled && Number.isFinite(timingOverrides?.writingDuration)) {
387
559
  return timingOverrides.writingDuration * 1000; // seconds to milliseconds
388
560
  }
389
561
 
390
562
  // if the current message has an audio attachment, use the time it takes to record that message
391
563
  if (audio) {
392
- var _audio$metadata;
393
- return (_audio$metadata = audio.metadata) === null || _audio$metadata === void 0 ? void 0 : _audio$metadata.duration;
564
+ return audio.metadata?.duration;
394
565
  }
395
566
 
396
567
  // counting words: only keep whitespaces and alphanumeric characters, then split of whitespaces
397
- var wordCount = message ? message.replace(/[^\w\d\s]/g, '').trim().split(/\s/g).length : 0;
398
- var finalTimeMs = wordCount * millisecondsPerWord;
568
+ const wordCount = message ? message.replace(/[^\w\d\s]/g, '').trim().split(/\s/g).length : 0;
569
+ let finalTimeMs = wordCount * millisecondsPerWord;
399
570
 
400
571
  // if the message includes an image, add some more time to "read" it
401
572
  if (image) {
@@ -403,44 +574,44 @@ function ConversationScreen(_ref) {
403
574
  }
404
575
  return finalTimeMs;
405
576
  });
406
- var hesitationTimings = filteredMessages.map(function (messageParams, messageIndex) {
407
- var timingOverrides = messageParams.timingOverrides;
408
- if (messageIndex !== 0 && timingOverrides !== null && timingOverrides !== void 0 && timingOverrides.enabled && Number.isFinite(timingOverrides === null || timingOverrides === void 0 ? void 0 : timingOverrides.appearDelay)) {
409
- return timingOverrides.appearDelay * 1000; // seconds to milliseconds
577
+ const hesitationTimings = filteredMessages.map((messageParams_0, messageIndex_0) => {
578
+ const {
579
+ timingOverrides: timingOverrides_0
580
+ } = messageParams_0;
581
+ if (messageIndex_0 !== 0 && timingOverrides_0?.enabled && Number.isFinite(timingOverrides_0?.appearDelay)) {
582
+ return timingOverrides_0.appearDelay * 1000; // seconds to milliseconds
410
583
  }
411
584
  return defaultHesitationDelay;
412
585
  });
413
- var messagesUniqueId = useMemo(function () {
414
- return (messages || []).map(function () {
415
- return v1();
416
- });
417
- }, [messages]);
586
+ const messagesUniqueId = useMemo(() => (messages || []).map(() => v1()), [messages]);
418
587
 
419
588
  // scroll
420
- var transitionDisabled = isStatic || isCapture || isPlaceholder || isPreview || isEdit;
421
- var scrollingDisabled = !isEdit && transitionDisabled || !current;
422
- var showFooter = animationFinished && !isPlaceholder && hasFooter || !withAnimation;
423
- var onScrolledBottom = useCallback(function (_ref4) {
424
- var initial = _ref4.initial;
589
+ const transitionDisabled = isStatic || isCapture || isPlaceholder || isPreview || isEdit;
590
+ const scrollingDisabled = !isEdit && transitionDisabled || !current;
591
+ const showFooter = animationFinished && !isPlaceholder && hasFooter || !withAnimation;
592
+ const onScrolledBottom = useCallback(({
593
+ initial
594
+ }) => {
425
595
  if (initial) {
426
596
  trackScreenEvent('scroll', 'Screen');
427
597
  }
428
598
  setScrolledBottom(true);
429
599
  }, [trackScreenEvent]);
430
- var onScrolledNotBottom = useCallback(function () {
600
+ const onScrolledNotBottom = useCallback(() => {
431
601
  setScrolledBottom(false);
432
602
  }, [setScrolledBottom]);
433
- var onScrolledTrigger = useCallback(function () {
434
- var trigger = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
603
+ const onScrolledTrigger = useCallback((trigger = null) => {
435
604
  if (trigger !== null) {
436
- var scrollPercent = Math.round(trigger * 100);
605
+ const scrollPercent = Math.round(trigger * 100);
437
606
  trackScreenEvent('scroll', scrollPercent, {
438
- scrollPercent: scrollPercent
607
+ scrollPercent
439
608
  });
440
609
  }
441
610
  }, [trackScreenEvent]);
442
611
  return /*#__PURE__*/jsxs("div", {
443
- className: classNames([styles.container, className, _defineProperty({}, styles.isPlaceholder, isPlaceholder)]),
612
+ className: classNames([styles.container, className, {
613
+ [styles.isPlaceholder]: isPlaceholder
614
+ }]),
444
615
  "data-screen-ready": true,
445
616
  children: [/*#__PURE__*/jsx(Container, {
446
617
  width: width,
@@ -467,15 +638,14 @@ function ConversationScreen(_ref) {
467
638
  style: {
468
639
  paddingBottom: spacing
469
640
  },
470
- children: /*#__PURE__*/jsx(Header, _objectSpread({}, header))
641
+ children: /*#__PURE__*/jsx(Header, {
642
+ ...header
643
+ })
471
644
  }, "header") : null, /*#__PURE__*/jsx(ScreenElement, {
472
645
  placeholder: "conversation",
473
646
  emptyLabel: /*#__PURE__*/jsx(FormattedMessage, {
474
647
  id: "NmCfTO",
475
- defaultMessage: [{
476
- "type": 0,
477
- "value": "Conversation"
478
- }]
648
+ defaultMessage: "Conversation"
479
649
  }),
480
650
  emptyClassName: styles.empty,
481
651
  isEmpty: messages.length === 0 && title === null,
@@ -485,28 +655,27 @@ function ConversationScreen(_ref) {
485
655
  disabled: transitionDisabled
486
656
  // delay={0}
487
657
  ,
488
- children: [hasTitle ? /*#__PURE__*/jsx(Heading, _objectSpread(_objectSpread({}, title), {}, {
658
+ children: [hasTitle ? /*#__PURE__*/jsx(Heading, {
659
+ ...title,
489
660
  className: styles.title,
490
661
  isEmpty: title === null
491
- })) : null, /*#__PURE__*/jsx("div", {
662
+ }) : null, /*#__PURE__*/jsx("div", {
492
663
  className: styles.conversation,
493
- children: filteredMessages.map(function (m, messageI) {
494
- var previousMessage = messageI !== 0 ? messages[messageI - 1] : null;
495
- var nextMessage = messageI + 1 < messages.length ? messages[messageI + 1] : null;
496
- var speaker = m.speaker;
497
- var currentSpeaker = (speakers || []).find(function (s) {
498
- return s.id === speaker;
499
- }) || null;
500
- var shouldPlay = messageI === 0 || conversationState[messageI - 1] === true;
501
- var pauseTiming = hesitationTimings[messageI];
502
- var typingTiming = timings[messageI];
503
- var messageId = "".concat(m.message, "-").concat(messagesUniqueId[messageI]);
504
- var nextAudioMessage = filteredMessages.slice(messageI + 1).find(function (c) {
505
- return c.audio != null;
506
- });
507
- var nextAudioMessageId = nextAudioMessage ? "".concat(m.message, "-").concat(messagesUniqueId[filteredMessages.indexOf(nextAudioMessage)]) : null;
664
+ children: filteredMessages.map((m_0, messageI) => {
665
+ const previousMessage = messageI !== 0 ? messages[messageI - 1] : null;
666
+ const nextMessage = messageI + 1 < messages.length ? messages[messageI + 1] : null;
667
+ const {
668
+ speaker
669
+ } = m_0;
670
+ const currentSpeaker = (speakers || []).find(s => s.id === speaker) || null;
671
+ const shouldPlay = messageI === 0 || conversationState[messageI - 1] === true;
672
+ const pauseTiming = hesitationTimings[messageI];
673
+ const typingTiming = timings[messageI];
674
+ const messageId = `${m_0.message}-${messagesUniqueId[messageI]}`;
675
+ const nextAudioMessage = filteredMessages.slice(messageI + 1).find(c => c.audio != null);
676
+ const nextAudioMessageId = nextAudioMessage ? `${m_0.message}-${messagesUniqueId[filteredMessages.indexOf(nextAudioMessage)]}` : null;
508
677
  return /*#__PURE__*/jsx(ConversationMessage, {
509
- message: m,
678
+ message: m_0,
510
679
  messageId: messageId,
511
680
  previousMessage: previousMessage,
512
681
  nextMessage: nextMessage,
@@ -526,13 +695,17 @@ function ConversationScreen(_ref) {
526
695
  }, messageId);
527
696
  })
528
697
  }), showFooter ? /*#__PURE__*/jsx("div", {
529
- className: classNames([styles.footer, _defineProperty({}, styles.disabled, !scrolledBottom)]),
698
+ className: classNames([styles.footer, {
699
+ [styles.disabled]: !scrolledBottom
700
+ }]),
530
701
  style: {
531
702
  paddingLeft: Math.max(viewerBottomSidesWidth - spacing, 0),
532
703
  paddingRight: Math.max(viewerBottomSidesWidth - spacing, 0),
533
704
  paddingTop: spacing
534
705
  },
535
- children: /*#__PURE__*/jsx(Footer, _objectSpread({}, footerProps))
706
+ children: /*#__PURE__*/jsx(Footer, {
707
+ ...footerProps
708
+ })
536
709
  }) : null, /*#__PURE__*/jsx("div", {
537
710
  ref: chatBottomRef
538
711
  })]
@@ -562,19 +735,13 @@ var definition = {
562
735
  group: {
563
736
  label: defineMessage({
564
737
  id: "fIawTr",
565
- defaultMessage: [{
566
- "type": 0,
567
- "value": "Text"
568
- }]
738
+ defaultMessage: "Text"
569
739
  }),
570
740
  order: 3
571
741
  },
572
742
  title: defineMessage({
573
743
  id: "rBPIgw",
574
- defaultMessage: [{
575
- "type": 0,
576
- "value": "Conversation"
577
- }]
744
+ defaultMessage: "Conversation"
578
745
  }),
579
746
  component: ConversationScreen,
580
747
  layouts: ['normal'],
@@ -605,10 +772,7 @@ var definition = {
605
772
  }],
606
773
  label: defineMessage({
607
774
  id: "6DV50M",
608
- defaultMessage: [{
609
- "type": 0,
610
- "value": "Timing"
611
- }]
775
+ defaultMessage: "Timing"
612
776
  })
613
777
  }, {
614
778
  name: 'title',
@@ -618,20 +782,14 @@ var definition = {
618
782
  },
619
783
  label: defineMessage({
620
784
  id: "N25iDO",
621
- defaultMessage: [{
622
- "type": 0,
623
- "value": "Title"
624
- }]
785
+ defaultMessage: "Title"
625
786
  })
626
787
  }, {
627
788
  name: 'conversation',
628
789
  type: 'conversation',
629
790
  label: defineMessage({
630
791
  id: "8tANs8",
631
- defaultMessage: [{
632
- "type": 0,
633
- "value": "Conversation"
634
- }]
792
+ defaultMessage: "Conversation"
635
793
  })
636
794
  }, {
637
795
  name: 'readingSpeed',
@@ -639,30 +797,21 @@ var definition = {
639
797
  defaultValue: 255,
640
798
  label: defineMessage({
641
799
  id: "QjbLZ9",
642
- defaultMessage: [{
643
- "type": 0,
644
- "value": "Reading speed (in Words Per Minute)"
645
- }]
800
+ defaultMessage: "Reading speed (in Words Per Minute)"
646
801
  })
647
802
  }, {
648
803
  name: 'background',
649
804
  type: 'background',
650
805
  label: defineMessage({
651
806
  id: "+MPZRu",
652
- defaultMessage: [{
653
- "type": 0,
654
- "value": "Background"
655
- }]
807
+ defaultMessage: "Background"
656
808
  })
657
809
  }, {
658
810
  name: 'header',
659
811
  type: 'header',
660
812
  label: defineMessage({
661
813
  id: "rhuDxI",
662
- defaultMessage: [{
663
- "type": 0,
664
- "value": "Header"
665
- }]
814
+ defaultMessage: "Header"
666
815
  }),
667
816
  theme: {
668
817
  badge: {
@@ -677,10 +826,7 @@ var definition = {
677
826
  type: 'footer',
678
827
  label: defineMessage({
679
828
  id: "g4nybp",
680
- defaultMessage: [{
681
- "type": 0,
682
- "value": "Footer"
683
- }]
829
+ defaultMessage: "Footer"
684
830
  }),
685
831
  theme: {
686
832
  callToAction: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@micromag/screen-conversation",
3
- "version": "0.4.71",
3
+ "version": "0.4.74",
4
4
  "private": false,
5
5
  "description": "",
6
6
  "keywords": [
@@ -36,6 +36,7 @@
36
36
  "exports": {
37
37
  ".": {
38
38
  "types": "./es/index.d.ts",
39
+ "style": "./assets/css/styles.css",
39
40
  "import": "./es/index.js"
40
41
  },
41
42
  "./assets/css/styles": "./assets/css/styles.css",
@@ -52,26 +53,26 @@
52
53
  "build": "../../scripts/prepare-package.sh --types"
53
54
  },
54
55
  "devDependencies": {
55
- "react": "^18.3.0 || ^19.0.0",
56
- "react-dom": "^18.3.0 || ^19.0.0"
56
+ "react": "^19.0.0",
57
+ "react-dom": "^19.0.0"
57
58
  },
58
59
  "peerDependencies": {
59
- "react": "^18.3.0 || ^19.0.0",
60
- "react-dom": "^18.3.0 || ^19.0.0"
60
+ "react": "^19.0.0",
61
+ "react-dom": "^19.0.0"
61
62
  },
62
63
  "dependencies": {
63
64
  "@babel/runtime": "^7.28.6",
64
- "@micromag/core": "^0.4.71",
65
- "@micromag/element-background": "^0.4.71",
66
- "@micromag/element-container": "^0.4.71",
67
- "@micromag/element-footer": "^0.4.71",
68
- "@micromag/element-header": "^0.4.71",
69
- "@micromag/element-heading": "^0.4.71",
70
- "@micromag/element-layout": "^0.4.71",
71
- "@micromag/element-scroll": "^0.4.71",
72
- "@micromag/element-text": "^0.4.71",
73
- "@micromag/element-visual": "^0.4.71",
74
- "@micromag/transforms": "^0.4.71",
65
+ "@micromag/core": "^0.4.74",
66
+ "@micromag/element-background": "^0.4.74",
67
+ "@micromag/element-container": "^0.4.74",
68
+ "@micromag/element-footer": "^0.4.74",
69
+ "@micromag/element-header": "^0.4.74",
70
+ "@micromag/element-heading": "^0.4.74",
71
+ "@micromag/element-layout": "^0.4.74",
72
+ "@micromag/element-scroll": "^0.4.74",
73
+ "@micromag/element-text": "^0.4.74",
74
+ "@micromag/element-visual": "^0.4.74",
75
+ "@micromag/transforms": "^0.4.74",
75
76
  "classnames": "^2.2.6",
76
77
  "lodash": "^4.17.23",
77
78
  "react-intl": "^8.1.3 || ^10.0.0",
@@ -81,6 +82,6 @@
81
82
  "access": "public",
82
83
  "registry": "https://registry.npmjs.org/"
83
84
  },
84
- "gitHead": "9101554bc5761e32b4a002a10d26800608c69773",
85
+ "gitHead": "fe510ee87845280d0760cb292aef9d2eb69e67c1",
85
86
  "types": "es/index.d.ts"
86
87
  }