@versa_ai/vmml-editor 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/.turbo/turbo-build.log +335 -0
  2. package/CHANGELOG.md +16 -0
  3. package/README.md +1 -0
  4. package/biome.json +7 -0
  5. package/dist/index.d.mts +5 -0
  6. package/dist/index.d.ts +5 -0
  7. package/dist/index.js +2675 -0
  8. package/dist/index.js.map +1 -0
  9. package/dist/index.mjs +2673 -0
  10. package/dist/index.mjs.map +1 -0
  11. package/package.json +48 -0
  12. package/postcss.config.js +3 -0
  13. package/src/assets/css/closeLayer.scss +50 -0
  14. package/src/assets/css/colorSelector.scss +59 -0
  15. package/src/assets/css/editorTextMenu.less +130 -0
  16. package/src/assets/css/editorTextMenu.scss +149 -0
  17. package/src/assets/css/index.scss +252 -0
  18. package/src/assets/css/loading.scss +31 -0
  19. package/src/assets/css/maxTextLayer.scss +31 -0
  20. package/src/assets/img/icon_Brush.png +0 -0
  21. package/src/assets/img/icon_Change.png +0 -0
  22. package/src/assets/img/icon_Cut.png +0 -0
  23. package/src/assets/img/icon_Face.png +0 -0
  24. package/src/assets/img/icon_Graffiti.png +0 -0
  25. package/src/assets/img/icon_Mute.png +0 -0
  26. package/src/assets/img/icon_Refresh.png +0 -0
  27. package/src/assets/img/icon_Text1.png +0 -0
  28. package/src/assets/img/icon_Text2.png +0 -0
  29. package/src/assets/img/icon_Volume.png +0 -0
  30. package/src/assets/img/icon_Word.png +0 -0
  31. package/src/components/CloseLayer.tsx +25 -0
  32. package/src/components/ColorSelector.tsx +90 -0
  33. package/src/components/Controls.tsx +32 -0
  34. package/src/components/EditorCanvas.tsx +566 -0
  35. package/src/components/Loading.tsx +16 -0
  36. package/src/components/MaxTextLayer.tsx +27 -0
  37. package/src/components/SeekBar.tsx +126 -0
  38. package/src/components/TextMenu.tsx +332 -0
  39. package/src/components/VideoMenu.tsx +49 -0
  40. package/src/index.tsx +551 -0
  41. package/src/utils/HistoryClass.ts +131 -0
  42. package/src/utils/VmmlConverter.ts +339 -0
  43. package/src/utils/const.ts +10 -0
  44. package/src/utils/keyBoardUtils.ts +199 -0
  45. package/src/utils/usePeekControl.ts +242 -0
  46. package/tsconfig.json +5 -0
  47. package/tsup.config.ts +14 -0
@@ -0,0 +1,566 @@
1
+ import { fabric } from "fabric";
2
+ import { forwardRef, useEffect, useImperativeHandle, useMemo, useRef, useState } from "react";
3
+ import { getFrames, argbToRgba, urlToBlob, getFontSize } from "@versa_ai/vmml-utils"
4
+ import { v4 as uuidv4 } from "uuid";
5
+ import HistoryClass from "../utils/HistoryClass";
6
+ import VmmlConverter from "../utils/VmmlConverter";
7
+ import { usePeekControl } from "../utils/usePeekControl";
8
+ import { toSvg } from 'dom-to-image'
9
+
10
/**
 * EditorCanvas — fabric.js editing surface for the VMML video editor.
 *
 * Renders a <canvas> overlaying the video preview and manages editable
 * overlay objects (text clips and sticker/emoji clips). Each fabric object
 * carries a custom `clipData` payload that mirrors its VMML clip; every
 * user edit is forwarded to a VmmlConverter instance which keeps the VMML
 * document in sync. Parent components drive it through the imperative
 * handle exposed via `ref` (createText, updateText, createImage, …).
 *
 * Time model: clip times are in microseconds; frames are converted with
 * Math.floor((frame / 30) * 1000000) — an assumed fixed 30 fps
 * (NOTE(review): 30 is hard-coded throughout; confirm against the player).
 *
 * clipData.type values are user-facing Chinese labels used as tags:
 * "文字" = text clip, "表情包" = sticker/emoji clip (runtime strings —
 * other modules match on them, do not change).
 */
const EditorCanvas = forwardRef(
  ({ previewState, showCanvas, canvasSize, enterPreview, intoTextEdit, frame, vmml, dragState, initFcObjs, onVideoChange }: any, ref: any) => {
    // The fabric.Canvas instance; null until initCanvas() has run.
    const [fc, setFc] = useState<any>(null);
    // Undo/redo manager (HistoryClass) bound to the canvas.
    const [history, setHistory] = useState<any>(null);
    // Deferred calls queued while `fc` is still null; flushed (with the
    // canvas as argument) by the useEffect below once `fc` is set.
    const waitFcTasks = useRef<any>([]);
    // Keeps the VMML document in sync with canvas edits.
    const vmmlConverterRef = useRef<any>(null);
    // Ratio of VMML template dimensions to on-screen canvas dimensions,
    // used to map clip pos/scale between document space and canvas space.
    const heightScaleRef = useRef<number>(1);
    const widthScaleRef = useRef<number>(1);

    // Create the fabric canvas, compute document→canvas scale factors,
    // revive any serialized objects, and wire up event handlers.
    const initCanvas = () => {
      const canvas = new fabric.Canvas("canvas", {
        width: canvasSize.width,
        height: canvasSize.height,
        selection: false, // disable marquee (drag-to-select) on the canvas
      });
      heightScaleRef.current = vmml.template.dimension.height / canvasSize.height;
      widthScaleRef.current = vmml.template.dimension.width / canvasSize.width;

      if (initFcObjs.length) {
        createFcObjs(canvas);
      }
      setFc(canvas);
      initCanvasEvent(canvas);
      // NOTE(review): despite the use* name this is called from a plain
      // function, not at component top level — it cannot be a real React
      // hook; confirm it is just a helper that attaches canvas behavior.
      usePeekControl(canvas);
    };

    // Revive serialized fabric objects (initFcObjs) onto the canvas,
    // re-attaching the 'modified' sync handler and setting initial
    // visibility from the current playhead position.
    const createFcObjs = (canvas: any) => {
      // Current playhead in microseconds (assumes 30 fps).
      const ns = Math.floor((frame / 30) * 1000000);
      fabric.util.enlivenObjects(initFcObjs, (objects: any) => {
        objects.forEach((item: any) => {
          item.on('modified', () => {
            const fObj = convertToJSON(item)
            vmmlConverterRef.current.updateClip(fObj);
          });
          item.set("visible", ns >= item.clipData.inPoint);
          canvas.add(item);
        })
        canvas.renderAll();
      }, "")
    }

    // Re-evaluate every object's visibility against the playhead (frame
    // `f`, defaulting to the current `frame` prop): an object is visible
    // while inPoint <= t < inPoint + duration. Text clips fall back to the
    // template duration when the clip has none; other clips read the
    // duration off their fileUrl payload.
    const checkObjectInPoint = (f?: number) => {
      if (fc) {
        const ns = Math.floor(((f ?? frame) / 30) * 1000000);
        const objects = fc.getObjects();
        objects.forEach((item: any) => {
          if (item?.clipData?.type === "文字") {
            item.set("visible", ns >= item.clipData.inPoint && ns < item.clipData.inPoint + (item.clipData.duration || vmml.template.duration));
          } else {
            item.set("visible", ns >= item.clipData.inPoint && ns < item.clipData.inPoint + item.clipData?.fileUrl?.duration);
          }
        });
        fc.discardActiveObject();
        fc.renderAll();
      }
    };

    // Wire canvas-level events: click-through to preview, object removal,
    // mute toggling and text-edit requests (the latter two are custom
    // events fired elsewhere via canvas.fire).
    const initCanvasEvent = (canvas: any) => {
      canvas.on("mouse:down", (e: any) => {
        const active = canvas.getActiveObject();
        if (active) {
          if (active.isSelected !== null && active.clipData.type === "文字") {
            // second click on an already-selected text object — treat as
            // a (simulated) double click to open the text editor
            imitateDBclick(active, canvas);
          }
        } else {
          // click on empty canvas: hand control back to the preview
          enterPreview(canvas);
        }
      });
      canvas.on('object:removed', (e: any) => {
        // Only user-created objects (no originClip backing them in the
        // original VMML) are reported upward as a video change.
        if (e.target.clipData && !e.target.clipData.originClip) {
          onVideoChange(e.target.clipData);
        }
        const { clipData } = e.target.toJSON(['clipData']);
        vmmlConverterRef.current.deleteClip(clipData);
      });

      canvas.on('object:muteChange', (e: any) => {
        const { clipData } = e.target.toJSON(['clipData'])
        vmmlConverterRef.current.changeMute(clipData);
      });

      canvas.on('object:textEdit', (e: any) => {
        const active = canvas.getActiveObject();
        handleIntoTextMenu(active, canvas)
      })
    };
    // Simulate a double click on a text object: count consecutive clicks
    // on `active.isSelected` and open the text editor on the second one.
    const imitateDBclick = (active: any, canvas: any) => {
      active.isSelected++;
      // Arm a timer that resets the click count if the second click does
      // not arrive in time. NOTE(review): the original comment said
      // "1 second" but the window is 500 ms — confirm which is intended.
      const timer = setTimeout(() => {
        active.isSelected = 0;
      }, 500);
      if (active.isSelected === 2) {
        active.isSelected = 0;
        clearTimeout(timer);
        // jump straight from the canvas into text editing
        handleIntoTextMenu(active, canvas);
      }
    };
    // Collect the text object's geometry and clipData, hide the object on
    // the canvas (the text menu shows its own live preview), and hand the
    // bundle to the parent's text editor via intoTextEdit.
    const handleIntoTextMenu = (active: any, canvas: any) => {
      // pull the needed properties off the canvas Text instance and pass
      // them back to the text editor
      const {
        left,
        top,
        angle,
        scaleX,
        scaleY,
        zoomX,
        zoomY,
        clipData: { textBasicInfo, textColor, text, bgColor, fontFamily, id, fontAssetUrl },
      } = active;
      const textInfo = {
        id,
        text,
        textColor,
        bgColor,
        left,
        top,
        angle,
        scaleX,
        scaleY,
        zoomX,
        zoomY,
        textBasicInfo,
        fontFamily,
        fontAssetUrl
        // width,
        // height
      };
      active.set('visible', false);
      canvas.renderAll();
      intoTextEdit(textInfo);
    };

    // Serialize a fabric object (keeping the custom clipData/type fields)
    // and attach its center point — the shape VmmlConverter expects.
    const convertToJSON = (data: any) => {
      const centerPoint = data.getCenterPoint();
      return { ...data.toJSON(["clipData", "type"]), centerPoint };
    };

    // Create a sticker/emoji image object centered on the canvas, scaled
    // to half the canvas width, and register it with the converter via the
    // 'modified' handler. Resolves with the serialized object.
    const createImage = (file: any, emojiId: string) => {
      // video stickers start with an empty src; the frame image is pushed
      // in later via updateImage()
      const url = file.resourcesType === "image" ? file.visibleUrl : "";
      return new Promise((resolve, reject) => {
        try {
          fabric.Image.fromURL(
            url,
            (img: any) => {
              // uniform scale so the sticker spans half the canvas width
              const scaleX = canvasSize.width / 2 / file.width;
              img.set({
                width: file.width,
                height: file.height,
                scaleX,
                scaleY: scaleX,
                left: canvasSize.width / 2,
                top: canvasSize.height / 2,
                originX: 'center',
                originY: 'center',
                clipData: {
                  id: uuidv4(),
                  // null = no audio track at all; false = has audio, unmuted
                  isMute: file.hasAudio === "true" ? false : null,
                  inPoint: Math.floor((frame / 30) * 1000000),
                  inFrame: frame,
                  type: "表情包",
                  once: false,
                  emojiId,
                  fileUrl: file
                },
              });
              img.on('modified', () => {
                const fObj = convertToJSON(img);
                // strip the (potentially huge) data-URL before syncing
                fObj.src = "";
                vmmlConverterRef.current.updateClip(fObj);
              });
              fc.add(img);
              onVideoChange(img.clipData);
              const fObj = convertToJSON(img);
              resolve(fObj);
            },
            {
              padding: 3
            }
          );
        } catch (err) {
          // "添加表情包失败" = "failed to add sticker" (runtime string)
          reject(err + "添加表情包失败");
        }
      });
    };

    // Swap the bitmap of an existing sticker object (looked up by
    // clipData.id) for a new base64 frame; queued if the canvas is not
    // ready yet.
    const updateImage = (id: string, base64: string, fc2: any) => {
      const canvas = fc || fc2;
      if (canvas) {
        const imgs = canvas.getObjects().filter((item: any) => item?.clipData?.type === "表情包");
        const target = imgs.find((item: any) => item.clipData.id === id);
        if (target) {
          target.setSrc(base64, (img: any) => {
            img.setCoords();
            canvas.renderAll();
          });
        }
      } else {
        // canvas not ready: defer; the flush passes `fc` as the last arg
        waitFcTasks.current.push(updateImage.bind(this, id, base64));
      }
    };

    // Recreate an editable image object from an existing VMML video clip
    // (used when loading a document), mapping document-space pos/scale to
    // canvas space; queued if the canvas is not ready yet.
    const createImageFromClip = (clip: any, fc2: any) => {
      const canvas = fc || fc2;
      if (canvas && canvasSize.width) {
        // video sources start blank; frames are injected via updateImage()
        const url = /video/g.test(clip.videoClip.mimeType) ? "" : clip.videoClip.sourceUrl;
        fabric.Image.fromURL(url, (img: any) => {
          const { dimension, posParam } = clip.videoClip;
          // NOTE(review): scaleX is divided by the HEIGHT ratio and scaleY
          // by the WIDTH ratio — the opposite of createTextFromClip below.
          // Looks like a swap; confirm against the renderer before fixing.
          const scaleX = posParam.scaleX / heightScaleRef.current;
          const scaleY = posParam.scaleY / widthScaleRef.current;
          const left = canvasSize.width * posParam.centerX;
          const top = canvasSize.height * posParam.centerY;
          const inFrame = getFrames(clip.inPoint, 30);
          const durationFrame = getFrames(clip.duration, 30);
          img.set({
            width: dimension.width,
            height: dimension.height,
            scaleX,
            scaleY,
            left,
            top,
            angle: posParam.rotationZ,
            originX: 'center',
            originY: 'center',
            clipData: {
              id: clip.id,
              isMute: null,
              inPoint: clip.inPoint,
              inFrame,
              type: "表情包",
              once: false,
              fileUrl: {
                duration: clip.duration
              },
              // originClip marks this as backed by the original document
              originClip: clip
            },
            visible: frame >= inFrame && frame < inFrame + durationFrame
          });
          canvas.add(img);
          img.on('modified', () => {
            const fObj = convertToJSON(img);
            fObj.src = "";
            vmmlConverterRef.current.updateClip(fObj);
          });
        });
      } else {
        waitFcTasks.current.push(createImageFromClip.bind(this, clip));
      }
    }

    // Thin wrappers over HistoryClass, exposed via the imperative handle.
    const handleRedo = () => {
      history.redo();
    };
    const handleUndo = () => {
      history.undo();
    };

    // Recreate an editable text object from an existing VMML text clip:
    // render the text to an SVG image, then place it with the clip's
    // document-space transform mapped to canvas space. Queued if the
    // canvas is not ready yet.
    const createTextFromClip = async (clip: any, fc2: any) => {
      const canvas = fc || fc2;
      if (canvas) {
        const { width, height } = vmml.template.dimension;
        const fontSize = getFontSize(width, height);
        const { textContent, backgroundColor, textColor, posParam, fontAssetUrl, alignType } = clip.textClip;
        // 22 is the px font size used by createTextImg when rasterizing
        const scaleX = posParam.scaleX * fontSize / 22 / widthScaleRef.current;
        const scaleY = posParam.scaleY * fontSize / 22 / heightScaleRef.current;
        const left = canvasSize.width * posParam.centerX;
        const top = canvasSize.height * posParam.centerY;
        const bgColor = backgroundColor ? argbToRgba(backgroundColor) : 'transparent';
        // AI-generated placeholder ("请输入文案" = "please enter copy")
        // rendered fully transparent marks a failed AI text fill; shown in
        // white locally, but synced back as transparent (see 'modified').
        const isAiError = textContent === '请输入文案' && textColor === '#00000000';
        const textFill = argbToRgba(isAiError ? '#ffffffff' : (textColor || '#ffffffff'));
        const textBasicInfo = {
          isBack: backgroundColor ? true : false,
          colorValue: textFill,
          colorName: 'custom',
          textAlign: alignType === 1 ? 'center' : (alignType === 2 ? 'right' : 'left')
        }
        const textImgData = await createTextImg({ textContent, bgColor, textColor: textFill, fontAssetUrl, textBasicInfo });
        // font-URL → font-family mapping cached by the player
        const fontJSON = localStorage.getItem("VMML_PLAYER_FONTSMAP");
        let fontMap: any = {};
        try {
          fontMap = fontJSON ? JSON.parse(fontJSON) : {};
        } catch {
          // corrupt cache entry — fall back to an empty map
          fontMap = {};
        }
        const fontFamily = fontMap[fontAssetUrl] || '';
        fabric.Image.fromURL(textImgData.base64Image, (imgData: any) => {
          imgData.set({
            left,
            top,
            width: textImgData.width,
            height: textImgData.height,
            scaleX,
            scaleY,
            angle: posParam.rotationZ,
            originX: 'center',
            originY: 'center',
            clipData: {
              id: uuidv4(),
              inPoint: clip.inPoint,
              inFrame: getFrames(clip.inPoint, 30),
              type: "文字",
              textColor: textFill,
              text: textContent,
              bgColor,
              originClip: clip,
              fontAssetUrl,
              fontFamily,
              textBasicInfo,
              isAiError,
              duration: clip.duration
            },
          })
          // isSelected drives the simulated double-click state machine:
          // -1 = just selected, 0 = armed, increments on mouse:down
          imgData.on("selected", (options: any) => {
            options.target.isSelected = -1;
          });
          imgData.on("moving", (options: any) => {
            options.transform.target.isSelected = 0;
          });
          imgData.on('modified', () => {
            const fObj = convertToJSON(imgData);
            if (fObj.clipData.isAiError) {
              // keep failed AI placeholders invisible in the document
              fObj.clipData.textColor = 'rgba(0, 0, 0, 0)';
            }
            vmmlConverterRef.current.updateClip(fObj);
          });
          canvas.add(imgData).renderAll();
        })
      } else {
        waitFcTasks.current.push(createTextFromClip.bind(this, clip));
      }
    }

    // Inline a remote font into an SVG string via a CDATA <style> block so
    // the rasterized text keeps its custom font. No-op when url is falsy.
    const embedFontInSVG = async (svgString: string, url: string) => {
      if (url) {
        const res = await urlToBlob({ url });
        const fontFace = `
          @font-face {
            font-family: 'font-${url}';
            src: url(${res});
          }
        `;
        const styleElement = `<style type="text/css"><![CDATA[${fontFace}]]></style>`;
        return svgString.replace('</svg>', `${styleElement}</svg>`);
      }
      return svgString
    }

    // Render text to an image: build an off-screen styled <div> (one <p>
    // per line, 22px), rasterize it with dom-to-image's toSvg, embed the
    // custom font, and return the data URL plus measured pixel size.
    const createTextImg = async ({ textContent, bgColor, textColor, fontAssetUrl = null, textBasicInfo }: any) => {
      const container = document.createElement('div');
      container.style.backgroundColor = bgColor
      // container.style.width = `fit-content`
      // container.style.height = `fit-content`
      container.style.boxSizing = 'content-box'
      container.style.display = 'inline-block'
      container.style.textAlign = textBasicInfo.textAlign || 'left';
      const lines = textContent.split('\n');
      lines.forEach((line: string) => {
        const p = document.createElement('p');
        p.style.color = textColor;
        p.style.fontSize = '22px'
        p.style.lineHeight = '22px'
        // custom fonts are registered under 'font-<url>' (see embedFontInSVG)
        const font = fontAssetUrl ? `font-${fontAssetUrl}` : 'sansMedium';
        p.style.fontFamily = font
        p.style.whiteSpace = "nowrap"
        // p.style.backgroundColor = bgColor;
        p.style.padding = '0';
        // keep empty lines measurable by rendering a single space
        p.textContent = line || " "
        container.appendChild(p);
      })
      container.style.padding = '6.5px 7px 6.5px 7px'
      // container.style.padding = '7.5px 7px 5.8px 7px'
      container.style.borderRadius = '5px'
      // must be in the live DOM for layout measurement and toSvg
      document.body.appendChild(container)
      const { width, height } = container?.getBoundingClientRect() as any;
      const dataurl = await toSvg(container);
      document.body.removeChild(container);
      const base64Image = await embedFontInSVG(dataurl, fontAssetUrl);
      return { base64Image, height, width };
    }
    // Create a brand-new text object at the given position, centered on
    // the canvas, register it with the converter (addTextClip) and notify
    // the parent. Resolves true once the object has been added.
    const createText = async ({ textContent, bgColor, textColor, position, textBasicInfo, id }: any, fc2: any) => {
      const canvas = fc || fc2;
      const { left, top, angle, scaleX, scaleY, zoomX, zoomY } = position;
      const textImgData = await createTextImg({ textContent, bgColor, textColor, textBasicInfo });
      return new Promise((resolve, reject) => {
        fabric.Image.fromURL(textImgData.base64Image, (imgData: any) => {
          imgData.set({
            left,
            top,
            angle,
            width: textImgData.width,
            height: textImgData.height,
            scaleX,
            scaleY,
            clipData: {
              id: uuidv4(),
              inPoint: Math.floor((frame / 30) * 1000000),
              inFrame: frame,
              type: "文字",
              textBasicInfo,
              textColor: textColor,
              text: textContent,
              bgColor: bgColor
            },
          })
          imgData.on("selected", (options: any) => {
            options.target.isSelected = -1;
          });
          imgData.on("moving", (options: any) => {
            options.transform.target.isSelected = 0;
          });
          imgData.on('modified', () => {
            const fObj = convertToJSON(imgData)
            vmmlConverterRef.current.updateClip(fObj);
          });
          canvas.centerObject(imgData);
          canvas.add(imgData)
          // defer the repaint one tick so centering has been applied
          setTimeout(()=>{
            canvas.renderAll();
          })
          onVideoChange(imgData.clipData);
          vmmlConverterRef.current.addTextClip(convertToJSON(imgData));
          resolve(true);
        })
      })
    };
    // Re-render an existing text object (looked up by clipData.id) with
    // new content/colors/font, make it visible again (it was hidden while
    // the text menu was open) and sync the change to the converter.
    const updateText = async ({ id, textContent, bgColor, textColor, textBasicInfo, fontAssetUrl }: any) => {
      const textImgData = await createTextImg({ textContent, bgColor, textColor, fontAssetUrl, textBasicInfo });
      const target = fc.getObjects().find((item: any) => item.clipData.id === id);
      target.setSrc(textImgData.base64Image, (img: any) => {
        img.set({
          visible: true,
          clipData: {
            ...img.clipData,
            textBasicInfo,
            textColor: textColor,
            text: textContent,
            bgColor: bgColor,
            // once the user edits the text it is no longer an AI error
            isAiError: false
          }
        })
        img.setCoords();
        fc.renderAll();
        vmmlConverterRef.current.updateClip(convertToJSON(img));
      });
    }
    // Show/hide a single object by clipData.id.
    const changeObjectVisible = (id: string, visible: boolean = true) => {
      const target = fc.getObjects().find((item: any) => item.clipData.id === id);
      target.set({ visible });
      fc.renderAll();
    }

    // Count user-created objects of the given type (objects backed by an
    // originClip from the loaded document are excluded). Returns undefined
    // while the canvas is not ready.
    const getfObjectNums = (type: string) => {
      if (fc) {
        const objects = fc.getObjects();
        return objects.filter((item: any) => item.clipData.type === type && !item.clipData.originClip).length;
      }
    };

    // All objects currently on the canvas (undefined before init).
    const getfcObject = () => {
      if (fc) {
        return fc.getObjects();
      }
    }

    // Wrapper positioning/visibility; recomputed only when showCanvas
    // flips (canvasSize.top is read but intentionally not a dependency —
    // NOTE(review): stale `top` is possible if canvasSize changes while
    // visible; confirm canvasSize is stable after mount).
    const styles: any = useMemo(() => {
      return {
        position: "absolute",
        top: canvasSize.top,
        display: showCanvas ? "block" : "none",
      }
    }, [showCanvas])

    // Initialize once the measured canvas size is known; dispose on
    // cleanup so fabric detaches its DOM listeners.
    useEffect(() => {
      if (!fc && canvasSize.width) {
        initCanvas();
      }
      return () => {
        if (fc) {
          fc.dispose();
          setFc(null);
        }
      };
    }, [canvasSize.width]);

    // Leaving preview mode: drop selection and re-sync visibility with
    // the playhead.
    useEffect(() => {
      if (!previewState && fc) {
        fc.discardActiveObject();
        checkObjectInPoint();
      }
    }, [previewState]);

    // While a drag interaction is active (dragState === 1), clear any
    // canvas selection so it does not fight the drag.
    useEffect(() => {
      if (fc) {
        if (dragState === 1) {
          fc.discardActiveObject();
        }
      }
    }, [fc, dragState])

    // Create the VMML converter once the real canvas size is known.
    useEffect(() => {
      if (canvasSize.width && canvasSize.height && !vmmlConverterRef.current) {
        vmmlConverterRef.current = new VmmlConverter({ vmml, canvasSize });
      }
    }, [canvasSize, vmml]);

    // Once the canvas exists: attach undo/redo history and flush any
    // calls that were queued before initialization (each task receives
    // the canvas as its trailing argument).
    useEffect(() => {
      if (fc) {
        const historyClass = new HistoryClass(fc);
        setHistory(historyClass);
        if (waitFcTasks.current.length) {
          waitFcTasks.current.forEach((item: any) => item(fc));
          waitFcTasks.current = [];
        }
      }
    }, [fc]);

    // Imperative API consumed by the parent editor component.
    useImperativeHandle(ref, () => ({
      createImage,
      createText,
      updateText,
      handleRedo,
      handleUndo,
      getActions,
      getfObjectNums,
      updateImage,
      getfcObject,
      checkObjectInPoint,
      createImageFromClip,
      createTextFromClip,
      changeObjectVisible
    }));

    // Available undo/redo action types, or [] before history is attached.
    const getActions = () => {
      if (history) {
        return history.getActionType();
      }
      return []
    };

    return (
      <div style={styles} >
        <canvas id="canvas" />
      </div>
    );
  },
);

export default EditorCanvas;
@@ -0,0 +1,16 @@
1
+ import "../assets/css/loading.scss"
2
+
3
+ const Loaidng = ({ show }: any) => {
4
+
5
+ const styles = {
6
+ display: show ? "flex" : "none"
7
+ }
8
+
9
+ return (
10
+ <div style={styles} className="page-loader">
11
+ <div className="light"></div>
12
+ </div>
13
+ )
14
+ }
15
+
16
+ export default Loaidng
@@ -0,0 +1,27 @@
1
import { useEffect } from "react";
import "../assets/css/maxTextLayer.scss";

/**
 * Transient toast-style overlay that shows `text` and auto-dismisses
 * after 2 seconds (or immediately on click) by calling `textLayerHide`.
 *
 * Props:
 * - show: whether the overlay is rendered.
 * - textLayerHide: callback that hides the layer (owned by the parent).
 * - text: message to display.
 *
 * Fix: the original effect had no dependency array and no `show` guard,
 * so a fresh 2 s timer was armed on *every* render and `textLayerHide()`
 * fired even while the layer was hidden. The timer is now armed only
 * when the layer becomes visible, and cleared on hide/unmount.
 */
const MaxTextLayer = ({
  show = false,
  textLayerHide,
  text,
}: { show: boolean; textLayerHide: () => void; text: string }) => {
  useEffect(() => {
    // Only count down while the layer is actually visible.
    if (!show) return;
    const timer = setTimeout(() => {
      textLayerHide();
    }, 2000);
    return () => {
      clearTimeout(timer);
    };
  }, [show, textLayerHide]);
  return (
    show && (
      <>
        <div className="container" onClick={textLayerHide}>
          <span className="main_text">{text}</span>
        </div>
      </>
    )
  );
};

export default MaxTextLayer;