@chat21/chat21-ionic 3.4.10 → 3.4.12-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
  # chat21-ionic ver 3.0

+ ### 3.4.12-rc.2
+ - added: chat-audio component to manage audio file sent and received
+
+ ### 3.4.11 in PROD
+
  ### 3.4.10 in PROD

  ### 3.4.10-rc.1
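
For orientation, the `chat-audio` component referenced in the changelog is the new Angular component diffed below (template, styles and class), and it is consumed from the bubble-message template with the bindings shown in the later hunks. A minimal, hypothetical host sketch, assuming the inputs visible in this diff (`metadata`, `color`, `fontSize`, `stylesMap`) and that the component is declared in a module the host imports:

```typescript
// Hypothetical consumer of the new chat-audio component; names and values are
// illustrative, only the input bindings mirror the bubble-message diff below.
import { Component } from '@angular/core';

@Component({
  selector: 'app-audio-demo',
  template: `
    <chat-audio
      [metadata]="{ src: audioSrc }"
      [color]="'#222222'"
      [fontSize]="'14px'"
      [stylesMap]="stylesMap">
    </chat-audio>
  `,
})
export class AudioDemoComponent {
  // Any reachable audio URL; chat-audio reads metadata.src when no audioBlob is passed.
  audioSrc = 'https://example.com/voice-note.ogg';

  // Keys follow the ones read in the conversation template diff below.
  stylesMap = new Map<string, string>([
    ['bubbleReceivedTextColor', '#222222'],
    ['fontSize', '14px'],
    ['fontFamily', 'sans-serif'],
  ]);
}
```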
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@chat21/chat21-ionic",
  "author": "Tiledesk SRL",
- "version": "3.4.10",
+ "version": "3.4.12-rc.1",
  "license": "MIT License",
  "homepage": "https://tiledesk.com/",
  "repository": {
@@ -173,6 +173,7 @@
  [fontColor]="stylesMap?.get('bubbleReceivedTextColor')"
  [fontSize]="stylesMap?.get('fontSize')"
  [fontFamily]="stylesMap?.get('fontFamily')"
+ [stylesMap]="stylesMap"
  (onBeforeMessageRender)="onBeforeMessageRenderFN($event)"
  (onAfterMessageRender)="onAfterMessageRenderFN($event)"
  (onElementRendered)="onElementRenderedFN($event)"
@@ -35,6 +35,11 @@
  }
  }

+ :host .base_receive .msg_receive ::ng-deep > div > div > chat-audio {
+ --textColor: var(--col-msg-received) !important;
+ --backgroundColor: var(--bck-msg-received) !important;
+ }
+

  //------- MANAGE CHAT-OPTIONS component :: INIT ------- //
  :host .base_sent .msg_sent.no-background ::ng-deep > div,
@@ -1,21 +1,37 @@


- <div id="audio_container" #audio_container>
+ <div class="audio-container">

- <audio aria-label="traccia audio" #audio_msg controls controlsList="nodownload" id="audio_msg" (pause)="pauseAudioMsg($event)" (play)="playAudioMsg($event)" (timeupdate)="updateTimeAudioMsg($event)">
+ <!-- <audio aria-label="traccia audio" #audio_msg controls controlsList="nodownload" id="audio_msg" (pause)="pauseAudioMsg($event)" (play)="playAudioMsg($event)" (timeupdate)="updateTimeAudioMsg($event)">
  <source [src]="metadata?.src" [type]="metadata?.type">
- <!-- {{metadata?.src}} -->
- <!-- controlsList="nodownload" -->
- </audio>
-
- <!-- <button id="play-icon" (click)="onPlayPause('play')" *ngIf="status === 'play'">
- <svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 0 24 24" width="24px" fill="#000000"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M10 8.64L15.27 12 10 15.36V8.64M8 5v14l11-7L8 5z"/></svg>
- </button>
- <button id="pause-icon" (click)="onPlayPause('pause')" *ngIf="status === 'pause'">
- <svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 0 24 24" width="24px" fill="#000000"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M6 19h4V5H6v14zm8-14v14h4V5h-4z"/></svg>
- </button>
- <div id="duration" #duration>0:00</div>
- <input type="range" id="seek-slider" max="100" value="0"> -->
+ <!- - {{metadata?.src}} -->
+ <!-- controlsList="nodownload" - ->
+ </audio> -->
+
+ <div class="audio-track">
+ <button *ngIf="!isPlaying" class="play-pause" (click)="playPauseAudio()">
+ <svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px">
+ <path d="M320-200v-560l440 280-440 280Z"/>
+ </svg>
+ <!-- <i class="material-icons">play_arrow</i> -->
+ </button>
+ <button *ngIf="isPlaying" class="play-pause" (click)="playPauseAudio()">
+ <svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" >
+ <path d="M560-200v-560h160v560H560Zm-320 0v-560h160v560H240Z"/>
+ </svg>
+ <!-- <i class="material-icons">pause</i> -->
+ </button>
+ <div class="duration" [style.color]="color" [style.font-size]="fontSize" >
+ <span *ngIf="!isPlaying">{{ audioDuration ? formatTime(audioDuration) : '00:00' }}</span>
+ <span *ngIf="isPlaying">{{ formatTime(currentTime) }}</span>
+ </div>
+
+ </div>
+
+ <div class="audio-player-custom">
+ <audio #audioElement [src]="audioUrl"></audio>
+ <canvas #canvasElement class="waveformCanvas"></canvas>
+ </div>
+
  </div>
- <div id="example"></div>

@@ -1,3 +1,116 @@
+ :host {
+ --backgroundColor: #{var(--bck-msg-sent)};
+ --textColor: #{var(--col-msg-sent)};
+ --hoverBackgroundColor: var(--textColor);
+ --hoverTextColor:var(--backgroundColor);
+ }
+
+
+ .audio-container{
+ position: relative;
+ display: inline-flex;
+ width: 100%;
+ padding: 0px 12px;
+ margin: 6px 0px;
+ }
+
+ audio {
+ width: 272px;
+ height: 30px;
+ margin: 0;
+ padding: 10px;
+ }
+
+ .audio-recorder {
+ text-align: center;
+ margin: 0px;
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ height: 100%;
+ width: 100%;
+ float: left;
+ }
+
+ button {
+ margin: 0px;
+ padding: 0px;
+ font-size: 16px;
+ border: none;
+ background-color: transparent;
+ color: var(--textColor);
+ fill: var(--textColor);
+ }
+
+ .waveformCanvas {
+ width: 100%;
+ height: 28px;
+ z-index: 1;
+ padding: 0px;
+ margin: 0%;
+ }
+
+ .audio-track {
+ // width: 247px;//272px;
+ // height: 30px;
+ position: relative;
+ display: flex;
+ align-items: center;
+ // margin: 0 13px;
+ margin: 0px;
+ .play-pause {
+ font-size: 20px;
+ width: 30px;
+ background-color: transparent;
+ border-radius: 50%;
+ height: 30px;
+ margin: 0px;
+ transition: background-color 0.5s ease;
+
+ }
+ .play-pause:hover {
+ // background-color: #ddd;
+ // background-color: rgb(82, 160, 252);
+ background-color: var(--hoverBackgroundColor);
+ color: var(--hoverTextColor);
+ svg{
+ //fill:#fff;
+ fill: var(--hoverTextColor)
+ }
+ }
+ .duration {
+ padding: 0 3px;
+ }
+ }
+
+ .audio-player-custom {
+ // width: 200px;
+ // height: 32px;
+ // margin-left: 75px;
+ // position: absolute;
+ // overflow: hidden;
+ // z-index: 1;
+ // display: flex;
+ // align-items: center;
+
+ // width: calc(100% - 75px);
+ // height: 32px;
+ // margin-left: 65px;
+ // position: absolute;
+ min-width: 120px;
+ overflow: hidden;
+ z-index: 1;
+ display: flex;
+ align-items: center;
+ width: 100%;
+ }
+
+
+
+
+
+
+
  #audio_container{
  display: flex;
  }
@@ -1,7 +1,7 @@
  import { Component, ElementRef, EventEmitter, Input, OnInit, Output, ViewChild } from '@angular/core';
- // import * as WaveSurfer from 'wavesurfer.js';
+ import { DomSanitizer, SafeUrl } from '@angular/platform-browser';
+ import { convertColorToRGBA } from 'src/chat21-core/utils/utils';

- // declare var WaveSurfer
  @Component({
  selector: 'chat-audio',
  templateUrl: './audio.component.html',
@@ -9,158 +9,153 @@ import { Component, ElementRef, EventEmitter, Input, OnInit, Output, ViewChild }
  })
  export class AudioComponent implements OnInit {

- @Input() metadata: any;
+ @ViewChild('audioElement', { static: true }) audioElement!: ElementRef<HTMLAudioElement>;
+ @ViewChild('canvasElement', { static: true }) waveformCanvas!: ElementRef<HTMLCanvasElement>;
+
+ @Input() metadata: any | null = null;
+ @Input() audioBlob: Blob | null = null;
+ @Input() color: string;
+ @Input() fontSize: string;
+ @Input() stylesMap: Map<string, string>;
  @Output() onElementRendered = new EventEmitter<{element: string, status: boolean}>();

- uidAudioPlayng: string = ''
- divPlay: HTMLAudioElement
- playState: HTMLElement
- status: 'play' | 'pause' = 'play'
+ audioUrl: SafeUrl | null = null;
+ rawAudioUrl: string | null = null;
+ audioContext!: AudioContext;
+ audioBuffer!: AudioBuffer;
+ audioDuration: number | null = null;
+ currentTime: number = 0;
+ isPlaying: boolean = false;

- // wavesurfer: any;
- constructor(private elementRef: ElementRef) { }
+ constructor(
+ private sanitizer: DomSanitizer,
+ private elementRef: ElementRef
+ ) {}

  ngOnInit() {
- // console.log('metadataaaaaa', this.metadata)
- // this.divPlay = this.elementRef.nativeElement.querySelector('#audio_container').querySelector('#audio_msg')
- // this.playState= this.elementRef.nativeElement.querySelector('#audio_container').querySelector('#duration')
- this.loadLib()
  }

- // ionViewWillEnter(){
- // this.loadLib()
- // }
-
- private async loadLib(){
- // const SiriWave = await import("siriwave");
- // console.log('elementttt', document.getElementById("example"), this.elementRef.nativeElement.querySelector("#example"))
- // var instance = new SiriWave({
- // // container: this.elementRef.nativeElement.querySelector("#example"),
- // container: document.getElementById("example"),
- // width: 300,
- // height: 120,
- // });
- // instance.start();
- // this.wavesurfer = WaveSurfer.create({
- // container: "#" + 'example',
- // waveColor: "#e1f5fe",
- // progressColor: "#03a9f4",
- // cursorColor: "rgb(255 255 255 / 50%)",
- // barWidth: 4,
- // barHeight: 1,
- // barRadius: 2,
- // cursorWidth: 1,
- // // height: 150,
- // fillParent: true,
- // barGap: 0,
- // // backend: 'MediaElement',
- // // mediaType:'audio',
- // normalize: true,
- // // url: this.metadata.url
- // })
-
- // this.wavesurfer.load('https://eu.rtmv3.tiledesk.com/api/files/download?path=uploads/public/files/2f715ae1-6dc6-4cbf-a94c-42be26f1a723/media-2b92sy.ogg');
-
- // this.wavesurfer.on('ready', function () {
- // console.log('readyyyyy')
- // // wavesurfer.play();
- // });
-
+ ngAfterViewInit() {
+ if (this.audioBlob) {
+ this.rawAudioUrl = URL.createObjectURL(this.audioBlob);
+ this.audioUrl = this.sanitizer.bypassSecurityTrustUrl(this.rawAudioUrl);
+ this.setupAudioContext();
+ } else {
+ this.rawAudioUrl = this.metadata.src;
+ this.audioUrl = this.sanitizer.bypassSecurityTrustUrl(this.rawAudioUrl);
+ this.setupAudioContext();
+ }
  }

- onPlayPause(status: string){
- // const divPlay = (<HTMLAudioElement>document.getElementById('audio_msg'));
- if(status === 'play') {
- this.divPlay.play();
- this.status = 'pause'
- } else {
- this.divPlay.pause();
- this.status = 'play'
+ async setupAudioContext() {
+ this.audioContext = new AudioContext();
+ if (this.rawAudioUrl) {
+ const response = await fetch(this.rawAudioUrl);
+ const audioData = await response.arrayBuffer();
+ this.audioBuffer = await this.audioContext.decodeAudioData(audioData);
+ this.getAudioDuration();
+ this.drawWaveform(this.audioBuffer);
  }
  }
- pauseAudioMsg(e) {
- try {
- // stop all audio
- if (this.uidAudioPlayng) {
- const divPlay = (<HTMLAudioElement>document.getElementById(this.uidAudioPlayng));
- divPlay.pause();
- // console.log('> pausa: ', divPlay);
+
+ drawWaveform(audioBuffer: AudioBuffer) {
+ const canvas = this.waveformCanvas.nativeElement;
+ const canvasCtx = canvas.getContext('2d');
+ if (!canvasCtx) return;
+ const width = canvas.width;
+ const height = canvas.height;
+ const rawData = audioBuffer.getChannelData(0);
+
+ const samples = 40;
+ const blockSize = Math.floor(rawData.length / samples);
+ const waveform = new Float32Array(samples);
+
+ for (let i = 0; i < samples; i++) {
+ let sum = 0;
+ for (let j = 0; j < blockSize; j++) {
+ sum += Math.abs(rawData[i * blockSize + j]);
  }
- } catch (error) {
- console.log('> Error is: ', error);
+ waveform[i] = sum / blockSize;
  }

- try {
- // console.log(e.target.id);
- if (this.uidAudioPlayng) {
- this.uidAudioPlayng = '';
+ canvasCtx.clearRect(0, 0, width, height);
+ const padding = 2;
+ const barWidth = (width / samples) - padding * 2;
+ const audio = this.audioElement.nativeElement;
+ const playedPercent = audio.currentTime / this.audioDuration;
+ // console.log('playedPercent: ', audio.currentTime, this.audioDuration);
+
+ for (let i = 0; i < samples; i++) {
+ var barHeight = waveform[i] * height * 4;
+ if (barHeight < 4) barHeight = 4;
+ const x = i * (barWidth + padding * 2) + padding;
+
+ if (i / samples < playedPercent) {
+ canvasCtx.fillStyle = this.color;
+ } else {
+ canvasCtx.fillStyle = convertColorToRGBA(this.color, 50);
  }
- } catch (error) {
- console.log('> Error is: ', error);
+ canvasCtx.fillRect(x, height / 2 - barHeight, barWidth, barHeight);
+ canvasCtx.fillRect(x, height / 2, barWidth, barHeight);
  }
  }

- playAudioMsg(e) {
- // stop all audio
- if (this.uidAudioPlayng) {
- const divPlay = (<HTMLAudioElement>document.getElementById(this.uidAudioPlayng));
- divPlay.pause();
- // console.log('> pausa: ', divPlay);
- }
- try {
- // console.log(e.target.id);
- // set uid audio playng
- this.uidAudioPlayng = e.target.id;
- } catch (error) {
- console.log('> Error is: ', error);
+ playPauseAudio() {
+ const audio = this.audioElement.nativeElement;
+ if (audio.paused) {
+ this.isPlaying = true;
+ this.updateWaveform();
+ audio.play();
+ this.audioContext.resume();
+ } else {
+ audio.pause();
+ this.isPlaying = false;
  }
+ audio.ontimeupdate = () => {
+ this.currentTime = audio.currentTime;
+ this.updateWaveform();
+ };
+ audio.onended = () => {
+ this.isPlaying = false;
+ };
  }

- updateTimeAudioMsg(ev){
- var currTime = Math.floor(ev.target.currentTime);
- var duration = Math.floor(ev.target.duration);

- let minutes = 0;
- if(currTime > 60){
- minutes = Math.floor(currTime / 60);
+ updateWaveform() {
+ this.drawWaveform(this.audioBuffer);
+ if (this.isPlaying) {
+ requestAnimationFrame(() => this.updateWaveform());
  }
- const seconds = currTime - minutes * 60
- // console.log('timeeee', minutes + ':' + seconds )
- // this.playState.innerHTML = minutes + ':' + seconds
  }

+ formatTime(seconds: number): string {
+ const minutes = Math.floor(seconds / 60);
+ const sec = Math.floor(seconds % 60);
+ return `${minutes}:${sec < 10 ? '0' + sec : sec}`;
+ }

-
- /**
- *
- * @param uid
- */
- playPausaAudioMsg(uid: string) {
- // console.log('playPausaAudioMsg: ', uid);
- const that = this;
- try {
- const divPause = (<HTMLAudioElement>document.getElementById(that.uidAudioPlayng));
- const divPlay = (<HTMLAudioElement>document.getElementById(uid));
- if (divPause) {
- divPause.pause();
- }
-
- if (that.uidAudioPlayng === uid) {
- that.uidAudioPlayng = '';
+ getAudioDuration() {
+ const audio = new Audio();
+ audio.src = this.rawAudioUrl!;
+ audio.addEventListener('loadedmetadata', () => {
+ if (audio.duration === Infinity) {
+ audio.currentTime = Number.MAX_SAFE_INTEGER;
+ audio.ontimeupdate = () => {
+ audio.ontimeupdate = null;
+ audio.currentTime = 0;
+ this.audioDuration = audio.duration;
+ };
  } else {
- if (divPlay) {
- setTimeout(function() {
- // if (that.g.autoplay_activated) {
- // divPlay.play();
- // }
- this.uidAudioPlayng = uid;
- }, 300);
- }
+ this.audioDuration = audio.duration;
  }
+ });
+ }

- } catch (error) {
- console.log('> Error is: ', error);
- }
+ extractFirstColor(gradient: string): string | null {
+ const colorRegex = /rgba?\((\d+,\s*\d+,\s*\d+(,\s*\d+(\.\d+)?)?)\)/;
+ const match = gradient.match(colorRegex);
+ return match ? match[0] : null;
  }

+
  }
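
Two implementation details in the rewritten component are worth calling out. `drawWaveform` renders the bar waveform by downsampling the decoded PCM into a fixed number of blocks (40) and averaging the absolute amplitude per block, while `getAudioDuration` works around `audio.duration` reporting `Infinity` (common for blobs produced by MediaRecorder) by seeking far ahead once so the browser resolves the real duration. A standalone sketch of the downsampling step, assuming an already-decoded `AudioBuffer`:

```typescript
// Standalone sketch of the block-averaging used by drawWaveform above.
// `audioBuffer` is assumed to come from AudioContext.decodeAudioData().
function toBars(audioBuffer: AudioBuffer, samples = 40): Float32Array {
  const rawData = audioBuffer.getChannelData(0);   // PCM samples of the first channel
  const blockSize = Math.floor(rawData.length / samples);
  const bars = new Float32Array(samples);
  for (let i = 0; i < samples; i++) {
    let sum = 0;
    for (let j = 0; j < blockSize; j++) {
      sum += Math.abs(rawData[i * blockSize + j]); // accumulate absolute amplitude
    }
    bars[i] = sum / blockSize;                     // average amplitude of the block
  }
  return bars;
}
```

Each bar is then drawn mirrored around the canvas mid-line, with already-played bars filled in `color` and the remaining ones in a translucent variant via `convertColorToRGBA(this.color, 50)`.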
@@ -49,9 +49,11 @@
  (onElementRendered)="onElementRenderedFN($event)">
  </chat-frame>

- <chat-audio *ngIf="isAudio(message)"
- [metadata]="message.metadata"
- (onElementRendered)="onElementRenderedFN($event)">
+ <chat-audio *ngIf="isAudio(message)"
+ [metadata]="message.metadata"
+ [color]="fontColor"
+ [fontSize]="fontSize"
+ [stylesMap]="stylesMap">
  </chat-audio>

  <!-- <chat-frame *ngIf="message.metadata && message.metadata.type && message.metadata.type.includes('video')"
@@ -24,6 +24,7 @@ export class BubbleMessageComponent implements OnInit, OnChanges {
  @Input() fontColor: string;
  @Input() fontSize: string;
  @Input() fontFamily: string;
+ @Input() stylesMap: Map<string, string>;
  @Input() supportMode: boolean;
  @Output() onBeforeMessageRender = new EventEmitter();
  @Output() onAfterMessageRender = new EventEmitter();