@design.estate/dees-wcctools 1.2.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/dist_bundle/bundle.js +1764 -218
  2. package/dist_bundle/bundle.js.map +4 -4
  3. package/dist_ts_demotools/demotools.d.ts +1 -1
  4. package/dist_ts_demotools/demotools.js +86 -38
  5. package/dist_ts_web/00_commitinfo_data.js +1 -1
  6. package/dist_ts_web/elements/wcc-dashboard.d.ts +11 -10
  7. package/dist_ts_web/elements/wcc-dashboard.js +370 -246
  8. package/dist_ts_web/elements/wcc-frame.d.ts +3 -3
  9. package/dist_ts_web/elements/wcc-frame.js +108 -57
  10. package/dist_ts_web/elements/wcc-properties.d.ts +14 -8
  11. package/dist_ts_web/elements/wcc-properties.js +442 -323
  12. package/dist_ts_web/elements/wcc-record-button.d.ts +12 -0
  13. package/dist_ts_web/elements/wcc-record-button.js +165 -0
  14. package/dist_ts_web/elements/wcc-recording-panel.d.ts +42 -0
  15. package/dist_ts_web/elements/wcc-recording-panel.js +1067 -0
  16. package/dist_ts_web/elements/wcc-sidebar.d.ts +7 -5
  17. package/dist_ts_web/elements/wcc-sidebar.js +250 -81
  18. package/dist_ts_web/elements/wcctools.helpers.d.ts +13 -0
  19. package/dist_ts_web/elements/wcctools.helpers.js +26 -1
  20. package/dist_ts_web/index.d.ts +3 -0
  21. package/dist_ts_web/index.js +5 -1
  22. package/dist_ts_web/services/ffmpeg.service.d.ts +42 -0
  23. package/dist_ts_web/services/ffmpeg.service.js +276 -0
  24. package/dist_ts_web/services/mp4.service.d.ts +32 -0
  25. package/dist_ts_web/services/mp4.service.js +139 -0
  26. package/dist_ts_web/services/recorder.service.d.ts +44 -0
  27. package/dist_ts_web/services/recorder.service.js +307 -0
  28. package/dist_watch/bundle.js +2126 -541
  29. package/dist_watch/bundle.js.map +4 -4
  30. package/package.json +8 -8
  31. package/readme.md +133 -141
  32. package/ts_web/00_commitinfo_data.ts +1 -1
  33. package/ts_web/elements/wcc-dashboard.ts +86 -26
  34. package/ts_web/elements/wcc-frame.ts +3 -3
  35. package/ts_web/elements/wcc-properties.ts +53 -9
  36. package/ts_web/elements/wcc-record-button.ts +108 -0
  37. package/ts_web/elements/wcc-recording-panel.ts +978 -0
  38. package/ts_web/elements/wcc-sidebar.ts +133 -22
  39. package/ts_web/elements/wcctools.helpers.ts +31 -0
  40. package/ts_web/index.ts +5 -0
  41. package/ts_web/readme.md +123 -0
  42. package/ts_web/services/recorder.service.ts +393 -0
package/ts_web/services/recorder.service.ts (new file)
@@ -0,0 +1,393 @@
+/**
+ * RecorderService - Handles all MediaRecorder, audio monitoring, and video export logic
+ */
+
+export interface IRecorderEvents {
+  onDurationUpdate?: (duration: number) => void;
+  onRecordingComplete?: (blob: Blob) => void;
+  onAudioLevelUpdate?: (level: number) => void;
+  onError?: (error: Error) => void;
+  onStreamEnded?: () => void;
+}
+
+export interface IRecordingOptions {
+  mode: 'viewport' | 'screen';
+  audioDeviceId?: string;
+  viewportElement?: HTMLElement;
+}
+
+export class RecorderService {
+  // Recording state
+  private mediaRecorder: MediaRecorder | null = null;
+  private recordedChunks: Blob[] = [];
+  private durationInterval: number | null = null;
+  private _duration: number = 0;
+  private _recordedBlob: Blob | null = null;
+  private _isRecording: boolean = false;
+
+  // Audio monitoring state
+  private audioContext: AudioContext | null = null;
+  private audioAnalyser: AnalyserNode | null = null;
+  private audioMonitoringInterval: number | null = null;
+  private monitoringStream: MediaStream | null = null;
+
+  // Current recording stream
+  private currentStream: MediaStream | null = null;
+
+  // Event callbacks
+  private events: IRecorderEvents = {};
+
+  constructor(events?: IRecorderEvents) {
+    if (events) {
+      this.events = events;
+    }
+  }
+
+  // Public getters
+  get isRecording(): boolean {
+    return this._isRecording;
+  }
+
+  get duration(): number {
+    return this._duration;
+  }
+
+  get recordedBlob(): Blob | null {
+    return this._recordedBlob;
+  }
+
+  // Update event callbacks
+  setEvents(events: IRecorderEvents): void {
+    this.events = { ...this.events, ...events };
+  }
+
+  // ==================== Microphone Management ====================
+
+  async loadMicrophones(requestPermission: boolean = false): Promise<MediaDeviceInfo[]> {
+    try {
+      if (requestPermission) {
+        // Request permission by getting a temporary stream
+        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+        stream.getTracks().forEach(track => track.stop());
+      }
+
+      const devices = await navigator.mediaDevices.enumerateDevices();
+      return devices.filter(d => d.kind === 'audioinput');
+    } catch (error) {
+      console.error('Error loading microphones:', error);
+      return [];
+    }
+  }
+
+  async startAudioMonitoring(deviceId: string): Promise<void> {
+    this.stopAudioMonitoring();
+
+    if (!deviceId) return;
+
+    try {
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: { deviceId: { exact: deviceId } }
+      });
+
+      this.monitoringStream = stream;
+      this.audioContext = new AudioContext();
+      const source = this.audioContext.createMediaStreamSource(stream);
+      this.audioAnalyser = this.audioContext.createAnalyser();
+      this.audioAnalyser.fftSize = 256;
+      source.connect(this.audioAnalyser);
+
+      const dataArray = new Uint8Array(this.audioAnalyser.frequencyBinCount);
+
+      this.audioMonitoringInterval = window.setInterval(() => {
+        if (this.audioAnalyser) {
+          this.audioAnalyser.getByteFrequencyData(dataArray);
+          const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
+          const level = Math.min(100, (average / 128) * 100);
+          this.events.onAudioLevelUpdate?.(level);
+        }
+      }, 50);
+    } catch (error) {
+      console.error('Error starting audio monitoring:', error);
+      this.events.onAudioLevelUpdate?.(0);
+    }
+  }
+
+  stopAudioMonitoring(): void {
+    if (this.audioMonitoringInterval) {
+      clearInterval(this.audioMonitoringInterval);
+      this.audioMonitoringInterval = null;
+    }
+    if (this.audioContext) {
+      this.audioContext.close();
+      this.audioContext = null;
+    }
+    if (this.monitoringStream) {
+      this.monitoringStream.getTracks().forEach(track => track.stop());
+      this.monitoringStream = null;
+    }
+    this.audioAnalyser = null;
+  }
+
+  // ==================== Recording Control ====================
+
+  async startRecording(options: IRecordingOptions): Promise<void> {
+    try {
+      // Stop audio monitoring before recording
+      this.stopAudioMonitoring();
+
+      // Get video stream based on mode
+      const displayMediaOptions: DisplayMediaStreamOptions = {
+        video: {
+          displaySurface: options.mode === 'viewport' ? 'browser' : 'monitor'
+        } as MediaTrackConstraints,
+        audio: false
+      };
+
+      // Add preferCurrentTab hint for viewport mode
+      if (options.mode === 'viewport') {
+        (displayMediaOptions as any).preferCurrentTab = true;
+      }
+
+      const videoStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
+
+      // If viewport mode, try to crop to viewport element using Element Capture API
+      if (options.mode === 'viewport' && options.viewportElement) {
+        try {
+          if ('CropTarget' in window) {
+            const cropTarget = await (window as any).CropTarget.fromElement(options.viewportElement);
+            const [videoTrack] = videoStream.getVideoTracks();
+            await (videoTrack as any).cropTo(cropTarget);
+          }
+        } catch (e) {
+          console.warn('Element Capture not supported, recording full tab:', e);
+        }
+      }
+
+      // Combine video with audio if enabled
+      let combinedStream = videoStream;
+      if (options.audioDeviceId) {
+        try {
+          const audioStream = await navigator.mediaDevices.getUserMedia({
+            audio: { deviceId: { exact: options.audioDeviceId } }
+          });
+          combinedStream = new MediaStream([
+            ...videoStream.getVideoTracks(),
+            ...audioStream.getAudioTracks()
+          ]);
+        } catch (audioError) {
+          console.warn('Could not add audio:', audioError);
+        }
+      }
+
+      // Store stream for cleanup
+      this.currentStream = combinedStream;
+
+      // Create MediaRecorder
+      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
+        ? 'video/webm;codecs=vp9'
+        : 'video/webm';
+
+      this.mediaRecorder = new MediaRecorder(combinedStream, { mimeType });
+      this.recordedChunks = [];
+
+      this.mediaRecorder.ondataavailable = (e) => {
+        if (e.data.size > 0) {
+          this.recordedChunks.push(e.data);
+        }
+      };
+
+      this.mediaRecorder.onstop = () => this.handleRecordingComplete();
+
+      // Handle stream ending (user clicks "Stop sharing")
+      videoStream.getVideoTracks()[0].onended = () => {
+        if (this._isRecording) {
+          this.stopRecording();
+          this.events.onStreamEnded?.();
+        }
+      };
+
+      this.mediaRecorder.start(1000); // Capture in 1-second chunks
+
+      // Start duration timer
+      this._duration = 0;
+      this.durationInterval = window.setInterval(() => {
+        this._duration++;
+        this.events.onDurationUpdate?.(this._duration);
+      }, 1000);
+
+      this._isRecording = true;
+    } catch (error) {
+      console.error('Error starting recording:', error);
+      this._isRecording = false;
+      this.events.onError?.(error as Error);
+      throw error;
+    }
+  }
+
+  stopRecording(): void {
+    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
+      this.mediaRecorder.stop();
+    }
+
+    if (this.durationInterval) {
+      clearInterval(this.durationInterval);
+      this.durationInterval = null;
+    }
+  }
+
+  private async handleRecordingComplete(): Promise<void> {
+    // Create blob from recorded chunks
+    const blob = new Blob(this.recordedChunks, { type: 'video/webm' });
+
+    this._recordedBlob = blob;
+
+    // Stop all tracks
+    if (this.currentStream) {
+      this.currentStream.getTracks().forEach(track => track.stop());
+      this.currentStream = null;
+    }
+
+    this._isRecording = false;
+    this.events.onRecordingComplete?.(this._recordedBlob);
+  }
+
+  // ==================== Trim & Export ====================
+
+  async exportTrimmedVideo(
+    videoElement: HTMLVideoElement,
+    trimStart: number,
+    trimEnd: number
+  ): Promise<Blob> {
+    return new Promise((resolve, reject) => {
+      // Create a canvas for capturing frames
+      const canvas = document.createElement('canvas');
+      canvas.width = videoElement.videoWidth || 1280;
+      canvas.height = videoElement.videoHeight || 720;
+      const ctx = canvas.getContext('2d');
+
+      if (!ctx) {
+        reject(new Error('Could not get canvas context'));
+        return;
+      }
+
+      // Create canvas stream for video
+      const canvasStream = canvas.captureStream(30);
+
+      // Try to capture audio from video element
+      let combinedStream: MediaStream;
+
+      try {
+        // Create audio context to capture video's audio
+        const audioCtx = new AudioContext();
+        const source = audioCtx.createMediaElementSource(videoElement);
+        const destination = audioCtx.createMediaStreamDestination();
+        source.connect(destination);
+        source.connect(audioCtx.destination); // Also play through speakers
+
+        // Combine video (from canvas) and audio (from video element)
+        combinedStream = new MediaStream([
+          ...canvasStream.getVideoTracks(),
+          ...destination.stream.getAudioTracks()
+        ]);
+
+        // Store audioCtx for cleanup
+        const cleanup = () => {
+          audioCtx.close();
+        };
+
+        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, cleanup, resolve, reject);
+      } catch (audioError) {
+        console.warn('Could not capture audio, recording video only:', audioError);
+        combinedStream = canvasStream;
+        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, () => {}, resolve, reject);
+      }
+    });
+  }
+
+  private recordTrimmedStream(
+    video: HTMLVideoElement,
+    canvas: HTMLCanvasElement,
+    ctx: CanvasRenderingContext2D,
+    stream: MediaStream,
+    trimStart: number,
+    trimEnd: number,
+    cleanup: () => void,
+    resolve: (blob: Blob) => void,
+    reject: (error: Error) => void
+  ): void {
+    const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
+      ? 'video/webm;codecs=vp9'
+      : 'video/webm';
+
+    const recorder = new MediaRecorder(stream, { mimeType });
+    const chunks: Blob[] = [];
+
+    recorder.ondataavailable = (e) => {
+      if (e.data.size > 0) {
+        chunks.push(e.data);
+      }
+    };
+
+    recorder.onstop = () => {
+      cleanup();
+      resolve(new Blob(chunks, { type: 'video/webm' }));
+    };
+
+    recorder.onerror = (e) => {
+      cleanup();
+      reject(new Error('Recording error: ' + e));
+    };
+
+    // Seek to trim start
+    video.currentTime = trimStart;
+
+    video.onseeked = () => {
+      // Start recording
+      recorder.start(100);
+
+      // Start playing
+      video.play();
+
+      // Draw frames to canvas
+      const drawFrame = () => {
+        if (video.currentTime >= trimEnd || video.paused || video.ended) {
+          video.pause();
+          video.onseeked = null;
+
+          // Give a small delay before stopping to ensure last frame is captured
+          setTimeout(() => {
+            if (recorder.state === 'recording') {
+              recorder.stop();
+            }
+          }, 100);
+          return;
+        }
+
+        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
+        requestAnimationFrame(drawFrame);
+      };
+
+      drawFrame();
+    };
+  }
+
+  // ==================== Cleanup ====================
+
+  reset(): void {
+    this._recordedBlob = null;
+    this.recordedChunks = [];
+    this._duration = 0;
+    this._isRecording = false;
+  }
+
+  dispose(): void {
+    this.stopRecording();
+    this.stopAudioMonitoring();
+    this.reset();
+
+    if (this.currentStream) {
+      this.currentStream.getTracks().forEach(track => track.stop());
+      this.currentStream = null;
+    }
+  }
+}
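
For orientation, here is a minimal consumer sketch (not part of the package) showing how the new RecorderService API is typically wired up. The import path, the download handling, and the wrapper function names are assumptions; the constructor events, startRecording options, stopRecording, and dispose calls follow the code above.

// Hypothetical consumer of the new RecorderService (names outside the
// service API are illustrative only).
import { RecorderService } from './services/recorder.service.js'; // assumed path

const recorder = new RecorderService({
  onDurationUpdate: (seconds) => console.log(`recording: ${seconds}s`),
  onAudioLevelUpdate: (level) => console.log(`mic level: ${Math.round(level)}%`),
  onStreamEnded: () => console.log('user stopped sharing the screen'),
  onError: (err) => console.error('recording failed', err),
  onRecordingComplete: (blob) => {
    // Illustrative handling: offer the recorded webm for download.
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = url;
    link.download = 'recording.webm';
    link.click();
    URL.revokeObjectURL(url);
  },
});

export async function recordScreen(): Promise<void> {
  // 'screen' captures a monitor; 'viewport' prefers the current tab and
  // crops to viewportElement when the browser supports CropTarget.
  await recorder.startRecording({ mode: 'screen' });
}

export function finishRecording(): void {
  // Stopping fires onRecordingComplete with the webm blob; dispose()
  // releases the capture stream, timers, and audio monitoring resources.
  recorder.stopRecording();
  recorder.dispose();
}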