@prabhjeet.me/wakeywakey 1.0.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,11 @@
1
1
  import * as i0 from '@angular/core';
2
- import { InjectionToken, inject, Injectable, PLATFORM_ID, HostListener, ViewChild, Component, EventEmitter, Output, provideAppInitializer } from '@angular/core';
3
- import { Subject, withLatestFrom, concatMap, filter, map, distinctUntilChanged, switchMap, EMPTY, tap, ignoreElements, timer, merge, take, throttleTime, share, delay, takeUntil, scan } from 'rxjs';
2
+ import { Injectable, InjectionToken, inject, PLATFORM_ID, NgZone, HostListener, ViewChild, Component, EventEmitter, Output, provideAppInitializer } from '@angular/core';
3
+ import { Subject, filter, withLatestFrom, concatMap, map, tap, distinctUntilChanged, switchMap, EMPTY, ignoreElements, timer, merge, take, throttleTime, share, takeUntil, scan } from 'rxjs';
4
4
  import { SubSink } from 'subsink';
5
- import * as THREE from 'three';
6
5
  import { Tensor, env, InferenceSession } from 'onnxruntime-web';
7
6
  import { loadRnnoise, RnnoiseWorkletNode } from '@sapphi-red/web-noise-suppressor';
8
7
  import { isPlatformBrowser, isPlatformServer } from '@angular/common';
8
+ import * as THREE from 'three';
9
9
 
10
10
  /**
11
11
  * Audio sample rate
@@ -143,6 +143,25 @@ class AudioUtil {
143
143
  }
144
144
  }
145
145
 
146
+ class OrbComponentService {
147
+ constructor() {
148
+ this.state = new Subject();
149
+ }
150
+ /**
151
+ * Set state of orb
152
+ */
153
+ setState(state) {
154
+ this.state.next(state);
155
+ }
156
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: OrbComponentService, deps: [], target: i0.ɵɵFactoryTarget.Injectable }); }
157
+ static { this.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: OrbComponentService }); }
158
+ }
159
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: OrbComponentService, decorators: [{
160
+ type: Injectable
161
+ }] });
162
+
163
+ const DEFAULT_THROTTLE_TIME = 1000;
164
+
146
165
  /**
147
166
  * Wakey wakey config token
148
167
  */
@@ -174,7 +193,7 @@ class ConfigService {
174
193
  * Throttle time
175
194
  */
176
195
  get throttleTime() {
177
- return this._config.throttleTime;
196
+ return this._config.throttleTime ?? DEFAULT_THROTTLE_TIME;
178
197
  }
179
198
  /**
180
199
  * Mode
@@ -186,7 +205,13 @@ class ConfigService {
186
205
  * Base path of assets
187
206
  */
188
207
  get basePath() {
189
- return this._config.basePath || '/wakeywakey';
208
+ return this._config.basePath ?? '/wakeywakey';
209
+ }
210
+ /**
211
+ * Hotkey
212
+ */
213
+ get hotkey() {
214
+ return this._config.hotkey ?? 'Space';
190
215
  }
191
216
  static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: ConfigService, deps: [], target: i0.ɵɵFactoryTarget.Injectable }); }
192
217
  static { this.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: ConfigService }); }
@@ -400,6 +425,7 @@ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImpo
400
425
  }] });
401
426
 
402
427
  const DEFAULT_SILENCE_DURATION = 1000;
428
+ const DEFAULT_SPEECH_THRESHOLD_TIME = 300;
403
429
 
404
430
  class MicrophoneService {
405
431
  constructor() {
@@ -416,8 +442,10 @@ class MicrophoneService {
416
442
  * List of available microphones
417
443
  */
418
444
  this._microphones = [];
419
- // Init mic
420
- this._init();
445
+ /**
446
+ * Is muted
447
+ */
448
+ this._isMuted = false;
421
449
  }
422
450
  /**
423
451
  * List of available microphones
@@ -431,39 +459,81 @@ class MicrophoneService {
431
459
  get data() {
432
460
  return this._data;
433
461
  }
462
+ /**
463
+ * Audio context
464
+ */
465
+ get audioContext() {
466
+ return this._audioContext;
467
+ }
468
+ /**
469
+ * Analyzer node
470
+ */
471
+ get analyzer() {
472
+ return this._analyser;
473
+ }
474
+ /**
475
+ * Media steam source node
476
+ */
477
+ get sourceNode() {
478
+ return this._source;
479
+ }
480
+ /**
481
+ * Muted state
482
+ */
483
+ get isMuted() {
484
+ return this._isMuted;
485
+ }
486
+ /**
487
+ * Set gain
488
+ */
489
+ set gain(value) {
490
+ if (this._gainNode)
491
+ this._gainNode.gain.value = value;
492
+ }
493
+ /**
494
+ * Set muted state
495
+ */
496
+ set isMuted(set) {
497
+ this._isMuted = set;
498
+ }
434
499
  /**
435
500
  * Set input source
436
501
  */
437
502
  set source(deviceId) {
438
- this._init(deviceId);
503
+ this.init(deviceId);
439
504
  }
440
505
  ngOnDestroy() {
441
506
  // close audio context
442
507
  this._audioContext?.close();
508
+ this._source?.disconnect();
509
+ this._analyser?.disconnect();
510
+ this._stream?.getTracks().forEach((track) => {
511
+ track.stop();
512
+ });
443
513
  }
444
514
  /**
445
515
  * Initialize
446
516
  *
447
517
  * @param deviceId Input device id (from microphone list)
448
518
  */
449
- async _init(deviceId) {
519
+ async init(deviceId = 'default') {
450
520
  try {
451
521
  // cleanup
452
522
  this.ngOnDestroy();
453
523
  // request permission
454
524
  this._stream = await navigator.mediaDevices.getUserMedia({
455
- audio: !deviceId
456
- ? {
457
- noiseSuppression: false,
458
- echoCancellation: false,
459
- }
460
- : { deviceId: { exact: deviceId } },
525
+ audio: {
526
+ deviceId: { exact: deviceId },
527
+ noiseSuppression: this._config.audio.noiseSuppression?.nativeNoiseSuppression || false,
528
+ echoCancellation: this._config.audio.noiseSuppression?.nativeEchoCancellation || false,
529
+ autoGainControl: this._config.audio.noiseSuppression?.autoGainControl || false,
530
+ },
461
531
  });
462
532
  this._event.log.next(`${MicrophoneService.name}: Microphone permission granted (deviceid: '${deviceId ?? 'default'}')!`);
463
533
  // save list of microphones
464
534
  this._microphones = await this._microphoneList();
465
535
  // monitor audio
466
- this._monitor();
536
+ await this._monitor();
467
537
  return true;
468
538
  }
469
539
  catch (error) {
@@ -501,6 +571,8 @@ class MicrophoneService {
501
571
  async _workletNode() {
502
572
  // Create audio context
503
573
  this._audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
574
+ this._analyser = this._audioContext.createAnalyser();
575
+ this._analyser.fftSize = 256;
504
576
  if (this._config.audio.noiseSuppression) {
505
577
  await this._audioContext.audioWorklet.addModule(this._config.audio.noiseSuppression.worklet ??
506
578
  `${this._config.basePath}/worklets/workletProcessor.js`);
@@ -511,7 +583,7 @@ class MicrophoneService {
511
583
  await this._audioContext.audioWorklet.addModule(workletURL);
512
584
  URL.revokeObjectURL(workletURL);
513
585
  // Create Nodes
514
- const source = this._audioContext.createMediaStreamSource(this._stream);
586
+ this._source = this._audioContext.createMediaStreamSource(this._stream);
515
587
  // Gain Node
516
588
  const gainNode = this._audioContext.createGain();
517
589
  gainNode.gain.value = this._config.audio.gain;
@@ -528,12 +600,16 @@ class MicrophoneService {
528
600
  wasmBinary: rnnoiseWasmBinary,
529
601
  maxChannels: 1, // Standard for mono microphone input
530
602
  });
531
- source.connect(rnnoiseNode);
603
+ this._source.connect(rnnoiseNode);
532
604
  rnnoiseNode.connect(gainNode);
533
605
  }
534
606
  else {
535
- source.connect(gainNode);
607
+ this._source.connect(gainNode);
536
608
  }
609
+ this._source.connect(this._analyser);
610
+ // loop back mic sound
611
+ if (this._config.audio.loopBackToSpeakers)
612
+ this._analyser.connect(this.audioContext.destination);
537
613
  // Custom Worklet Node
538
614
  const workletNode = new AudioWorkletNode(this._audioContext, MICROPHONE_PROCESSOR_NAME);
539
615
  // Connect the Graph: Source -> RNNoise (if noise suppression) -> Gain -> Custom Worklet
@@ -545,7 +621,7 @@ class MicrophoneService {
545
621
  }
546
622
  i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: MicrophoneService, decorators: [{
547
623
  type: Injectable
548
- }], ctorParameters: () => [] });
624
+ }] });
549
625
 
550
626
  class PlatformService {
551
627
  constructor() {
@@ -570,64 +646,6 @@ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImpo
570
646
  type: Injectable
571
647
  }] });
572
648
 
573
- class SpeakerService {
574
- constructor() {
575
- /**
576
- * Dependencies
577
- */
578
- this._config = inject(ConfigService);
579
- this._platform = inject(PlatformService);
580
- this._event = inject(EventService);
581
- this._subs = new SubSink();
582
- if (this._config.audio.sound?.enable === false)
583
- return;
584
- // Audio is only available in browser context
585
- if (this._platform.isBrowser) {
586
- this._upSound = new Audio(this._config.audio.sound?.up ?? `${this._config.basePath}/sounds/up.mp3`);
587
- this._downSound = new Audio(this._config.audio.sound?.down ?? `${this._config.basePath}/sounds/down.mp3`);
588
- this._upSound.preload = this._downSound.preload = 'auto';
589
- this._loadSubscriptions();
590
- }
591
- }
592
- ngOnDestroy() {
593
- this._subs.unsubscribe();
594
- }
595
- /**
596
- * Play on sound
597
- */
598
- playUp() {
599
- if (this._config.audio.sound?.enable === false)
600
- return;
601
- this._upSound.play();
602
- }
603
- /**
604
- * Play off sound
605
- */
606
- playDown() {
607
- if (this._config.audio.sound?.enable === false)
608
- return;
609
- this._downSound.play();
610
- }
611
- /**
612
- * Load subscriptions
613
- */
614
- _loadSubscriptions() {
615
- this._subs.sink = this._event.wakeword.subscribe(() => {
616
- this.playUp();
617
- });
618
- // If default, on silence, play down
619
- this._subs.sink = this._event.silence.subscribe((ev) => {
620
- if (!ev.interimResponse)
621
- this.playDown();
622
- });
623
- }
624
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService, deps: [], target: i0.ɵɵFactoryTarget.Injectable }); }
625
- static { this.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService }); }
626
- }
627
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService, decorators: [{
628
- type: Injectable
629
- }], ctorParameters: () => [] });
630
-
631
649
  class SpeechRecognitionService {
632
650
  constructor() {
633
651
  /**
@@ -668,6 +686,10 @@ class SpeechRecognitionService {
668
686
  this._recognition.lang = 'en-US'; // Set language
669
687
  this._recognition.continuous = true; // Keep listening even if the user pauses
670
688
  this._recognition.interimResults = true; // Show results while the user is still speaking
689
+ this._recognition.onend = () => {
690
+ this.reset();
691
+ this.init(); // start
692
+ };
671
693
  // 3. Handle Results
672
694
  this._recognition.onresult = (event) => {
673
695
  this._transcript = '';
@@ -781,7 +803,6 @@ class AudioService {
781
803
  /**
782
804
  * Dependencies
783
805
  */
784
- this.__speaker = inject(SpeakerService); // Initialize
785
806
  this._config = inject(ConfigService);
786
807
  this._event = inject(EventService);
787
808
  this._mic = inject(MicrophoneService);
@@ -809,6 +830,10 @@ class AudioService {
809
830
  * Initialize audio
810
831
  */
811
832
  async init() {
833
+ if (!this._mic.microphones.length) {
834
+ this._event.exception.next(new Error('Microphone permission required!'));
835
+ return;
836
+ }
812
837
  const data = await this._mic.data;
813
838
  // Init VAD
814
839
  this._vad.init();
@@ -821,7 +846,8 @@ class AudioService {
821
846
  // Fire a speech event
822
847
  this._event.speech.next({
823
848
  ...data,
824
- vadScore: await this._vad.score(data.sample),
849
+ sample: this._mic.isMuted ? new Float32Array(1280).fill(0) : data.sample, // no speech data if muted
850
+ vadScore: await this._vad.score(data.sample), // emit vad score even if muted
825
851
  get hasVoiceActivity() {
826
852
  return this.vadScore > (that._config.audio.vadThreshold ?? DEFAULT_VAD_THRESHOLD);
827
853
  },
@@ -853,6 +879,7 @@ class AudioService {
853
879
  */
854
880
  forceEndRecording() {
855
881
  this._endCurrentRecording = true;
882
+ this._isInitialized = false;
856
883
  this._event.silence.next({
857
884
  chunk: new Float32Array(),
858
885
  transcript: '',
@@ -876,10 +903,24 @@ class AudioService {
876
903
  _listenForWakeword() {
877
904
  const vad$ = this._getWakeWordStream();
878
905
  this._subs.sink = this._event.speech
879
- .pipe(withLatestFrom(vad$), concatMap(async ([speech, vadState]) => {
906
+ .pipe(filter(() => !this._isInitialized && !this._mic.isMuted), withLatestFrom(vad$), concatMap(async ([speech, vadState]) => {
880
907
  const score = await this._pipeline.run(speech);
881
908
  return { speech, score, chunk: vadState.buffer };
882
- }), filter(({ score }) => score > (this._config.onnx.wakewordInferenceThreshold ?? DEFAULT_INFERENCE_SCORE)))
909
+ }), filter(({ score }) => {
910
+ if (this._config.onnx.wakeword) {
911
+ const transcriptArray = this._speechRecognition.transcript
912
+ .toLowerCase()
913
+ .trim()
914
+ .split(' ');
915
+ // use speech recognition for wake word identification
916
+ if (transcriptArray.length === this._config.onnx.wakeword.length)
917
+ for (const idx in this._config.onnx.wakeword)
918
+ if (+idx < transcriptArray.length &&
919
+ transcriptArray[idx].includes(this._config.onnx.wakeword[idx].toLowerCase()))
920
+ return true;
921
+ }
922
+ return score > (this._config.onnx.wakewordInferenceThreshold ?? DEFAULT_INFERENCE_SCORE);
923
+ }))
883
924
  .subscribe(({ speech, score, chunk }) => {
884
925
  this._event.wakeword.next({ ...speech, inferenceScore: score, chunk });
885
926
  });
@@ -890,66 +931,88 @@ class AudioService {
890
931
  _captureCommandAfterWakeword() {
891
932
  const SILENCE_DURATION = this._config.audio.silenceDuration ?? DEFAULT_SILENCE_DURATION;
892
933
  const VAD_THRESHOLD = this._config.audio.vadThreshold ?? DEFAULT_VAD_THRESHOLD;
893
- // --- TRIGGER 1: Wakeword ---
934
+ const PRE_ROLL_MS = this._config.audio.speechThresholdTime ?? DEFAULT_SPEECH_THRESHOLD_TIME;
935
+ const CHUNK_DURATION_MS = 80; // Each speech event is fired (by audio processor) at 80 ms
936
+ const MAX_SLIDING_WINDOW_CHUNKS = Math.ceil(PRE_ROLL_MS / CHUNK_DURATION_MS);
937
+ // speech sliding window state
938
+ const speechSlidingWindow = [];
939
+ // trigger 1: wakeword
894
940
  const wakewordTrigger$ = this._event.wakeword.pipe(filter(() => !this._isRecording), // Ignore wakeword if already recording
895
941
  map(() => []));
896
- // --- TRIGGER 2: Continuous VAD > THRESHOLD for 1 second ---
897
- const continuousVadTrigger$ = this._event.speech.pipe(map((s) => s.vadScore > VAD_THRESHOLD), filter(() => !this._isRecording && this._isInitialized), // Ignore and prevent background buffering if already recording
898
- distinctUntilChanged(), switchMap((isVoiceActive) => {
942
+ // trigger 2: Continuous VAD > THRESHOLD
943
+ const continuousVadTrigger$ = this._event.speech.pipe(
944
+ // intercept the raw stream to constantly update the threshold window
945
+ tap((s) => {
946
+ // Only buffer if we aren't already formally recording
947
+ if (!this._isRecording && this._isInitialized) {
948
+ speechSlidingWindow.push(s.sample);
949
+ // Keep the array length strictly to our configured window size
950
+ if (speechSlidingWindow.length > MAX_SLIDING_WINDOW_CHUNKS)
951
+ speechSlidingWindow.shift();
952
+ }
953
+ }),
954
+ // check VAD score
955
+ map((s) => s.vadScore > VAD_THRESHOLD), filter(() => !this._isRecording && this._isInitialized), distinctUntilChanged(), switchMap((isVoiceActive) => {
899
956
  if (!isVoiceActive)
900
- return EMPTY; // Cancel if voice stops
957
+ return EMPTY; // Cancel if voice stops before threshold
901
958
  this._speechRecognition.reset();
902
- const bufferedChunks = [];
903
- // 1. Accumulate audio chunks silently
959
+ // PRE-FILL the actual recording buffer with our duration (ex: 300ms) look back window!
960
+ // clone so future sliding window updates don't mutate the captured audio.
961
+ const bufferedChunks = [...speechSlidingWindow];
962
+ // continue accumulating new chunks silently while we wait for the timer
904
963
  const buffer$ = this._event.speech.pipe(tap((s) => bufferedChunks.push(s.sample)), ignoreElements());
905
- // 2. Timer that emits the accumulated chunks after 1 second
906
- const timer$ = timer(300).pipe(map(() => bufferedChunks));
907
- // Merge both. If the timer fires, take(1) stops the buffer$ stream.
908
- // If isVoiceActive turns false before 1s, switchMap cancels both.
964
+ // timer that emits the fully accumulated chunks (pre-roll)
965
+ const timer$ = timer(PRE_ROLL_MS).pipe(map(() => bufferedChunks));
909
966
  return merge(buffer$, timer$).pipe(take(1));
910
967
  }));
911
- // --- COMBINE TRIGGERS ---
968
+ // combine triggers
912
969
  const startRecordingTrigger$ = merge(wakewordTrigger$.pipe(tap(() => {
913
970
  if (!this._isInitialized)
914
971
  this._isInitialized = true; // initialized
915
- })), continuousVadTrigger$).pipe(throttleTime(1000));
916
- // --- MAIN RECORDING PIPELINE ---
972
+ })), continuousVadTrigger$).pipe(throttleTime(this._config.throttleTime));
973
+ // recording pipeline
917
974
  this._subs.sink = startRecordingTrigger$
918
975
  .pipe(tap(() => {
919
976
  this._isRecording = true;
920
977
  this._speechRecognition.reset();
921
- this._event.recording.next(); // recording event
922
978
  }), switchMap((bufferedChunks) => {
923
979
  // Initialize our command chunks with anything captured during the 1s VAD wait
924
980
  const commandChunks = [...bufferedChunks];
925
- const speech$ = this._event.speech.pipe(tap((speech) => commandChunks.push(speech.sample)), share());
981
+ const speech$ = this._event.speech.pipe(tap((speech) => {
982
+ commandChunks.push(speech.sample);
983
+ if (this._isRecording)
984
+ // emit recording events
985
+ this._event.recording.next({
986
+ chunk: this._flatten(commandChunks),
987
+ transcript: this._speechRecognition.transcript,
988
+ });
989
+ }), share());
926
990
  const silence$ = speech$.pipe(map((s) => s.vadScore < VAD_THRESHOLD), distinctUntilChanged());
927
- // 1. Normal silence timeout logic
928
- const normalSilenceTimeout$ = silence$.pipe(delay(500), switchMap((isSilent) => {
991
+ // silence timeout logic
992
+ const normalSilenceTimeout$ = silence$.pipe(switchMap((isSilent) => {
929
993
  if (!isSilent) {
930
994
  return EMPTY; // if voice cancel the timer
931
995
  }
932
996
  // silence started, start timer
933
997
  return timer(SILENCE_DURATION).pipe(takeUntil(silence$.pipe(filter((silent) => !silent))));
934
998
  }));
935
- // 2. Force complete logic checking the variable
999
+ // force end recording
936
1000
  const forceComplete$ = speech$.pipe(filter(() => this._endCurrentRecording));
937
- // 3. Complete whenever the timer fires OR the flag is set to true
938
1001
  return merge(normalSilenceTimeout$, forceComplete$).pipe(take(1), map(() => this._flatten(commandChunks)));
939
1002
  }))
940
1003
  .subscribe({
941
1004
  next: (chunk) => {
942
- const interimResponse = this._config.mode === 'DEFAULT' ? false : true;
1005
+ const interimResponse = this._config.mode === 'DEFAULT' || this._config.mode === 'PTT' ? false : true;
943
1006
  this._event.silence.next({
944
1007
  chunk,
945
1008
  transcript: this._speechRecognition.transcript,
946
1009
  interimResponse,
947
1010
  }); // emit silence event
948
1011
  // Default case
949
- if (this._config.mode === 'DEFAULT') {
1012
+ if (this._config.mode === 'DEFAULT' || this._config.mode === 'PTT') {
950
1013
  this._isInitialized = false;
951
- this._endCurrentRecording = false; // reset flag after recording ends
952
1014
  }
1015
+ this._endCurrentRecording = false; // reset flag after recording ends
953
1016
  this._isRecording = false;
954
1017
  },
955
1018
  error: (err) => {
@@ -1011,217 +1074,537 @@ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImpo
1011
1074
  type: Injectable
1012
1075
  }] });
1013
1076
 
1014
- class OrbComponent {
1077
+ class SpeakerService {
1015
1078
  constructor() {
1079
+ /**
1080
+ * Dependencies
1081
+ */
1016
1082
  this._config = inject(ConfigService);
1017
1083
  this._platform = inject(PlatformService);
1018
- this._audio = inject(AudioService);
1019
1084
  this._event = inject(EventService);
1085
+ this._mic = inject(MicrophoneService);
1086
+ this._orb = inject(OrbComponentService);
1087
+ this._audio = inject(AudioService);
1020
1088
  this._subs = new SubSink();
1021
- this.originalVertices = null;
1022
- this.targetIntensity = 0;
1023
- this.currentIntensity = 0;
1024
- this.clock = new THREE.Timer();
1025
- this.elapsedTime = 0;
1026
- /**
1027
- * Should orb be reacting to speech
1028
- */
1029
- this._isActive = false;
1030
- /**
1031
- * Animate
1032
- */
1033
- this._animate = () => {
1034
- this.animationId = requestAnimationFrame(this._animate);
1035
- // 1. Get the time passed since the last frame (delta)
1036
- const delta = this.clock.getDelta();
1037
- // 2. Smoothly update currentIntensity
1038
- this.currentIntensity += (this.targetIntensity - this.currentIntensity) * 0.05;
1039
- // 3. Increment our own elapsedTime ticker.
1040
- // We multiply delta by intensity so the pulse speeds up when busy,
1041
- // but it won't "run away" as performance.now() grows.
1042
- const speedFactor = 1 + this.currentIntensity / 20;
1043
- this.elapsedTime += delta * speedFactor;
1044
- // 4. Rotation (Constant per frame, scaled by intensity)
1045
- this.orb.rotation.y += 0.005 + this.currentIntensity / 5000;
1046
- this.orb.rotation.z += 0.002;
1047
- // 5. Vertex Displacement
1048
- const positionAttribute = this.orb.geometry.getAttribute('position');
1049
- for (let i = 0; i < positionAttribute.count; i++) {
1050
- const ix = i * 3;
1051
- const iy = i * 3 + 1;
1052
- const iz = i * 3 + 2;
1053
- const x = this.originalVertices[ix];
1054
- const y = this.originalVertices[iy];
1055
- const z = this.originalVertices[iz];
1056
- // We use this.elapsedTime instead of performance.now()
1057
- // This creates a stable frequency regardless of how long the app has been open
1058
- const wave = Math.sin(x * 2 + this.elapsedTime) *
1059
- Math.cos(y * 2 + this.elapsedTime) *
1060
- (this.currentIntensity / 300);
1061
- const currentRadius = Math.sqrt(x * x + y * y + z * z) + wave;
1062
- const finalScale = Math.min(currentRadius, 2.5) / 1.5;
1063
- positionAttribute.setXYZ(i, x * finalScale, y * finalScale, z * finalScale);
1064
- }
1065
- positionAttribute.needsUpdate = true;
1066
- // Optional: Update material feedback based on intensity
1067
- const material = this.orb.material;
1068
- material.emissiveIntensity = 0.2 + this.currentIntensity / 100;
1069
- material.opacity = 0.3 + this.currentIntensity / 200;
1070
- this.renderer.render(this.scene, this.camera);
1071
- };
1089
+ this._nextPlayTime = 0;
1090
+ this._sources = [];
1091
+ if (this._config.audio.sound?.enable === false)
1092
+ return;
1093
+ // Audio is only available in browser context
1094
+ if (this._platform.isBrowser) {
1095
+ this._upSound = new Audio(this._config.audio.sound?.up ?? `${this._config.basePath}/sounds/up.mp3`);
1096
+ this._downSound = new Audio(this._config.audio.sound?.down ?? `${this._config.basePath}/sounds/down.mp3`);
1097
+ this._upSound.preload = this._downSound.preload = 'auto';
1098
+ this._loadSubscriptions();
1099
+ }
1072
1100
  }
1073
- get isRecording() {
1074
- return this._audio.isRecording;
1101
+ ngOnDestroy() {
1102
+ this._subs.unsubscribe();
1075
1103
  }
1076
- get orbSize() {
1077
- return this._config.orb?.size ?? 400;
1104
+ /**
1105
+ * Play audio chunk
1106
+ *
1107
+ * @param buffer audio buffer
1108
+ * @param sampleRate sample rate to play in
1109
+ */
1110
+ playChunk(buffer, sampleRate) {
1111
+ // Calculate how many full 32-bit floats fit in this buffer
1112
+ const float32Count = Math.floor(buffer.byteLength / 4);
1113
+ // Convert raw bytes back to 32-bit floats
1114
+ const float32Array = new Float32Array(buffer, 0, float32Count);
1115
+ // Create an empty audio buffer mapping
1116
+ const audioBuffer = this._mic.audioContext.createBuffer(1, float32Array.length, sampleRate);
1117
+ audioBuffer.copyToChannel(float32Array, 0);
1118
+ this.playAudioBuffer(audioBuffer);
1078
1119
  }
1079
1120
  /**
1080
- * Change color or orb
1121
+ * Play audio buffer
1081
1122
  *
1082
- * @param color
1083
- * @param emissive
1123
+ * @param audioBuffer
1084
1124
  */
1085
- changeColor(color, emissive) {
1086
- const material = this.orb.material;
1087
- material.color = color;
1088
- material.emissive = emissive;
1125
+ playAudioBuffer(audioBuffer) {
1126
+ // if recording, clear the queue
1127
+ if (this._audio.isRecording) {
1128
+ this._clearQueue();
1129
+ return;
1130
+ }
1131
+ // Create a source node to play the buffer
1132
+ const source = this._mic.audioContext.createBufferSource();
1133
+ source.buffer = audioBuffer;
1134
+ source.connect(this._mic.analyzer);
1135
+ source.connect(this._mic.audioContext.destination);
1136
+ // Schedule the chunk to play exactly when the previous chunk finishes
1137
+ const currentTime = this._mic.audioContext.currentTime;
1138
+ if (this._nextPlayTime < currentTime) {
1139
+ this._nextPlayTime = currentTime; // Reset if the queue has emptied
1140
+ }
1141
+ source.start(this._nextPlayTime);
1142
+ this._sources.push(source); // keep instance of source to stop later
1143
+ this._nextPlayTime += audioBuffer.duration;
1144
+ if (!this._config.orb?.mode || this._config.orb?.mode === 'auto')
1145
+ this._orb.setState('speaking'); // speaking
1089
1146
  }
1090
1147
  /**
1091
- * Toggle recording
1148
+ * Play on sound
1092
1149
  */
1093
- toggleRecording() {
1094
- this._audio.toggleRecording();
1150
+ playUp() {
1151
+ if (this._config.audio.sound?.enable === false)
1152
+ return;
1153
+ this._upSound.play();
1095
1154
  }
1096
- ngOnInit() {
1097
- if (this._platform.isServer)
1155
+ /**
1156
+ * Play off sound
1157
+ */
1158
+ playDown() {
1159
+ if (this._config.audio.sound?.enable === false)
1098
1160
  return;
1099
- this._init();
1100
- this._animate();
1101
- this._loadSubscribers();
1161
+ this._downSound.play();
1162
+ }
1163
+ /**
1164
+ * Clear playback queue
1165
+ */
1166
+ _clearQueue() {
1167
+ this._sources.forEach((s) => s.stop());
1168
+ this._sources = [];
1169
+ this._nextPlayTime = 0;
1170
+ }
1171
+ /**
1172
+ * Load subscriptions
1173
+ */
1174
+ _loadSubscriptions() {
1175
+ this._subs.sink = this._event.wakeword
1176
+ .pipe(throttleTime(this._config.throttleTime))
1177
+ .subscribe(() => {
1178
+ this.playUp();
1179
+ });
1180
+ // If default, on silence, play down
1181
+ this._subs.sink = this._event.silence.subscribe((ev) => {
1182
+ if (!ev.interimResponse)
1183
+ this.playDown();
1184
+ });
1185
+ // If recording event
1186
+ this._subs.sink = this._event.recording.subscribe(() => {
1187
+ this._clearQueue();
1188
+ });
1189
+ }
1190
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService, deps: [], target: i0.ɵɵFactoryTarget.Injectable }); }
1191
+ static { this.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService }); }
1192
+ }
1193
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: SpeakerService, decorators: [{
1194
+ type: Injectable
1195
+ }], ctorParameters: () => [] });
1196
+
1197
+ class BridgeService {
1198
+ constructor() {
1199
+ this.mic = inject(MicrophoneService);
1200
+ this.speaker = inject(SpeakerService);
1201
+ this.speechRecognition = inject(SpeechRecognitionService);
1202
+ this.vad = inject(VadService);
1203
+ this.audio = inject(AudioService);
1204
+ this.config = inject(ConfigService);
1205
+ this.event = inject(EventService);
1206
+ this.model = inject(ModelService);
1207
+ this.pipeline = inject(PipelineService);
1208
+ this.platform = inject(PlatformService);
1209
+ this.orbComponentService = inject(OrbComponentService);
1210
+ }
1211
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: BridgeService, deps: [], target: i0.ɵɵFactoryTarget.Injectable }); }
1212
+ static { this.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: BridgeService }); }
1213
+ }
1214
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: BridgeService, decorators: [{
1215
+ type: Injectable
1216
+ }] });
1217
+
1218
+ /* eslint-disable @typescript-eslint/member-ordering */
1219
+ /* eslint-disable @typescript-eslint/prefer-for-of */
1220
+ /* eslint-disable @typescript-eslint/no-explicit-any */
1221
+ class OrbComponent {
1222
+ constructor() {
1223
+ this._config = inject(ConfigService);
1224
+ this._platform = inject(PlatformService);
1225
+ this._audio = inject(AudioService);
1226
+ this._mic = inject(MicrophoneService);
1227
+ this._event = inject(EventService);
1228
+ this._ngZone = inject(NgZone);
1229
+ this._service = inject(OrbComponentService);
1230
+ this._subs = new SubSink();
1231
+ this._clock = new THREE.Clock();
1232
+ this.micVolume = 0;
1233
+ // State Management
1234
+ this.currentState = 'idle';
1235
+ this.agentProfiles = {
1236
+ idle: {
1237
+ spike: 0.05,
1238
+ noiseScale: 1.0,
1239
+ speed: 0.2,
1240
+ twist: 0.0,
1241
+ pulse: 0.0,
1242
+ base: '#001133',
1243
+ peak: '#00aaff',
1244
+ },
1245
+ listening: {
1246
+ spike: 0.2,
1247
+ noiseScale: 2.5,
1248
+ speed: 1.0,
1249
+ twist: 0.0,
1250
+ pulse: 0.0,
1251
+ base: '#002211',
1252
+ peak: '#00ff88',
1253
+ },
1254
+ thinking: {
1255
+ spike: 0.2,
1256
+ noiseScale: 1.5,
1257
+ speed: 1.5,
1258
+ twist: 1.5,
1259
+ pulse: 0.0,
1260
+ base: '#220033',
1261
+ peak: '#ff00ff',
1262
+ },
1263
+ speaking: {
1264
+ spike: 0.1,
1265
+ noiseScale: 1.0,
1266
+ speed: 0.5,
1267
+ twist: 0.0,
1268
+ pulse: 0.1,
1269
+ base: '#331100',
1270
+ peak: '#ff8800',
1271
+ },
1272
+ };
1273
+ this.targets = { ...this.agentProfiles.idle };
1274
+ this.targetColorBase = new THREE.Color(this.targets.base);
1275
+ this.targetColorPeak = new THREE.Color(this.targets.peak);
1276
+ /**
1277
+ * Animator
1278
+ */
1279
+ this._animate = () => {
1280
+ this._animationFrameId = requestAnimationFrame(this._animate);
1281
+ const elapsedTime = this._clock.getElapsedTime();
1282
+ this._material.uniforms['uTime'].value = elapsedTime;
1283
+ let dynamicSpike = this.targets.spike;
1284
+ let dynamicPulse = this.targets.pulse;
1285
+ if (this.currentState === 'listening') {
1286
+ dynamicSpike += this.micVolume * 1.0;
1287
+ }
1288
+ else if (this.currentState === 'speaking') {
1289
+ const ttsVolume = this._getTTSVolume();
1290
+ dynamicPulse += ttsVolume * 0.5;
1291
+ }
1292
+ const lerpFactor = 0.08;
1293
+ this._material.uniforms['uSpike'].value +=
1294
+ (dynamicSpike - this._material.uniforms['uSpike'].value) * lerpFactor;
1295
+ this._material.uniforms['uPulse'].value +=
1296
+ (dynamicPulse - this._material.uniforms['uPulse'].value) * lerpFactor;
1297
+ this._material.uniforms['uNoiseScale'].value +=
1298
+ (this.targets.noiseScale - this._material.uniforms['uNoiseScale'].value) * lerpFactor;
1299
+ this._material.uniforms['uSpeed'].value +=
1300
+ (this.targets.speed - this._material.uniforms['uSpeed'].value) * lerpFactor;
1301
+ this._material.uniforms['uTwist'].value +=
1302
+ (this.targets.twist - this._material.uniforms['uTwist'].value) * lerpFactor;
1303
+ this._material.uniforms['uColorBase'].value.lerp(this.targetColorBase, lerpFactor);
1304
+ this._material.uniforms['uColorPeak'].value.lerp(this.targetColorPeak, lerpFactor);
1305
+ this._sphere.rotation.y = elapsedTime * 0.1;
1306
+ this._sphere.rotation.z = elapsedTime * 0.01;
1307
+ this._renderer.render(this._scene, this._camera);
1308
+ };
1309
+ }
1310
+ get orbSize() {
1311
+ return this._config.orb?.size ?? 400;
1102
1312
  }
1103
- ngOnChanges(changes) {
1313
+ ngAfterViewInit() {
1104
1314
  if (this._platform.isServer)
1105
1315
  return;
1106
- if (changes['intensity']) {
1107
- this.targetIntensity = changes['intensity'].currentValue;
1108
- }
1316
+ this.dataArray = new Uint8Array(this._mic.analyzer.frequencyBinCount);
1317
+ this._loadSubscribers();
1318
+ // MUST run outside Angular to prevent CD loops
1319
+ this._ngZone.runOutsideAngular(() => {
1320
+ this._initThreeJs();
1321
+ this._animate();
1322
+ });
1109
1323
  }
1110
1324
  ngOnDestroy() {
1111
1325
  if (this._platform.isServer)
1112
1326
  return;
1113
- cancelAnimationFrame(this.animationId);
1114
- this.renderer.dispose();
1115
- this.orb.geometry.dispose();
1116
- this.orb.material.dispose();
1327
+ cancelAnimationFrame(this._animationFrameId);
1117
1328
  this._subs.unsubscribe();
1329
+ if (this._renderer) {
1330
+ this._renderer.dispose();
1331
+ this._renderer.forceContextLoss();
1332
+ // Remove canvas from DOM to ensure cleanup
1333
+ const domElement = this._renderer.domElement;
1334
+ if (domElement && domElement.parentNode) {
1335
+ domElement.parentNode.removeChild(domElement);
1336
+ }
1337
+ }
1338
+ if (this._geometry)
1339
+ this._geometry.dispose();
1340
+ if (this._material)
1341
+ this._material.dispose();
1342
+ // Allow garbage collection
1343
+ this._scene = null;
1344
+ this._camera = null;
1345
+ this._renderer = null;
1118
1346
  }
1347
+ /**
1348
+ * Space bar press
1349
+ */
1119
1350
  handleSpacebarPress(event) {
1120
- event.preventDefault(); // Prevents the default space bar action (e.g., scrolling)
1351
+ if (event.code !== this._config.hotkey)
1352
+ return;
1353
+ event.preventDefault();
1121
1354
  this.toggleRecording();
1122
1355
  }
1123
1356
  /**
1124
- * Initialize
1357
+ * Toggle recording
1125
1358
  */
1126
- _init() {
1127
- this.scene = new THREE.Scene();
1128
- this.camera = new THREE.PerspectiveCamera(45, 1, 0.1, 1000);
1129
- this.camera.position.z = 3;
1130
- this.renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
1131
- this.renderer.setPixelRatio(window.devicePixelRatio);
1132
- this.rendererContainer.nativeElement.appendChild(this.renderer.domElement);
1133
- // Initial resize to fit container
1134
- this._resize();
1135
- // Orb Geometry (Icosahedron for organic detail)
1136
- const geometry = new THREE.IcosahedronGeometry(1.2, 32);
1137
- this.originalVertices = geometry.attributes['position'].array.slice();
1138
- const material = new THREE.MeshStandardMaterial({
1139
- color: 'red',
1140
- wireframe: true,
1141
- transparent: true,
1142
- opacity: 0.6,
1143
- emissive: 'red',
1144
- emissiveIntensity: 0.5,
1145
- });
1146
- this.orb = new THREE.Mesh(geometry, material);
1147
- this.scene.add(this.orb);
1148
- const light = new THREE.PointLight(0xffffff, 15, 10);
1149
- light.position.set(2, 2, 2);
1150
- this.scene.add(light);
1151
- this.scene.add(new THREE.AmbientLight(0x404040));
1359
+ toggleRecording() {
1360
+ this._audio.toggleRecording();
1152
1361
  }
1153
1362
  /**
1154
- * Resize container
1363
+ * Set state of orb
1364
+ *
1365
+ * @param state orb state
1155
1366
  */
1156
- _resize() {
1157
- const width = this.orbSize ?? this.rendererContainer.nativeElement.clientWidth;
1158
- const height = this.orbSize ?? this.rendererContainer.nativeElement.clientHeight;
1159
- this.renderer.setSize(width, height);
1160
- this.camera.aspect = width / height;
1161
- this.camera.updateProjectionMatrix();
1367
+ setState(state) {
1368
+ const profile = this.agentProfiles[state];
1369
+ this.currentState = state;
1370
+ this.targets = { ...profile };
1371
+ this.targetColorBase.set(profile.base);
1372
+ this.targetColorPeak.set(profile.peak);
1162
1373
  }
1163
1374
  /**
1164
1375
  * Subscriptions
1165
1376
  */
1166
1377
  _loadSubscribers() {
1167
- // Speech event
1168
- this._subs.sink = this._event.speech.subscribe((e) => {
1169
- if (this._isActive)
1170
- this.targetIntensity = e.dbNormalized * 100;
1171
- else
1172
- this.targetIntensity = 0;
1173
- });
1174
- // Wake word event
1175
- this._subs.sink = this._event.wakeword.subscribe(() => {
1176
- this.changeColor(new THREE.Color(0x00d2ff), new THREE.Color(0x0066ff));
1177
- this._isActive = true;
1378
+ this._subs.sink = this._event.speech.subscribe((data) => {
1379
+ this.micVolume = this._mic.isMuted ? 0 : data.dbNormalized;
1178
1380
  });
1179
- // Silence
1180
- this._subs.sink = this._event.silence.subscribe((ev) => {
1181
- if (!ev.interimResponse) {
1182
- // on silence, set color to red
1183
- this.changeColor(new THREE.Color('red'), new THREE.Color('red'));
1184
- this._isActive = false;
1185
- }
1381
+ // only update orb state if mode is auto
1382
+ if (!this._config.orb?.mode || this._config.orb?.mode === 'auto') {
1383
+ // after wakeword, set to listening
1384
+ this._subs.sink = this._event.wakeword.subscribe(() => {
1385
+ this.setState('listening');
1386
+ });
1387
+ // after silence, set thinking or idle
1388
+ this._subs.sink = this._event.silence.subscribe((ev) => {
1389
+ if (ev.interimResponse)
1390
+ this.setState('thinking');
1391
+ else
1392
+ this.setState('idle');
1393
+ });
1394
+ // if recording started
1395
+ this._subs.sink = this._event.recording.subscribe(() => {
1396
+ this.setState('listening');
1397
+ });
1398
+ // state change using service
1399
+ this._subs.sink = this._service.state.subscribe((state) => {
1400
+ this.setState(state);
1401
+ });
1402
+ }
1403
+ }
1404
+ /**
1405
+ * Speech volume for animation
1406
+ */
1407
+ _getTTSVolume() {
1408
+ this._mic.analyzer.getByteFrequencyData(this.dataArray);
1409
+ let sum = 0;
1410
+ for (let i = 0; i < this.dataArray.length; i++) {
1411
+ sum += this.dataArray[i];
1412
+ }
1413
+ return sum / this.dataArray.length / 255.0;
1414
+ }
1415
+ /**
1416
+ * Init
1417
+ */
1418
+ _initThreeJs() {
1419
+ const size = this.orbSize;
1420
+ this._scene = new THREE.Scene();
1421
+ this._camera = new THREE.PerspectiveCamera(50, 1, 0.1, 1000);
1422
+ this._camera.position.z = 6;
1423
+ this._renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
1424
+ this._renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
1425
+ this._renderer.setSize(size, size);
1426
+ this._rendererContainer.nativeElement.appendChild(this._renderer.domElement);
1427
+ const particlesCount = this._config.orb?.particlesCount ?? 30000;
1428
+ this._geometry = new THREE.BufferGeometry();
1429
+ const posArray = new Float32Array(particlesCount * 3);
1430
+ const randomArray = new Float32Array(particlesCount);
1431
+ const radius = this._config.orb?.radius ?? 1.8;
1432
+ for (let i = 0; i < particlesCount; i++) {
1433
+ const phi = Math.acos(-1 + (2 * i) / particlesCount);
1434
+ const theta = Math.sqrt(particlesCount * Math.PI) * phi;
1435
+ posArray[i * 3] = radius * Math.cos(theta) * Math.sin(phi);
1436
+ posArray[i * 3 + 1] = radius * Math.sin(theta) * Math.sin(phi);
1437
+ posArray[i * 3 + 2] = radius * Math.cos(phi);
1438
+ randomArray[i] = Math.random();
1439
+ }
1440
+ this._geometry.setAttribute('position', new THREE.BufferAttribute(posArray, 3));
1441
+ this._geometry.setAttribute('aRandom', new THREE.BufferAttribute(randomArray, 1));
1442
+ this._material = new THREE.ShaderMaterial({
1443
+ uniforms: {
1444
+ uTime: { value: 0 },
1445
+ uSpike: { value: 0.05 },
1446
+ uNoiseScale: { value: 1.0 },
1447
+ uSpeed: { value: 0.2 },
1448
+ uTwist: { value: 0.0 },
1449
+ uPulse: { value: 0.0 },
1450
+ uColorBase: { value: new THREE.Color('#002244') },
1451
+ uColorPeak: { value: new THREE.Color('#00ffff') },
1452
+ },
1453
+ vertexShader: this._getVertexShader(),
1454
+ fragmentShader: this._getFragmentShader(),
1455
+ transparent: true,
1456
+ blending: THREE.AdditiveBlending,
1457
+ depthWrite: false,
1186
1458
  });
1459
+ this._sphere = new THREE.Points(this._geometry, this._material);
1460
+ this._scene.add(this._sphere);
1461
+ }
1462
/**
 * Vertex shader source (GLSL) for the particle sphere.
 *
 * Twists positions around Y by `uTwist`, displaces each vertex along its
 * normal by simplex noise scaled by `uSpike` plus a uniform `uPulse`, and
 * sets a distance-attenuated point size with a per-particle shimmer driven
 * by the `aRandom` attribute. Exports the displacement to the fragment
 * shader via `vDisplacement`.
 *
 * Note: the string content is the runtime shader — do not edit casually.
 */
_getVertexShader() {
    return `
      uniform float uTime;
      uniform float uSpike;
      uniform float uNoiseScale;
      uniform float uSpeed;
      uniform float uTwist;
      uniform float uPulse;

      varying vec3 vColor;
      varying float vDisplacement;

      attribute float aRandom;

      // Simplex 3D Noise
      vec4 permute(vec4 x){return mod(((x*34.0)+1.0)*x, 289.0);}
      vec4 taylorInvSqrt(vec4 r){return 1.79284291400159 - 0.85373472095314 * r;}
      float snoise(vec3 v){
        const vec2 C = vec2(1.0/6.0, 1.0/3.0);
        const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
        vec3 i = floor(v + dot(v, C.yyy) );
        vec3 x0 = v - i + dot(i, C.xxx) ;
        vec3 g = step(x0.yzx, x0.xyz);
        vec3 l = 1.0 - g;
        vec3 i1 = min( g.xyz, l.zxy );
        vec3 i2 = max( g.xyz, l.zxy );
        vec3 x1 = x0 - i1 + 1.0 * C.xxx;
        vec3 x2 = x0 - i2 + 2.0 * C.xxx;
        vec3 x3 = x0 - 1.0 + 3.0 * C.xxx;
        i = mod(i, 289.0 );
        vec4 p = permute( permute( permute( i.z + vec4(0.0, i1.z, i2.z, 1.0 )) + i.y + vec4(0.0, i1.y, i2.y, 1.0 )) + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
        float n_ = 1.0/7.0;
        vec3 ns = n_ * D.wyz - D.xzx;
        vec4 j = p - 49.0 * floor(p * ns.z *ns.z);
        vec4 x_ = floor(j * ns.z);
        vec4 y_ = floor(j - 7.0 * x_ );
        vec4 x = x_ *ns.x + ns.yyyy;
        vec4 y = y_ *ns.x + ns.yyyy;
        vec4 h = 1.0 - abs(x) - abs(y);
        vec4 b0 = vec4( x.xy, y.xy );
        vec4 b1 = vec4( x.zw, y.zw );
        vec4 s0 = floor(b0)*2.0 + 1.0;
        vec4 s1 = floor(b1)*2.0 + 1.0;
        vec4 sh = -step(h, vec4(0.0));
        vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
        vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
        vec3 p0 = vec3(a0.xy,h.x);
        vec3 p1 = vec3(a0.zw,h.y);
        vec3 p2 = vec3(a1.xy,h.z);
        vec3 p3 = vec3(a1.zw,h.w);
        vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
        p0 *= norm.x; p1 *= norm.y; p2 *= norm.z; p3 *= norm.w;
        vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
        m = m * m;
        return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1), dot(p2,x2), dot(p3,x3) ) );
      }

      void main() {
        vec3 pos = position;
        float time = uTime * uSpeed;

        float angle = pos.y * uTwist;
        mat2 rot = mat2(cos(angle), -sin(angle), sin(angle), cos(angle));
        pos.xz *= rot;

        vec3 normal = normalize(pos);
        float noise = snoise(pos * uNoiseScale + time);

        float totalDisplacement = (noise * uSpike) + uPulse;
        pos += normal * totalDisplacement;

        vDisplacement = totalDisplacement;

        vec4 mvPosition = modelViewMatrix * vec4(pos, 1.0);

        gl_PointSize = (15.0 + totalDisplacement * 20.0) * (1.0 / -mvPosition.z);
        gl_PointSize *= (1.0 + sin(uTime * 5.0 + aRandom * 50.0) * 0.2);

        gl_Position = projectionMatrix * mvPosition;
      }
    `;
}
1547
/**
 * Fragment shader source (GLSL) for the particle sphere.
 *
 * Renders each point as a soft round sprite (discarding outside a radius
 * of 0.5, alpha falling off quadratically) and mixes the base/peak colour
 * uniforms by the vertex displacement via smoothstep.
 *
 * Note: the string content is the runtime shader — do not edit casually.
 */
_getFragmentShader() {
    return `
      varying float vDisplacement;
      uniform vec3 uColorBase;
      uniform vec3 uColorPeak;

      void main() {
        float dist = distance(gl_PointCoord, vec2(0.5));
        if (dist > 0.5) discard;
        float alpha = 1.0 - pow(dist * 2.0, 2.0);

        float mixValue = smoothstep(-0.2, 0.5, vDisplacement);
        vec3 finalColor = mix(uColorBase, uColorPeak, mixValue);

        gl_FragColor = vec4(finalColor, alpha * 0.9);
      }
    `;
}
1188
1568
  static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: OrbComponent, deps: [], target: i0.ɵɵFactoryTarget.Component }); }
1189
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "20.3.16", type: OrbComponent, isStandalone: true, selector: "app-orb-component", host: { listeners: { "window:keydown.Space": "handleSpacebarPress($event)" } }, viewQueries: [{ propertyName: "rendererContainer", first: true, predicate: ["rendererContainer"], descendants: true, static: true }], usesOnChanges: true, ngImport: i0, template: `<div
1190
- #rendererContainer
1191
- tabindex="0"
1192
- role="button"
1193
- class="orb-viewport"
1194
- [style.height]="orbSize"
1195
- [style.width]="orbSize"
1196
- (click)="toggleRecording()"
1197
- (keypress)="toggleRecording()"
1198
- ></div>`, isInline: true, styles: [".orb-viewport{background:transparent;cursor:pointer}\n"] }); }
1569
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "20.3.16", type: OrbComponent, isStandalone: true, selector: "app-orb-component", host: { listeners: { "window:keydown": "handleSpacebarPress($event)" } }, viewQueries: [{ propertyName: "_rendererContainer", first: true, predicate: ["rendererContainer"], descendants: true, static: true }], ngImport: i0, template: `
1570
+ <div
1571
+ #rendererContainer
1572
+ role="button"
1573
+ tabindex="0"
1574
+ class="orb-container"
1575
+ [style.width.px]="orbSize"
1576
+ [style.height.px]="orbSize"
1577
+ (click)="toggleRecording()"
1578
+ (keypress)="toggleRecording()"
1579
+ ></div>
1580
+ `, isInline: true, styles: [".orb-container{display:flex;justify-content:center;align-items:center;overflow:hidden;cursor:pointer}canvas{display:block;outline:none}\n"] }); }
1199
1581
  }
1200
1582
// Angular compiler-generated class-metadata registration mirroring the
// original @Component / @ViewChild / @HostListener decorators.
// Emitted by the Angular compiler — do not edit by hand.
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "20.3.16", ngImport: i0, type: OrbComponent, decorators: [{
        type: Component,
        args: [{ selector: 'app-orb-component', template: `
    <div
      #rendererContainer
      role="button"
      tabindex="0"
      class="orb-container"
      [style.width.px]="orbSize"
      [style.height.px]="orbSize"
      (click)="toggleRecording()"
      (keypress)="toggleRecording()"
    ></div>
  `, styles: [".orb-container{display:flex;justify-content:center;align-items:center;overflow:hidden;cursor:pointer}canvas{display:block;outline:none}\n"] }]
    }], propDecorators: { _rendererContainer: [{
            type: ViewChild,
            args: ['rendererContainer', { static: true }]
        }], handleSpacebarPress: [{
            type: HostListener,
            args: ['window:keydown', ['$event']]
        }] } });
1219
1603
 
1220
- const DEFAULT_THROTTLE_TIME = 1000;
1221
1604
  class WakeyWakeyComponent {
1222
1605
  constructor() {
1223
1606
  /**
1224
- * Fires when library loaded
1607
+ * Fires when library loaded. Emits bridge service event
1225
1608
  */
1226
1609
  this.ready = new EventEmitter();
1227
1610
  /**
@@ -1254,7 +1637,7 @@ class WakeyWakeyComponent {
1254
1637
  this._platform = inject(PlatformService);
1255
1638
  this._event = inject(EventService);
1256
1639
  this._audio = inject(AudioService);
1257
- this._model = inject(ModelService);
1640
+ this._bridge = inject(BridgeService);
1258
1641
  /**
1259
1642
  * Subscriptions
1260
1643
  */
@@ -1299,8 +1682,8 @@ class WakeyWakeyComponent {
1299
1682
  */
1300
1683
  _listenEvents() {
1301
1684
  // Ready event
1302
- this._subs.sink = this._event.ready.subscribe((e) => {
1303
- this.ready.emit(e);
1685
+ this._subs.sink = this._event.ready.subscribe(() => {
1686
+ this.ready.emit(this._bridge);
1304
1687
  });
1305
1688
  // Exception event
1306
1689
  this._subs.sink = this._event.exception.subscribe((e) => {
@@ -1312,7 +1695,7 @@ class WakeyWakeyComponent {
1312
1695
  });
1313
1696
  // Wake word event
1314
1697
  this._subs.sink = this._event.wakeword
1315
- .pipe(throttleTime(this._config.throttleTime ?? DEFAULT_THROTTLE_TIME))
1698
+ .pipe(throttleTime(this._config.throttleTime))
1316
1699
  .subscribe((e) => {
1317
1700
  this.wakeword.emit(e);
1318
1701
  });
@@ -1372,10 +1755,13 @@ function provideWakeyWakey(config) {
1372
1755
  PipelineService,
1373
1756
  SpeechRecognitionService,
1374
1757
  ModelService,
1758
+ BridgeService,
1759
+ OrbComponentService,
1375
1760
  provideAppInitializer(async () => {
1376
1761
  const _config = inject(ConfigService);
1377
1762
  const _model = inject(ModelService);
1378
1763
  const _platform = inject(PlatformService);
1764
+ const _mic = inject(MicrophoneService);
1379
1765
  if (_platform.isServer)
1380
1766
  return;
1381
1767
  // Set wasm path
@@ -1387,7 +1773,10 @@ function provideWakeyWakey(config) {
1387
1773
  _config.onnx.model.wakeword,
1388
1774
  ];
1389
1775
  // Create sessions
1390
- const sessions = await Promise.all(Object.values(modelPath).map((path) => InferenceSession.create(path, { executionProviders: ['wasm'] })));
1776
+ const sessions = await Promise.all([
1777
+ ...Object.values(modelPath).map((path) => InferenceSession.create(path, { executionProviders: ['wasm'] })),
1778
+ _mic.init(),
1779
+ ]);
1391
1780
  // set sessions
1392
1781
  _model.session = {
1393
1782
  melspectrogram: sessions[0],
@@ -1407,5 +1796,5 @@ function provideWakeyWakey(config) {
1407
1796
  * Generated bundle index. Do not edit.
1408
1797
  */
1409
1798
 
1410
// Public API surface of the wakeywakey bundle (generated index — do not edit).
export { CONFIG as WAKEYWAKEY_CONFIG, AudioUtil as WakeyWakeyAudioUtil, BridgeService as WakeyWakeyBridgeService, WakeyWakeyComponent, provideWakeyWakey };
//# sourceMappingURL=prabhjeet.me-wakeywakey.mjs.map