@whereby.com/assistant-sdk 0.0.0-canary-20250903113745 → 0.0.0-canary-20250911141956
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +27 -12
- package/dist/index.d.cts +17 -11
- package/dist/index.d.mts +17 -11
- package/dist/index.d.ts +17 -11
- package/dist/index.mjs +27 -12
- package/dist/legacy-esm.js +27 -12
- package/dist/tools.cjs +357 -0
- package/dist/tools.d.ts +22 -0
- package/package.json +17 -6
- package/tools/package.json +4 -0
package/dist/index.cjs
CHANGED
|
@@ -404,7 +404,7 @@ class AudioMixer extends EventEmitter.EventEmitter {
|
|
|
404
404
|
}
|
|
405
405
|
|
|
406
406
|
class Assistant extends EventEmitter {
|
|
407
|
-
constructor({ assistantKey, startCombinedAudioStream
|
|
407
|
+
constructor({ assistantKey, startCombinedAudioStream = false, startLocalMedia = false }) {
|
|
408
408
|
super();
|
|
409
409
|
this.mediaStream = null;
|
|
410
410
|
this.audioSource = null;
|
|
@@ -413,10 +413,12 @@ class Assistant extends EventEmitter {
|
|
|
413
413
|
this.client = new core.WherebyClient();
|
|
414
414
|
this.roomConnection = this.client.getRoomConnection();
|
|
415
415
|
this.localMedia = this.client.getLocalMedia();
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
416
|
+
if (startLocalMedia) {
|
|
417
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
418
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
419
|
+
this.mediaStream = outputMediaStream;
|
|
420
|
+
this.audioSource = outputAudioSource;
|
|
421
|
+
}
|
|
420
422
|
if (startCombinedAudioStream) {
|
|
421
423
|
const handleStreamReady = () => {
|
|
422
424
|
if (!this.combinedStream) {
|
|
@@ -445,11 +447,20 @@ class Assistant extends EventEmitter {
|
|
|
445
447
|
},
|
|
446
448
|
roomUrl,
|
|
447
449
|
isNodeSdk: true,
|
|
448
|
-
|
|
450
|
+
assistantKey: this.assistantKey,
|
|
449
451
|
});
|
|
450
452
|
this.roomConnection.joinRoom();
|
|
451
453
|
});
|
|
452
454
|
}
|
|
455
|
+
startLocalMedia() {
|
|
456
|
+
if (!this.mediaStream) {
|
|
457
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
458
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
459
|
+
this.mediaStream = outputMediaStream;
|
|
460
|
+
this.audioSource = outputAudioSource;
|
|
461
|
+
}
|
|
462
|
+
this.localMedia.startMedia(this.mediaStream);
|
|
463
|
+
}
|
|
453
464
|
getLocalMediaStream() {
|
|
454
465
|
return this.mediaStream;
|
|
455
466
|
}
|
|
@@ -505,6 +516,9 @@ class Assistant extends EventEmitter {
|
|
|
505
516
|
subscribeToRemoteParticipants(callback) {
|
|
506
517
|
return this.roomConnection.subscribeToRemoteParticipants(callback);
|
|
507
518
|
}
|
|
519
|
+
subscribeToChatMessages(callback) {
|
|
520
|
+
return this.roomConnection.subscribeToChatMessages(callback);
|
|
521
|
+
}
|
|
508
522
|
}
|
|
509
523
|
|
|
510
524
|
const BIND_INTERFACE = "en0";
|
|
@@ -524,7 +538,7 @@ function buildRoomUrl(roomPath, wherebySubdomain, baseDomain = "whereby.com") {
|
|
|
524
538
|
return `https://${wherebyDomain}${roomPath}`;
|
|
525
539
|
}
|
|
526
540
|
|
|
527
|
-
const webhookRouter = (webhookTriggers,
|
|
541
|
+
const webhookRouter = (webhookTriggers, emitter, assistantKey, startCombinedAudioStream = false, startLocalMedia = false) => {
|
|
528
542
|
const router = express.Router();
|
|
529
543
|
const jsonParser = bodyParser.json();
|
|
530
544
|
router.get("/", (_, res) => {
|
|
@@ -537,8 +551,8 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
537
551
|
assert("type" in req.body, "webhook type is required");
|
|
538
552
|
const shouldTriggerOnReceivedWebhook = (_a = webhookTriggers[req.body.type]) === null || _a === void 0 ? void 0 : _a.call(webhookTriggers, req.body);
|
|
539
553
|
if (shouldTriggerOnReceivedWebhook) {
|
|
540
|
-
const roomUrl = buildRoomUrl(req.body.data.roomName, subdomain);
|
|
541
|
-
const assistant = new Assistant({ assistantKey, startCombinedAudioStream
|
|
554
|
+
const roomUrl = buildRoomUrl(req.body.data.roomName, req.body.data.subdomain);
|
|
555
|
+
const assistant = new Assistant({ assistantKey, startCombinedAudioStream, startLocalMedia });
|
|
542
556
|
assistant.joinRoom(roomUrl);
|
|
543
557
|
emitter.emit(ASSISTANT_JOIN_SUCCESS, { roomUrl, triggerWebhook: req.body, assistant });
|
|
544
558
|
}
|
|
@@ -548,16 +562,17 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
548
562
|
return router;
|
|
549
563
|
};
|
|
550
564
|
class Trigger extends EventEmitter.EventEmitter {
|
|
551
|
-
constructor({ webhookTriggers = {},
|
|
565
|
+
constructor({ webhookTriggers = {}, port = 4999, assistantKey, startCombinedAudioStream, startLocalMedia, }) {
|
|
552
566
|
super();
|
|
553
567
|
this.webhookTriggers = webhookTriggers;
|
|
554
|
-
this.subdomain = subdomain;
|
|
555
568
|
this.port = port;
|
|
556
569
|
this.assistantKey = assistantKey;
|
|
570
|
+
this.startCombinedAudioStream = startCombinedAudioStream !== null && startCombinedAudioStream !== void 0 ? startCombinedAudioStream : false;
|
|
571
|
+
this.startLocalMedia = startLocalMedia !== null && startLocalMedia !== void 0 ? startLocalMedia : false;
|
|
557
572
|
}
|
|
558
573
|
start() {
|
|
559
574
|
const app = express();
|
|
560
|
-
const router = webhookRouter(this.webhookTriggers, this.
|
|
575
|
+
const router = webhookRouter(this.webhookTriggers, this, this.assistantKey, this.startCombinedAudioStream, this.startLocalMedia);
|
|
561
576
|
app.use(router);
|
|
562
577
|
const server = app.listen(this.port, () => {
|
|
563
578
|
});
|
package/dist/index.d.cts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { RoomConnectionClient, RemoteParticipantState } from '@whereby.com/core';
|
|
1
|
+
import { RoomConnectionClient, RemoteParticipantState, ChatMessage } from '@whereby.com/core';
|
|
2
2
|
export { RemoteParticipantState } from '@whereby.com/core';
|
|
3
3
|
import wrtc from '@roamhq/wrtc';
|
|
4
4
|
import EventEmitter, { EventEmitter as EventEmitter$1 } from 'events';
|
|
@@ -13,19 +13,21 @@ type AssistantEvents = {
|
|
|
13
13
|
};
|
|
14
14
|
|
|
15
15
|
type AssistantOptions = {
|
|
16
|
-
assistantKey
|
|
17
|
-
startCombinedAudioStream
|
|
16
|
+
assistantKey: string;
|
|
17
|
+
startCombinedAudioStream?: boolean;
|
|
18
|
+
startLocalMedia?: boolean;
|
|
18
19
|
};
|
|
19
20
|
declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
20
|
-
private assistantKey
|
|
21
|
+
private assistantKey;
|
|
21
22
|
private client;
|
|
22
23
|
private roomConnection;
|
|
23
24
|
private localMedia;
|
|
24
25
|
private mediaStream;
|
|
25
26
|
private audioSource;
|
|
26
27
|
private combinedStream;
|
|
27
|
-
constructor({ assistantKey, startCombinedAudioStream }
|
|
28
|
+
constructor({ assistantKey, startCombinedAudioStream, startLocalMedia }: AssistantOptions);
|
|
28
29
|
joinRoom(roomUrl: string): Promise<void>;
|
|
30
|
+
startLocalMedia(): void;
|
|
29
31
|
getLocalMediaStream(): MediaStream | null;
|
|
30
32
|
getLocalAudioSource(): wrtc.nonstandard.RTCAudioSource | null;
|
|
31
33
|
getRoomConnection(): RoomConnectionClient;
|
|
@@ -41,6 +43,7 @@ declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
|
41
43
|
acceptWaitingParticipant(participantId: string): void;
|
|
42
44
|
rejectWaitingParticipant(participantId: string): void;
|
|
43
45
|
subscribeToRemoteParticipants(callback: (participants: RemoteParticipantState[]) => void): () => void;
|
|
46
|
+
subscribeToChatMessages(callback: (messages: ChatMessage[]) => void): () => void;
|
|
44
47
|
}
|
|
45
48
|
|
|
46
49
|
type WebhookType = "room.client.joined" | "room.client.left" | "room.session.started" | "room.session.ended";
|
|
@@ -55,6 +58,7 @@ interface WherebyWebhookInRoom {
|
|
|
55
58
|
meetingId: string;
|
|
56
59
|
roomName: string;
|
|
57
60
|
roomSessionId: string | null;
|
|
61
|
+
subdomain: string;
|
|
58
62
|
}
|
|
59
63
|
interface WherebyWebhookDataClient {
|
|
60
64
|
displayName: string;
|
|
@@ -62,7 +66,7 @@ interface WherebyWebhookDataClient {
|
|
|
62
66
|
metadata: string | null;
|
|
63
67
|
externalId: string | null;
|
|
64
68
|
}
|
|
65
|
-
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner";
|
|
69
|
+
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner" | "assistant";
|
|
66
70
|
interface WherebyWebhookDataClientJoinLeave {
|
|
67
71
|
roleName: WherebyRoleName;
|
|
68
72
|
numClients: number;
|
|
@@ -100,16 +104,18 @@ type WherebyWebhookTriggers = Partial<{
|
|
|
100
104
|
|
|
101
105
|
interface TriggerOptions {
|
|
102
106
|
webhookTriggers: WherebyWebhookTriggers;
|
|
103
|
-
subdomain: string;
|
|
104
107
|
port?: number;
|
|
105
|
-
assistantKey
|
|
108
|
+
assistantKey: string;
|
|
109
|
+
startCombinedAudioStream?: boolean;
|
|
110
|
+
startLocalMedia?: boolean;
|
|
106
111
|
}
|
|
107
112
|
declare class Trigger extends EventEmitter$1<TriggerEvents> {
|
|
108
113
|
private webhookTriggers;
|
|
109
|
-
private subdomain;
|
|
110
114
|
private port;
|
|
111
|
-
private assistantKey
|
|
112
|
-
|
|
115
|
+
private assistantKey;
|
|
116
|
+
private startCombinedAudioStream;
|
|
117
|
+
private startLocalMedia;
|
|
118
|
+
constructor({ webhookTriggers, port, assistantKey, startCombinedAudioStream, startLocalMedia, }: TriggerOptions);
|
|
113
119
|
start(): void;
|
|
114
120
|
}
|
|
115
121
|
|
package/dist/index.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { RoomConnectionClient, RemoteParticipantState } from '@whereby.com/core';
|
|
1
|
+
import { RoomConnectionClient, RemoteParticipantState, ChatMessage } from '@whereby.com/core';
|
|
2
2
|
export { RemoteParticipantState } from '@whereby.com/core';
|
|
3
3
|
import wrtc from '@roamhq/wrtc';
|
|
4
4
|
import EventEmitter, { EventEmitter as EventEmitter$1 } from 'events';
|
|
@@ -13,19 +13,21 @@ type AssistantEvents = {
|
|
|
13
13
|
};
|
|
14
14
|
|
|
15
15
|
type AssistantOptions = {
|
|
16
|
-
assistantKey
|
|
17
|
-
startCombinedAudioStream
|
|
16
|
+
assistantKey: string;
|
|
17
|
+
startCombinedAudioStream?: boolean;
|
|
18
|
+
startLocalMedia?: boolean;
|
|
18
19
|
};
|
|
19
20
|
declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
20
|
-
private assistantKey
|
|
21
|
+
private assistantKey;
|
|
21
22
|
private client;
|
|
22
23
|
private roomConnection;
|
|
23
24
|
private localMedia;
|
|
24
25
|
private mediaStream;
|
|
25
26
|
private audioSource;
|
|
26
27
|
private combinedStream;
|
|
27
|
-
constructor({ assistantKey, startCombinedAudioStream }
|
|
28
|
+
constructor({ assistantKey, startCombinedAudioStream, startLocalMedia }: AssistantOptions);
|
|
28
29
|
joinRoom(roomUrl: string): Promise<void>;
|
|
30
|
+
startLocalMedia(): void;
|
|
29
31
|
getLocalMediaStream(): MediaStream | null;
|
|
30
32
|
getLocalAudioSource(): wrtc.nonstandard.RTCAudioSource | null;
|
|
31
33
|
getRoomConnection(): RoomConnectionClient;
|
|
@@ -41,6 +43,7 @@ declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
|
41
43
|
acceptWaitingParticipant(participantId: string): void;
|
|
42
44
|
rejectWaitingParticipant(participantId: string): void;
|
|
43
45
|
subscribeToRemoteParticipants(callback: (participants: RemoteParticipantState[]) => void): () => void;
|
|
46
|
+
subscribeToChatMessages(callback: (messages: ChatMessage[]) => void): () => void;
|
|
44
47
|
}
|
|
45
48
|
|
|
46
49
|
type WebhookType = "room.client.joined" | "room.client.left" | "room.session.started" | "room.session.ended";
|
|
@@ -55,6 +58,7 @@ interface WherebyWebhookInRoom {
|
|
|
55
58
|
meetingId: string;
|
|
56
59
|
roomName: string;
|
|
57
60
|
roomSessionId: string | null;
|
|
61
|
+
subdomain: string;
|
|
58
62
|
}
|
|
59
63
|
interface WherebyWebhookDataClient {
|
|
60
64
|
displayName: string;
|
|
@@ -62,7 +66,7 @@ interface WherebyWebhookDataClient {
|
|
|
62
66
|
metadata: string | null;
|
|
63
67
|
externalId: string | null;
|
|
64
68
|
}
|
|
65
|
-
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner";
|
|
69
|
+
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner" | "assistant";
|
|
66
70
|
interface WherebyWebhookDataClientJoinLeave {
|
|
67
71
|
roleName: WherebyRoleName;
|
|
68
72
|
numClients: number;
|
|
@@ -100,16 +104,18 @@ type WherebyWebhookTriggers = Partial<{
|
|
|
100
104
|
|
|
101
105
|
interface TriggerOptions {
|
|
102
106
|
webhookTriggers: WherebyWebhookTriggers;
|
|
103
|
-
subdomain: string;
|
|
104
107
|
port?: number;
|
|
105
|
-
assistantKey
|
|
108
|
+
assistantKey: string;
|
|
109
|
+
startCombinedAudioStream?: boolean;
|
|
110
|
+
startLocalMedia?: boolean;
|
|
106
111
|
}
|
|
107
112
|
declare class Trigger extends EventEmitter$1<TriggerEvents> {
|
|
108
113
|
private webhookTriggers;
|
|
109
|
-
private subdomain;
|
|
110
114
|
private port;
|
|
111
|
-
private assistantKey
|
|
112
|
-
|
|
115
|
+
private assistantKey;
|
|
116
|
+
private startCombinedAudioStream;
|
|
117
|
+
private startLocalMedia;
|
|
118
|
+
constructor({ webhookTriggers, port, assistantKey, startCombinedAudioStream, startLocalMedia, }: TriggerOptions);
|
|
113
119
|
start(): void;
|
|
114
120
|
}
|
|
115
121
|
|
package/dist/index.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { RoomConnectionClient, RemoteParticipantState } from '@whereby.com/core';
|
|
1
|
+
import { RoomConnectionClient, RemoteParticipantState, ChatMessage } from '@whereby.com/core';
|
|
2
2
|
export { RemoteParticipantState } from '@whereby.com/core';
|
|
3
3
|
import wrtc from '@roamhq/wrtc';
|
|
4
4
|
import EventEmitter, { EventEmitter as EventEmitter$1 } from 'events';
|
|
@@ -13,19 +13,21 @@ type AssistantEvents = {
|
|
|
13
13
|
};
|
|
14
14
|
|
|
15
15
|
type AssistantOptions = {
|
|
16
|
-
assistantKey
|
|
17
|
-
startCombinedAudioStream
|
|
16
|
+
assistantKey: string;
|
|
17
|
+
startCombinedAudioStream?: boolean;
|
|
18
|
+
startLocalMedia?: boolean;
|
|
18
19
|
};
|
|
19
20
|
declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
20
|
-
private assistantKey
|
|
21
|
+
private assistantKey;
|
|
21
22
|
private client;
|
|
22
23
|
private roomConnection;
|
|
23
24
|
private localMedia;
|
|
24
25
|
private mediaStream;
|
|
25
26
|
private audioSource;
|
|
26
27
|
private combinedStream;
|
|
27
|
-
constructor({ assistantKey, startCombinedAudioStream }
|
|
28
|
+
constructor({ assistantKey, startCombinedAudioStream, startLocalMedia }: AssistantOptions);
|
|
28
29
|
joinRoom(roomUrl: string): Promise<void>;
|
|
30
|
+
startLocalMedia(): void;
|
|
29
31
|
getLocalMediaStream(): MediaStream | null;
|
|
30
32
|
getLocalAudioSource(): wrtc.nonstandard.RTCAudioSource | null;
|
|
31
33
|
getRoomConnection(): RoomConnectionClient;
|
|
@@ -41,6 +43,7 @@ declare class Assistant extends EventEmitter<AssistantEvents> {
|
|
|
41
43
|
acceptWaitingParticipant(participantId: string): void;
|
|
42
44
|
rejectWaitingParticipant(participantId: string): void;
|
|
43
45
|
subscribeToRemoteParticipants(callback: (participants: RemoteParticipantState[]) => void): () => void;
|
|
46
|
+
subscribeToChatMessages(callback: (messages: ChatMessage[]) => void): () => void;
|
|
44
47
|
}
|
|
45
48
|
|
|
46
49
|
type WebhookType = "room.client.joined" | "room.client.left" | "room.session.started" | "room.session.ended";
|
|
@@ -55,6 +58,7 @@ interface WherebyWebhookInRoom {
|
|
|
55
58
|
meetingId: string;
|
|
56
59
|
roomName: string;
|
|
57
60
|
roomSessionId: string | null;
|
|
61
|
+
subdomain: string;
|
|
58
62
|
}
|
|
59
63
|
interface WherebyWebhookDataClient {
|
|
60
64
|
displayName: string;
|
|
@@ -62,7 +66,7 @@ interface WherebyWebhookDataClient {
|
|
|
62
66
|
metadata: string | null;
|
|
63
67
|
externalId: string | null;
|
|
64
68
|
}
|
|
65
|
-
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner";
|
|
69
|
+
type WherebyRoleName = "owner" | "member" | "host" | "visitor" | "granted_visitor" | "viewer" | "granted_viewer" | "recorder" | "streamer" | "captioner" | "assistant";
|
|
66
70
|
interface WherebyWebhookDataClientJoinLeave {
|
|
67
71
|
roleName: WherebyRoleName;
|
|
68
72
|
numClients: number;
|
|
@@ -100,16 +104,18 @@ type WherebyWebhookTriggers = Partial<{
|
|
|
100
104
|
|
|
101
105
|
interface TriggerOptions {
|
|
102
106
|
webhookTriggers: WherebyWebhookTriggers;
|
|
103
|
-
subdomain: string;
|
|
104
107
|
port?: number;
|
|
105
|
-
assistantKey
|
|
108
|
+
assistantKey: string;
|
|
109
|
+
startCombinedAudioStream?: boolean;
|
|
110
|
+
startLocalMedia?: boolean;
|
|
106
111
|
}
|
|
107
112
|
declare class Trigger extends EventEmitter$1<TriggerEvents> {
|
|
108
113
|
private webhookTriggers;
|
|
109
|
-
private subdomain;
|
|
110
114
|
private port;
|
|
111
|
-
private assistantKey
|
|
112
|
-
|
|
115
|
+
private assistantKey;
|
|
116
|
+
private startCombinedAudioStream;
|
|
117
|
+
private startLocalMedia;
|
|
118
|
+
constructor({ webhookTriggers, port, assistantKey, startCombinedAudioStream, startLocalMedia, }: TriggerOptions);
|
|
113
119
|
start(): void;
|
|
114
120
|
}
|
|
115
121
|
|
package/dist/index.mjs
CHANGED
|
@@ -402,7 +402,7 @@ class AudioMixer extends EventEmitter {
|
|
|
402
402
|
}
|
|
403
403
|
|
|
404
404
|
class Assistant extends EventEmitter$1 {
|
|
405
|
-
constructor({ assistantKey, startCombinedAudioStream
|
|
405
|
+
constructor({ assistantKey, startCombinedAudioStream = false, startLocalMedia = false }) {
|
|
406
406
|
super();
|
|
407
407
|
this.mediaStream = null;
|
|
408
408
|
this.audioSource = null;
|
|
@@ -411,10 +411,12 @@ class Assistant extends EventEmitter$1 {
|
|
|
411
411
|
this.client = new WherebyClient();
|
|
412
412
|
this.roomConnection = this.client.getRoomConnection();
|
|
413
413
|
this.localMedia = this.client.getLocalMedia();
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
414
|
+
if (startLocalMedia) {
|
|
415
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
416
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
417
|
+
this.mediaStream = outputMediaStream;
|
|
418
|
+
this.audioSource = outputAudioSource;
|
|
419
|
+
}
|
|
418
420
|
if (startCombinedAudioStream) {
|
|
419
421
|
const handleStreamReady = () => {
|
|
420
422
|
if (!this.combinedStream) {
|
|
@@ -443,11 +445,20 @@ class Assistant extends EventEmitter$1 {
|
|
|
443
445
|
},
|
|
444
446
|
roomUrl,
|
|
445
447
|
isNodeSdk: true,
|
|
446
|
-
|
|
448
|
+
assistantKey: this.assistantKey,
|
|
447
449
|
});
|
|
448
450
|
this.roomConnection.joinRoom();
|
|
449
451
|
});
|
|
450
452
|
}
|
|
453
|
+
startLocalMedia() {
|
|
454
|
+
if (!this.mediaStream) {
|
|
455
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
456
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
457
|
+
this.mediaStream = outputMediaStream;
|
|
458
|
+
this.audioSource = outputAudioSource;
|
|
459
|
+
}
|
|
460
|
+
this.localMedia.startMedia(this.mediaStream);
|
|
461
|
+
}
|
|
451
462
|
getLocalMediaStream() {
|
|
452
463
|
return this.mediaStream;
|
|
453
464
|
}
|
|
@@ -503,6 +514,9 @@ class Assistant extends EventEmitter$1 {
|
|
|
503
514
|
subscribeToRemoteParticipants(callback) {
|
|
504
515
|
return this.roomConnection.subscribeToRemoteParticipants(callback);
|
|
505
516
|
}
|
|
517
|
+
subscribeToChatMessages(callback) {
|
|
518
|
+
return this.roomConnection.subscribeToChatMessages(callback);
|
|
519
|
+
}
|
|
506
520
|
}
|
|
507
521
|
|
|
508
522
|
const BIND_INTERFACE = "en0";
|
|
@@ -522,7 +536,7 @@ function buildRoomUrl(roomPath, wherebySubdomain, baseDomain = "whereby.com") {
|
|
|
522
536
|
return `https://${wherebyDomain}${roomPath}`;
|
|
523
537
|
}
|
|
524
538
|
|
|
525
|
-
const webhookRouter = (webhookTriggers,
|
|
539
|
+
const webhookRouter = (webhookTriggers, emitter, assistantKey, startCombinedAudioStream = false, startLocalMedia = false) => {
|
|
526
540
|
const router = express.Router();
|
|
527
541
|
const jsonParser = bodyParser.json();
|
|
528
542
|
router.get("/", (_, res) => {
|
|
@@ -535,8 +549,8 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
535
549
|
assert("type" in req.body, "webhook type is required");
|
|
536
550
|
const shouldTriggerOnReceivedWebhook = (_a = webhookTriggers[req.body.type]) === null || _a === void 0 ? void 0 : _a.call(webhookTriggers, req.body);
|
|
537
551
|
if (shouldTriggerOnReceivedWebhook) {
|
|
538
|
-
const roomUrl = buildRoomUrl(req.body.data.roomName, subdomain);
|
|
539
|
-
const assistant = new Assistant({ assistantKey, startCombinedAudioStream
|
|
552
|
+
const roomUrl = buildRoomUrl(req.body.data.roomName, req.body.data.subdomain);
|
|
553
|
+
const assistant = new Assistant({ assistantKey, startCombinedAudioStream, startLocalMedia });
|
|
540
554
|
assistant.joinRoom(roomUrl);
|
|
541
555
|
emitter.emit(ASSISTANT_JOIN_SUCCESS, { roomUrl, triggerWebhook: req.body, assistant });
|
|
542
556
|
}
|
|
@@ -546,16 +560,17 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
546
560
|
return router;
|
|
547
561
|
};
|
|
548
562
|
class Trigger extends EventEmitter {
|
|
549
|
-
constructor({ webhookTriggers = {},
|
|
563
|
+
constructor({ webhookTriggers = {}, port = 4999, assistantKey, startCombinedAudioStream, startLocalMedia, }) {
|
|
550
564
|
super();
|
|
551
565
|
this.webhookTriggers = webhookTriggers;
|
|
552
|
-
this.subdomain = subdomain;
|
|
553
566
|
this.port = port;
|
|
554
567
|
this.assistantKey = assistantKey;
|
|
568
|
+
this.startCombinedAudioStream = startCombinedAudioStream !== null && startCombinedAudioStream !== void 0 ? startCombinedAudioStream : false;
|
|
569
|
+
this.startLocalMedia = startLocalMedia !== null && startLocalMedia !== void 0 ? startLocalMedia : false;
|
|
555
570
|
}
|
|
556
571
|
start() {
|
|
557
572
|
const app = express();
|
|
558
|
-
const router = webhookRouter(this.webhookTriggers, this.
|
|
573
|
+
const router = webhookRouter(this.webhookTriggers, this, this.assistantKey, this.startCombinedAudioStream, this.startLocalMedia);
|
|
559
574
|
app.use(router);
|
|
560
575
|
const server = app.listen(this.port, () => {
|
|
561
576
|
});
|
package/dist/legacy-esm.js
CHANGED
|
@@ -402,7 +402,7 @@ class AudioMixer extends EventEmitter {
|
|
|
402
402
|
}
|
|
403
403
|
|
|
404
404
|
class Assistant extends EventEmitter$1 {
|
|
405
|
-
constructor({ assistantKey, startCombinedAudioStream
|
|
405
|
+
constructor({ assistantKey, startCombinedAudioStream = false, startLocalMedia = false }) {
|
|
406
406
|
super();
|
|
407
407
|
this.mediaStream = null;
|
|
408
408
|
this.audioSource = null;
|
|
@@ -411,10 +411,12 @@ class Assistant extends EventEmitter$1 {
|
|
|
411
411
|
this.client = new WherebyClient();
|
|
412
412
|
this.roomConnection = this.client.getRoomConnection();
|
|
413
413
|
this.localMedia = this.client.getLocalMedia();
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
414
|
+
if (startLocalMedia) {
|
|
415
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
416
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
417
|
+
this.mediaStream = outputMediaStream;
|
|
418
|
+
this.audioSource = outputAudioSource;
|
|
419
|
+
}
|
|
418
420
|
if (startCombinedAudioStream) {
|
|
419
421
|
const handleStreamReady = () => {
|
|
420
422
|
if (!this.combinedStream) {
|
|
@@ -443,11 +445,20 @@ class Assistant extends EventEmitter$1 {
|
|
|
443
445
|
},
|
|
444
446
|
roomUrl,
|
|
445
447
|
isNodeSdk: true,
|
|
446
|
-
|
|
448
|
+
assistantKey: this.assistantKey,
|
|
447
449
|
});
|
|
448
450
|
this.roomConnection.joinRoom();
|
|
449
451
|
});
|
|
450
452
|
}
|
|
453
|
+
startLocalMedia() {
|
|
454
|
+
if (!this.mediaStream) {
|
|
455
|
+
const outputAudioSource = new wrtc.nonstandard.RTCAudioSource();
|
|
456
|
+
const outputMediaStream = new wrtc.MediaStream([outputAudioSource.createTrack()]);
|
|
457
|
+
this.mediaStream = outputMediaStream;
|
|
458
|
+
this.audioSource = outputAudioSource;
|
|
459
|
+
}
|
|
460
|
+
this.localMedia.startMedia(this.mediaStream);
|
|
461
|
+
}
|
|
451
462
|
getLocalMediaStream() {
|
|
452
463
|
return this.mediaStream;
|
|
453
464
|
}
|
|
@@ -503,6 +514,9 @@ class Assistant extends EventEmitter$1 {
|
|
|
503
514
|
subscribeToRemoteParticipants(callback) {
|
|
504
515
|
return this.roomConnection.subscribeToRemoteParticipants(callback);
|
|
505
516
|
}
|
|
517
|
+
subscribeToChatMessages(callback) {
|
|
518
|
+
return this.roomConnection.subscribeToChatMessages(callback);
|
|
519
|
+
}
|
|
506
520
|
}
|
|
507
521
|
|
|
508
522
|
const BIND_INTERFACE = "en0";
|
|
@@ -522,7 +536,7 @@ function buildRoomUrl(roomPath, wherebySubdomain, baseDomain = "whereby.com") {
|
|
|
522
536
|
return `https://${wherebyDomain}${roomPath}`;
|
|
523
537
|
}
|
|
524
538
|
|
|
525
|
-
const webhookRouter = (webhookTriggers,
|
|
539
|
+
const webhookRouter = (webhookTriggers, emitter, assistantKey, startCombinedAudioStream = false, startLocalMedia = false) => {
|
|
526
540
|
const router = express.Router();
|
|
527
541
|
const jsonParser = bodyParser.json();
|
|
528
542
|
router.get("/", (_, res) => {
|
|
@@ -535,8 +549,8 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
535
549
|
assert("type" in req.body, "webhook type is required");
|
|
536
550
|
const shouldTriggerOnReceivedWebhook = (_a = webhookTriggers[req.body.type]) === null || _a === void 0 ? void 0 : _a.call(webhookTriggers, req.body);
|
|
537
551
|
if (shouldTriggerOnReceivedWebhook) {
|
|
538
|
-
const roomUrl = buildRoomUrl(req.body.data.roomName, subdomain);
|
|
539
|
-
const assistant = new Assistant({ assistantKey, startCombinedAudioStream
|
|
552
|
+
const roomUrl = buildRoomUrl(req.body.data.roomName, req.body.data.subdomain);
|
|
553
|
+
const assistant = new Assistant({ assistantKey, startCombinedAudioStream, startLocalMedia });
|
|
540
554
|
assistant.joinRoom(roomUrl);
|
|
541
555
|
emitter.emit(ASSISTANT_JOIN_SUCCESS, { roomUrl, triggerWebhook: req.body, assistant });
|
|
542
556
|
}
|
|
@@ -546,16 +560,17 @@ const webhookRouter = (webhookTriggers, subdomain, emitter, assistantKey) => {
|
|
|
546
560
|
return router;
|
|
547
561
|
};
|
|
548
562
|
class Trigger extends EventEmitter {
|
|
549
|
-
constructor({ webhookTriggers = {},
|
|
563
|
+
constructor({ webhookTriggers = {}, port = 4999, assistantKey, startCombinedAudioStream, startLocalMedia, }) {
|
|
550
564
|
super();
|
|
551
565
|
this.webhookTriggers = webhookTriggers;
|
|
552
|
-
this.subdomain = subdomain;
|
|
553
566
|
this.port = port;
|
|
554
567
|
this.assistantKey = assistantKey;
|
|
568
|
+
this.startCombinedAudioStream = startCombinedAudioStream !== null && startCombinedAudioStream !== void 0 ? startCombinedAudioStream : false;
|
|
569
|
+
this.startLocalMedia = startLocalMedia !== null && startLocalMedia !== void 0 ? startLocalMedia : false;
|
|
555
570
|
}
|
|
556
571
|
start() {
|
|
557
572
|
const app = express();
|
|
558
|
-
const router = webhookRouter(this.webhookTriggers, this.
|
|
573
|
+
const router = webhookRouter(this.webhookTriggers, this, this.assistantKey, this.startCombinedAudioStream, this.startLocalMedia);
|
|
559
574
|
app.use(router);
|
|
560
575
|
const server = app.listen(this.port, () => {
|
|
561
576
|
});
|
package/dist/tools.cjs
ADDED
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var events = require('events');
|
|
4
|
+
var child_process = require('child_process');
|
|
5
|
+
require('stream');
|
|
6
|
+
var wrtc = require('@roamhq/wrtc');
|
|
7
|
+
|
|
8
|
+
const { nonstandard: { RTCAudioSink }, } = wrtc;
|
|
9
|
+
class AudioSink extends wrtc.nonstandard.RTCAudioSink {
|
|
10
|
+
constructor(track) {
|
|
11
|
+
super(track);
|
|
12
|
+
this._sink = new RTCAudioSink(track);
|
|
13
|
+
}
|
|
14
|
+
subscribe(cb) {
|
|
15
|
+
this._sink.ondata = cb;
|
|
16
|
+
return () => {
|
|
17
|
+
this._sink.ondata = undefined;
|
|
18
|
+
};
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
const PARTICIPANT_SLOTS = 20;
|
|
23
|
+
const STREAM_INPUT_SAMPLE_RATE_IN_HZ = 48000;
|
|
24
|
+
const BYTES_PER_SAMPLE = 2;
|
|
25
|
+
const FRAME_10MS_SAMPLES = 480;
|
|
26
|
+
const slotBuffers = new Map();
|
|
27
|
+
function appendAndDrainTo480(slot, newSamples) {
|
|
28
|
+
var _a;
|
|
29
|
+
const prev = (_a = slotBuffers.get(slot)) !== null && _a !== void 0 ? _a : new Int16Array(0);
|
|
30
|
+
const merged = new Int16Array(prev.length + newSamples.length);
|
|
31
|
+
merged.set(prev, 0);
|
|
32
|
+
merged.set(newSamples, prev.length);
|
|
33
|
+
let offset = 0;
|
|
34
|
+
while (merged.length - offset >= FRAME_10MS_SAMPLES) {
|
|
35
|
+
const chunk = merged.subarray(offset, offset + FRAME_10MS_SAMPLES);
|
|
36
|
+
enqueueFrame(slot, chunk);
|
|
37
|
+
offset += FRAME_10MS_SAMPLES;
|
|
38
|
+
}
|
|
39
|
+
slotBuffers.set(slot, merged.subarray(offset));
|
|
40
|
+
}
|
|
41
|
+
({
|
|
42
|
+
enqFrames: new Array(PARTICIPANT_SLOTS).fill(0),
|
|
43
|
+
enqSamples: new Array(PARTICIPANT_SLOTS).fill(0),
|
|
44
|
+
wroteFrames: new Array(PARTICIPANT_SLOTS).fill(0),
|
|
45
|
+
wroteSamples: new Array(PARTICIPANT_SLOTS).fill(0),
|
|
46
|
+
lastFramesSeen: new Array(PARTICIPANT_SLOTS).fill(0),
|
|
47
|
+
});
|
|
48
|
+
let slots = [];
|
|
49
|
+
let stopPacerFn = null;
|
|
50
|
+
let outputPacerState = null;
|
|
51
|
+
function resampleTo48kHz(inputSamples, inputSampleRate, inputFrames) {
|
|
52
|
+
const ratio = STREAM_INPUT_SAMPLE_RATE_IN_HZ / inputSampleRate;
|
|
53
|
+
const outputLength = Math.floor(inputFrames * ratio);
|
|
54
|
+
const output = new Int16Array(outputLength);
|
|
55
|
+
for (let i = 0; i < outputLength; i++) {
|
|
56
|
+
const inputIndex = i / ratio;
|
|
57
|
+
const index = Math.floor(inputIndex);
|
|
58
|
+
const fraction = inputIndex - index;
|
|
59
|
+
if (index + 1 < inputSamples.length) {
|
|
60
|
+
const sample1 = inputSamples[index];
|
|
61
|
+
const sample2 = inputSamples[index + 1];
|
|
62
|
+
output[i] = Math.round(sample1 + (sample2 - sample1) * fraction);
|
|
63
|
+
}
|
|
64
|
+
else {
|
|
65
|
+
output[i] = inputSamples[Math.min(index, inputSamples.length - 1)];
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
return output;
|
|
69
|
+
}
|
|
70
|
+
function enqueueOutputFrame(samples) {
|
|
71
|
+
if (outputPacerState) {
|
|
72
|
+
outputPacerState.frameQueue.push(samples);
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
/**
 * Starts the real-time pacer that (a) feeds each participant slot's queued
 * PCM into its dedicated ffmpeg input pipe at frame cadence, and (b) pushes
 * mixed output frames from outputPacerState.frameQueue into the
 * RTCAudioSource. Any previously running pacer is cancelled first.
 *
 * @param ff - Spawned ffmpeg child process; slot writers live at stdio[3..3+slotCount-1].
 * @param slotCount - Number of participant input slots.
 * @param rtcAudioSource - Destination for mixed frames (onData()).
 * @param onAudioStreamReady - Invoked once, just before the first mixed frame is delivered.
 */
function startPacer(ff, slotCount, rtcAudioSource, onAudioStreamReady) {
    // Only one pacer may run at a time: cancel the previous interval.
    if (stopPacerFn) {
        stopPacerFn();
        stopPacerFn = null;
    }
    // One writable pipe per slot, starting at fd 3 (0-2 are stdin/out/err).
    const writers = Array.from({ length: slotCount }, (_, i) => ff.stdio[3 + i]);
    // High-resolution monotonic clock in milliseconds.
    const nowMs = () => Number(process.hrtime.bigint()) / 1e6;
    const outputFrameMs = (FRAME_10MS_SAMPLES / STREAM_INPUT_SAMPLE_RATE_IN_HZ) * 1000;
    const t0 = nowMs();
    // Per-slot state: pending buffers, last frame size, and next deadline.
    slots = Array.from({ length: slotCount }, () => ({
        q: [],
        lastFrames: FRAME_10MS_SAMPLES,
        nextDueMs: t0 + (FRAME_10MS_SAMPLES / STREAM_INPUT_SAMPLE_RATE_IN_HZ) * 1000,
    }));
    outputPacerState = {
        frameQueue: [],
        nextDueMs: t0 + outputFrameMs,
        rtcAudioSource,
        onAudioStreamReady,
        didEmitReadyEvent: false,
    };
    // Tick faster (5 ms) than the frame period so deadlines are not missed badly.
    const iv = setInterval(() => {
        const t = nowMs();
        for (let s = 0; s < slotCount; s++) {
            const st = slots[s];
            const w = writers[s];
            const frameMs = (st.lastFrames / STREAM_INPUT_SAMPLE_RATE_IN_HZ) * 1000;
            if (t >= st.nextDueMs) {
                // No queued audio -> write silence to keep ffmpeg's input flowing.
                const buf = st.q.length ? st.q.shift() : Buffer.alloc(st.lastFrames * BYTES_PER_SAMPLE);
                if (!w.write(buf)) {
                    // Backpressure: skip ahead by however many frames we are late.
                    const late = t - st.nextDueMs;
                    const steps = Math.max(1, Math.ceil(late / frameMs));
                    st.nextDueMs += steps * frameMs;
                    continue;
                }
                // Advance the deadline, catching up if the tick was late.
                const late = t - st.nextDueMs;
                const steps = Math.max(1, Math.ceil(late / frameMs));
                st.nextDueMs += steps * frameMs;
            }
        }
        if (!outputPacerState)
            return;
        const state = outputPacerState;
        if (t >= state.nextDueMs) {
            // Deliver the next mixed frame, or silence if none is queued.
            const samples = state.frameQueue.length > 0 ? state.frameQueue.shift() : new Int16Array(FRAME_10MS_SAMPLES);
            if (!state.didEmitReadyEvent) {
                // First delivery: notify the consumer exactly once.
                state.onAudioStreamReady();
                state.didEmitReadyEvent = true;
            }
            state.rtcAudioSource.onData({
                samples: samples,
                sampleRate: STREAM_INPUT_SAMPLE_RATE_IN_HZ,
            });
            const late = t - state.nextDueMs;
            const steps = Math.max(1, Math.ceil(late / outputFrameMs));
            state.nextDueMs += steps * outputFrameMs;
        }
    }, 5);
    stopPacerFn = () => clearInterval(iv);
}
|
|
135
|
+
/**
 * Stops the pacing interval and resets all pacer state.
 * Clears the per-slot input queues and the output pacer state so that
 * subsequent enqueue calls become no-ops until startPacer() runs again.
 */
function stopPacer() {
    if (stopPacerFn)
        stopPacerFn();
    stopPacerFn = null;
    slots = [];
    // Fix: also drop the output pacer state. The interval that drained
    // frameQueue has just been cleared, so leaving outputPacerState set would
    // let late enqueueOutputFrame() calls (e.g. straggling ffmpeg stdout data
    // during teardown) accumulate Int16Array frames that are never consumed.
    outputPacerState = null;
}
|
|
141
|
+
/**
 * Queues a PCM frame on a participant slot for the pacer to write into the
 * slot's ffmpeg input pipe. Ignored when the slot does not exist.
 *
 * @param {number} slot - Participant slot index.
 * @param {Int16Array} samples - PCM frame to enqueue.
 * @param {number} numberOfFrames - Frame count (currently unused here).
 */
function enqueueFrame(slot, samples, numberOfFrames) {
    const slotState = slots[slot];
    if (!slotState)
        return;
    // View the Int16Array's bytes as a Buffer without copying.
    slotState.q.push(Buffer.from(samples.buffer, samples.byteOffset, samples.byteLength));
}
|
|
148
|
+
/**
 * Drops all pending audio for a slot and pushes its next pacing deadline one
 * frame into the future, so silence is written until new audio arrives.
 *
 * @param {number} slot - Participant slot index; out-of-range is a no-op.
 */
function clearSlotQueue(slot) {
    const slotState = slots[slot];
    if (!slotState)
        return;
    slotState.q = [];
    const nowMs = Number(process.hrtime.bigint()) / 1e6;
    const frameDurationMs = (slotState.lastFrames / STREAM_INPUT_SAMPLE_RATE_IN_HZ) * 1000;
    slotState.nextDueMs = nowMs + frameDurationMs;
}
|
|
157
|
+
/**
 * Builds the ffmpeg CLI argument list for mixing PARTICIPANT_SLOTS mono
 * s16le inputs (read from pipes fd 3..3+N-1) into a single mono s16le
 * stream written to stdout (pipe:1).
 *
 * @returns {string[]} Argument vector for child_process.spawn("ffmpeg", ...).
 */
function getFFmpegArguments() {
    const slotCount = PARTICIPANT_SLOTS;
    const sampleRate = STREAM_INPUT_SAMPLE_RATE_IN_HZ;
    const args = [];
    // One raw-PCM input per participant slot, each on its own pipe fd.
    for (let slot = 0; slot < slotCount; slot++) {
        args.push("-f", "s16le", "-ar", String(sampleRate), "-ac", "1", "-i", `pipe:${3 + slot}`);
    }
    // Per-input resample/timestamp normalisation chains, labelled [a0]..[aN-1].
    const chains = [];
    for (let slot = 0; slot < slotCount; slot++) {
        chains.push(`[${slot}:a]aresample=async=1:first_pts=0,asetpts=N/SR/TB[a${slot}]`);
    }
    const inputLabels = Array.from({ length: slotCount }, (_, slot) => `[a${slot}]`).join("");
    const mixStage = `${inputLabels}amix=inputs=${slotCount}:duration=longest:dropout_transition=250:normalize=0[mix]`;
    const filterGraph = `${chains.join(";")};${mixStage}`;
    // Quiet logging, apply the filter graph, and emit raw s16le on stdout.
    args.push("-hide_banner", "-nostats", "-loglevel", "error", "-filter_complex", filterGraph, "-map", "[mix]", "-f", "s16le", "-ar", String(sampleRate), "-ac", "1", "-c:a", "pcm_s16le", "pipe:1");
    return args;
}
|
|
174
|
+
/**
 * Spawns the ffmpeg mixer process, starts the pacer that feeds it, and wires
 * its stdout back into FRAME_10MS_SAMPLES-sized Int16Array frames queued for
 * the RTCAudioSource.
 *
 * @param rtcAudioSource - Destination for mixed audio frames.
 * @param onAudioStreamReady - Forwarded to startPacer(); fired on first mixed frame.
 * @returns The spawned ffmpeg ChildProcess.
 */
function spawnFFmpegProcess(rtcAudioSource, onAudioStreamReady) {
    // stdin ignored; stdout/stderr piped; plus one extra pipe per slot (fd 3+).
    const stdio = ["ignore", "pipe", "pipe", ...Array(PARTICIPANT_SLOTS).fill("pipe")];
    const args = getFFmpegArguments();
    const ffmpegProcess = child_process.spawn("ffmpeg", args, { stdio });
    startPacer(ffmpegProcess, PARTICIPANT_SLOTS, rtcAudioSource, onAudioStreamReady);
    ffmpegProcess.stderr.setEncoding("utf8");
    ffmpegProcess.stderr.on("data", (d) => console.error("[ffmpeg]", String(d).trim()));
    // "error" fires when spawning fails, e.g. the ffmpeg binary is missing.
    ffmpegProcess.on("error", () => console.error("FFmpeg process error: is ffmpeg installed?"));
    // Reassemble ffmpeg's byte stream into fixed-size PCM frames: stdout
    // chunks are arbitrary lengths, so buffer until a full frame is available.
    let audioBuffer = Buffer.alloc(0);
    const FRAME_SIZE_BYTES = FRAME_10MS_SAMPLES * BYTES_PER_SAMPLE;
    ffmpegProcess.stdout.on("data", (chunk) => {
        audioBuffer = Buffer.concat([audioBuffer, chunk]);
        while (audioBuffer.length >= FRAME_SIZE_BYTES) {
            const frameData = audioBuffer.subarray(0, FRAME_SIZE_BYTES);
            // Decode little-endian 16-bit samples into an Int16Array.
            const samples = new Int16Array(FRAME_10MS_SAMPLES);
            for (let i = 0; i < FRAME_10MS_SAMPLES; i++) {
                samples[i] = frameData.readInt16LE(i * 2);
            }
            enqueueOutputFrame(samples);
            audioBuffer = audioBuffer.subarray(FRAME_SIZE_BYTES);
        }
    });
    return ffmpegProcess;
}
|
|
198
|
+
/**
 * Subscribes an AudioSink to a participant's audio track and routes its PCM
 * into the slot's buffering pipeline (resampling to the stream rate when
 * needed). Frames that are not mono 16-bit are ignored.
 *
 * @param ffmpegProcess - Mixer process; the slot's pipe is stdio[3 + slot].
 * @param {number} slot - Participant slot index.
 * @param audioTrack - The remote participant's audio MediaStreamTrack.
 * @returns {{ sink, writer, stop }} The sink, the slot's writable pipe, and a
 *   teardown function that unsubscribes and stops the sink.
 */
function writeAudioDataToFFmpeg(ffmpegProcess, slot, audioTrack) {
    const writer = ffmpegProcess.stdio[3 + slot];
    const sink = new AudioSink(audioTrack);
    const unsubscribe = sink.subscribe((frame) => {
        const { samples, sampleRate: sr, channelCount: ch, bitsPerSample, numberOfFrames } = frame;
        // Only mono 16-bit PCM is supported by the pipeline.
        if (ch !== 1 || bitsPerSample !== 16)
            return;
        const needsResample = sr !== STREAM_INPUT_SAMPLE_RATE_IN_HZ;
        const out = needsResample
            ? resampleTo48kHz(samples, sr, numberOfFrames !== null && numberOfFrames !== void 0 ? numberOfFrames : samples.length)
            : samples;
        appendAndDrainTo480(slot, out);
    });
    const stop = () => {
        try {
            unsubscribe();
            sink.stop();
        }
        catch (_a) {
            console.error("Failed to stop AudioSink");
        }
    };
    return { sink, writer, stop };
}
|
|
222
|
+
/**
 * Stops the pacer and tears down the ffmpeg process: unpipes stdout, ends
 * every slot's input pipe, then sends SIGTERM. Safe to call with a null or
 * already-killed process (only the pacer is stopped in that case).
 *
 * @param ffmpegProcess - The ChildProcess returned by spawnFFmpegProcess(), or null.
 */
function stopFFmpegProcess(ffmpegProcess) {
    stopPacer();
    if (!ffmpegProcess || ffmpegProcess.killed)
        return;
    try {
        ffmpegProcess.stdout.unpipe();
    }
    catch (_a) {
        console.error("Failed to unpipe ffmpeg stdout");
    }
    // Close each slot's writable pipe so ffmpeg sees EOF on its inputs.
    for (let slot = 0; slot < PARTICIPANT_SLOTS; slot++) {
        try {
            ffmpegProcess.stdio[3 + slot].end();
        }
        catch (_b) {
            console.error("Failed to end ffmpeg writable stream");
        }
    }
    ffmpegProcess.kill("SIGTERM");
}
|
|
243
|
+
|
|
244
|
+
/**
 * Mixes the audio of up to PARTICIPANT_SLOTS remote participants into a
 * single MediaStream via an ffmpeg child process. Slots are assigned per
 * participant id; each attached participant's track is piped into its slot,
 * and the mixed result is exposed through getCombinedAudioStream().
 */
class AudioMixer extends events.EventEmitter {
    /**
     * @param onStreamReady - Called once the first mixed audio frame is delivered.
     */
    constructor(onStreamReady) {
        super();
        this.ffmpegProcess = null;
        this.combinedAudioStream = null;
        this.rtcAudioSource = null;
        this.participantSlots = new Map();
        this.activeSlots = {};
        this.setupMediaStream();
        // Slot map: index -> participant id; "" marks a free slot.
        this.participantSlots = new Map(Array.from({ length: PARTICIPANT_SLOTS }, (_, i) => [i, ""]));
        this.onStreamReady = onStreamReady;
    }
    // Creates the RTCAudioSource and the MediaStream that carries its track.
    setupMediaStream() {
        this.rtcAudioSource = new wrtc.nonstandard.RTCAudioSource();
        const audioTrack = this.rtcAudioSource.createTrack();
        this.combinedAudioStream = new wrtc.MediaStream([audioTrack]);
    }
    /** Returns the mixed-audio MediaStream (null before setup). */
    getCombinedAudioStream() {
        return this.combinedAudioStream;
    }
    /**
     * Reconciles slot assignments with the current participant list:
     * spawns ffmpeg lazily, attaches new/changed participants, detaches
     * participants that left, and shuts everything down when nobody remains.
     */
    handleRemoteParticipants(participants) {
        if (participants.length === 0) {
            this.stopAudioMixer();
            return;
        }
        // Lazily start ffmpeg on the first participant.
        if (!this.ffmpegProcess && this.rtcAudioSource) {
            this.ffmpegProcess = spawnFFmpegProcess(this.rtcAudioSource, this.onStreamReady);
        }
        for (const p of participants)
            this.attachParticipantIfNeeded(p);
        // Release slots held by participants no longer present.
        const liveIds = new Set(participants.map((p) => p.id).filter(Boolean));
        for (const [slot, pid] of this.participantSlots) {
            if (pid && !liveIds.has(pid))
                this.detachParticipant(pid);
        }
    }
    /** Stops ffmpeg, clears all slot state, and rebuilds a fresh output stream. */
    stopAudioMixer() {
        if (this.ffmpegProcess) {
            stopFFmpegProcess(this.ffmpegProcess);
            this.ffmpegProcess = null;
        }
        this.participantSlots = new Map(Array.from({ length: PARTICIPANT_SLOTS }, (_, i) => [i, ""]));
        this.activeSlots = {};
        // NOTE(review): this replaces combinedAudioStream with a new stream;
        // consumers holding the old reference will not see the new one.
        this.setupMediaStream();
    }
    // Returns the slot index currently assigned to the participant, or null.
    slotForParticipant(participantId) {
        var _a;
        const found = (_a = [...this.participantSlots.entries()].find(([, id]) => id === participantId)) === null || _a === void 0 ? void 0 : _a[0];
        return found === undefined ? null : found;
    }
    // Returns the participant's existing slot, or claims the first free one;
    // null when all slots are taken.
    acquireSlot(participantId) {
        var _a;
        const existing = this.slotForParticipant(participantId);
        if (existing !== null)
            return existing;
        const empty = (_a = [...this.participantSlots.entries()].find(([, id]) => id === "")) === null || _a === void 0 ? void 0 : _a[0];
        if (empty === undefined)
            return null;
        this.participantSlots.set(empty, participantId);
        return empty;
    }
    /**
     * Attaches a participant's audio track to a slot if it is streamable.
     * Detaches instead when the participant has no stream, audio is muted,
     * or the stream carries no audio track. Re-attaches when the track id
     * changed since the last attach.
     */
    attachParticipantIfNeeded(participant) {
        var _a;
        const { id: participantId, stream: participantStream, isAudioEnabled } = participant;
        if (!participantId)
            return;
        if (!participantStream || !isAudioEnabled) {
            this.detachParticipant(participantId);
            return;
        }
        const audioTrack = participantStream.getTracks().find((t) => t.kind === "audio");
        if (!audioTrack) {
            this.detachParticipant(participantId);
            return;
        }
        const slot = this.acquireSlot(participantId);
        if (slot === null)
            return;
        const existing = this.activeSlots[slot];
        // Same track already bound: nothing to do.
        if (existing && existing.trackId === audioTrack.id)
            return;
        if (existing) {
            try {
                existing.stop();
            }
            catch (e) {
                console.error("Failed to stop existing audio track", { error: e });
            }
            this.activeSlots[slot] = undefined;
        }
        const { sink, writer, stop } = writeAudioDataToFFmpeg(this.ffmpegProcess, slot, audioTrack);
        this.activeSlots[slot] = { sink, writer, stop, trackId: audioTrack.id };
        // Auto-detach when the underlying track ends (optional API on the track).
        (_a = audioTrack.addEventListener) === null || _a === void 0 ? void 0 : _a.call(audioTrack, "ended", () => this.detachParticipant(participantId));
    }
    /** Stops the participant's sink, clears the slot's queue, and frees the slot. */
    detachParticipant(participantId) {
        const slot = this.slotForParticipant(participantId);
        if (slot === null)
            return;
        const binding = this.activeSlots[slot];
        if (binding) {
            try {
                binding.stop();
            }
            catch (e) {
                console.error("Failed to stop existing audio track", { error: e });
            }
            this.activeSlots[slot] = undefined;
        }
        clearSlotQueue(slot);
        this.participantSlots.set(slot, "");
    }
}
|
|
356
|
+
|
|
357
|
+
// Public CommonJS export of this module: the ffmpeg-backed participant audio mixer.
exports.AudioMixer = AudioMixer;
|
package/dist/tools.d.ts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { EventEmitter } from 'events';
import { RemoteParticipantState } from '@whereby.com/core';

/**
 * Mixes remote participants' audio into a single combined MediaStream.
 * Implementation lives in dist/tools.cjs.
 */
declare class AudioMixer extends EventEmitter {
    private ffmpegProcess;
    private combinedAudioStream;
    private rtcAudioSource;
    private participantSlots;
    private activeSlots;
    private onStreamReady;
    /** @param onStreamReady - Invoked when the first mixed audio frame is delivered. */
    constructor(onStreamReady: () => void);
    private setupMediaStream;
    /** Returns the mixed-audio stream, or null before it is created. */
    getCombinedAudioStream(): MediaStream | null;
    /** Reconciles the mixer's slots with the given participant list. */
    handleRemoteParticipants(participants: RemoteParticipantState[]): void;
    /** Stops the mixer and releases all participant slots. */
    stopAudioMixer(): void;
    private slotForParticipant;
    private acquireSlot;
    private attachParticipantIfNeeded;
    private detachParticipant;
}

export { AudioMixer };
|
package/package.json
CHANGED
|
@@ -2,11 +2,12 @@
|
|
|
2
2
|
"name": "@whereby.com/assistant-sdk",
|
|
3
3
|
"description": "Assistant SDK for whereby.com",
|
|
4
4
|
"author": "Whereby AS",
|
|
5
|
-
"version": "0.0.0-canary-
|
|
5
|
+
"version": "0.0.0-canary-20250911141956",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"files": [
|
|
8
8
|
"dist",
|
|
9
|
-
"polyfills/package.json"
|
|
9
|
+
"polyfills/package.json",
|
|
10
|
+
"tools/package.json"
|
|
10
11
|
],
|
|
11
12
|
"publishConfig": {
|
|
12
13
|
"access": "public"
|
|
@@ -33,23 +34,33 @@
|
|
|
33
34
|
"types": "./dist/polyfills.d.ts",
|
|
34
35
|
"default": "./dist/polyfills.cjs"
|
|
35
36
|
}
|
|
37
|
+
},
|
|
38
|
+
"./tools": {
|
|
39
|
+
"import": {
|
|
40
|
+
"types": "./dist/tools.d.ts",
|
|
41
|
+
"default": "./dist/tools.cjs"
|
|
42
|
+
},
|
|
43
|
+
"require": {
|
|
44
|
+
"types": "./dist/tools.d.ts",
|
|
45
|
+
"default": "./dist/tools.cjs"
|
|
46
|
+
}
|
|
36
47
|
}
|
|
37
48
|
},
|
|
38
49
|
"devDependencies": {
|
|
39
50
|
"eslint": "^9.29.0",
|
|
40
51
|
"prettier": "^3.5.3",
|
|
41
52
|
"typescript": "^5.8.3",
|
|
42
|
-
"@whereby.com/rollup-config": "0.1.0",
|
|
43
|
-
"@whereby.com/prettier-config": "0.1.0",
|
|
44
|
-
"@whereby.com/jest-config": "0.1.0",
|
|
45
53
|
"@whereby.com/eslint-config": "0.1.0",
|
|
54
|
+
"@whereby.com/jest-config": "0.1.0",
|
|
55
|
+
"@whereby.com/prettier-config": "0.1.0",
|
|
56
|
+
"@whereby.com/rollup-config": "0.1.0",
|
|
46
57
|
"@whereby.com/tsconfig": "0.1.0"
|
|
47
58
|
},
|
|
48
59
|
"dependencies": {
|
|
49
60
|
"@roamhq/wrtc": "github:whereby/node-webrtc#patch/rtc_audio_source",
|
|
50
61
|
"uuid": "^11.0.3",
|
|
51
62
|
"ws": "^8.18.0",
|
|
52
|
-
"@whereby.com/core": "0.0.0-canary-
|
|
63
|
+
"@whereby.com/core": "0.0.0-canary-20250911141956"
|
|
53
64
|
},
|
|
54
65
|
"prettier": "@whereby.com/prettier-config",
|
|
55
66
|
"scripts": {
|