com.adrenak.univoice 4.7.0 → 4.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +37 -25
- package/Runtime/Adrenak.UniVoice.Runtime.asmdef +2 -1
- package/Runtime/Common/WavFileWriter.cs +124 -0
- package/Runtime/Common/WavFileWriter.cs.meta +11 -0
- package/Runtime/Impl/Networks/FishNet/FishNetBroadcast.cs +22 -0
- package/Runtime/Impl/Networks/FishNet/FishNetBroadcast.cs.meta +2 -0
- package/Runtime/Impl/Networks/FishNet/FishNetBroadcastTags.cs +15 -0
- package/Runtime/Impl/Networks/FishNet/FishNetBroadcastTags.cs.meta +2 -0
- package/Runtime/Impl/Networks/FishNet/FishNetClient.cs +197 -0
- package/Runtime/Impl/Networks/FishNet/FishNetClient.cs.meta +2 -0
- package/Runtime/Impl/Networks/FishNet/FishNetServer.cs +236 -0
- package/Runtime/Impl/Networks/FishNet/FishNetServer.cs.meta +2 -0
- package/Runtime/Impl/Networks/FishNet.meta +3 -0
- package/Runtime/Impl/Networks/Mirror/MirrorClient.cs +1 -1
- package/Runtime/Impl/Networks/Mirror/MirrorMessage.cs +1 -1
- package/Runtime/Impl/Networks/Mirror/MirrorMessageTags.cs +1 -1
- package/Runtime/Impl/Networks/Mirror/MirrorModeObserver.cs +1 -1
- package/Runtime/Impl/Networks/Mirror/MirrorServer.cs +1 -1
- package/Samples~/Basic Setup Scripts/FishNet-SinglePrefabObjects.asset +15 -0
- package/Samples~/Basic Setup Scripts/FishNet-SinglePrefabObjects.asset.meta +8 -0
- package/Samples~/Basic Setup Scripts/UniVoiceFishNetSetupSample.cs +199 -0
- package/Samples~/Basic Setup Scripts/UniVoiceFishNetSetupSample.cs.meta +11 -0
- package/Samples~/Basic Setup Scripts/UniVoiceFishNetSetupSample.unity +335 -0
- package/Samples~/Basic Setup Scripts/UniVoiceFishNetSetupSample.unity.meta +7 -0
- package/Samples~/Basic Setup Scripts/UniVoiceMirrorSetupSample.cs +2 -2
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,25 +1,13 @@
 # UniVoice
 UniVoice is a voice chat/VoIP solution for Unity.
 
-Some features of UniVoice:
-- 👥 Group voice chat. Multiple peers can join a chatroom and exchange audio.
-
-- ⚙ Fine control over audio data flow.
-    * Don't want to listen to a peer? Mute them. Don't want someone listening to you? Deafen them.
-    * Group players using tags and control audio flow between them. For example:
-        - "red", "blue" and "spectator" tags for two teams playing against each other.
-            - Red and Blue teams can only hear each other
-            - Spectators can hear everyone
-        - clients with "contestant", "judge" and "audience" tags for a virtual talent show.
-            - Contestant can be heard by everyone, but don't hear anyone else (for focus)
-            - Judges can talk to and hear each other for discussions. They can hear the contestant. But not the audience (for less noise)
-            - Audience can hear and talk to each other. They can hear the performer. But they cannot hear the judges.
-
+Some features of UniVoice:
 - 🎨 Customize your audio input, output and networking layers.
 * 🌐 __Configurable Network__:
     - UniVoice is networking agnostic. Implement the `IAudioClient` and `IAudioServer` interfaces using the networking plugin of your choice to have it send audio data over any networking solution.
     - Built-in support for:
-        - Mirror networking
+        - [Mirror networking](https://mirror-networking.com/)
+        - [Fish Networking](https://fish-networking.gitbook.io/docs)
 
 * 🎤 __Configurable Audio Input__:
     - UniVoice is audio input agnostic. You can change the source of outgoing audio by implementing the `IAudioInput` interface.

@@ -37,6 +25,22 @@ Some features of UniVoice:
     - Opus (Concentus) encoding & decoding.
     - RNNoise based noise removal.
     - Gaussian blurring for minor denoising.
+
+- 👥 Easy integration with your existing networking solution
+    - Whether you're using Mirror or FishNet, UniVoice runs in the background in sync with your networking lifecycle
+    - A basic integration involves just initializing it on start.
+    - For advanced usage like teams, chatrooms, lobbies, you can use the UniVoice API to create runtime behaviour.
+
+- ⚙ Fine control over audio data flow.
+    * Don't want to listen to a peer? Mute them. Don't want someone listening to you? Deafen them.
+    * Group players using tags and control audio flow between them. For example:
+        - "red", "blue" and "spectator" tags for two teams playing against each other.
+            - Red and Blue teams can only hear each other
+            - Spectators can hear everyone
+        - clients with "contestant", "judge" and "audience" tags for a virtual talent show.
+            - Contestant can be heard by everyone, but don't hear anyone else (for focus)
+            - Judges can talk to and hear each other for discussions. They can hear the contestant. But not the audience (for less noise)
+            - Audience can hear and talk to each other. They can hear the performer. But they cannot hear the judges.
 
 ## Installation
 ⚠️ [OpenUPM](https://openupm.com/packages/com.adrenak.univoice/?subPage=versions) may not have up to date releases. Install using NPM registry instead 👇

@@ -62,19 +66,18 @@ Then add `com.adrenak.univoice:x.y.z` to the `dependencies` in your `manifest.json`
 ## Useful links
 * API reference is available here: http://www.vatsalambastha.com/univoice
 * UniVoice blog: https://blog.vatsalambastha.com/search/label/univoice
-* Discord server: https://discord.gg/
+* Discord server: https://discord.gg/NGvkEVbdjQ
 
 ## Integration
 UniVoice isn't currently very drag-and-drop/low-code. The best way to integrate is to have some code perform a one time setup when your app starts and provides access to relevant objects that you can use throughout the rest of the apps runtime.
 
-An example of this is the `UniVoiceMirrorSetupSample.cs` file that gives you access to an AudioServer that you can use in your server code and a ClientSession that you can use in your client code. For more see the "Samples" section below.
-
 ## Samples
 This repository contains two samples:
-* `UniVoiceMirrorSetupSample.cs` is a drag and drop component, a simple integration sample script. You can add it to your Mirror NetworkManager to get voice chat to work. No code required, it's as simple as that! It'll work as long as you have setup your project properly. For more instructions see the top of the `UniVoiceMirrorSetupSample.cs` file.
-*
+* `UniVoiceMirrorSetupSample.cs` is a drag and drop component, a simple integration sample script. You can add it to your Mirror NetworkManager to get voice chat to work. No code required, it's as simple as that! It'll work as long as you have setup your project properly. For more instructions see the top of the `UniVoiceMirrorSetupSample.cs` file.
+* `UniVoiceFishNetSetypSample.cs` is also very similar. Just drag and drop and it should work!
+* A sample scene that shows the other clients in a UI as well as allows you to mute yourself/them. This sample is Mirror based.
 
-> UniVoice currently only supports Mirror out of the box.
+> UniVoice currently only supports Mirror and FishNetworking out of the box. Follow the instructions in the "Activating non-packaged dependencies" section below before trying it out the samples.
 
 ## Dependencies
 [com.adrenak.brw](https://www.github.com/adrenak/brw) for reading and writing messages for communication. See `MirrorServer.cs` and `MirrorClient.cs` where they're used.

@@ -90,19 +93,28 @@ UniVoice includes and installs the dependencies mentioned above along with itself
 * Mic audio capture input (via UniMic)
 * AudioSource based playback output (via UniMic)
 
-
-* Mirror network:
-    * To enable, ensure the Mirror package is in your project and add `UNIVOICE_NETWORK_MIRROR` to activate it
+UniVoice has code that uses dependencies that you have to install and sometimes enable via compilation symbols as they are _not_ UniVoice dependencies and _don't_ get installed along with UniVoice. This is because they are either third party modules or based on native libraries (not plain C#) that can pose build issues.
 * RNNoise Noise removal filter:
     * To enable, ensure the [RNNoise4Unity](https://github.com/adrenak/RNNoise4Unity) package is in your project and add `UNIVOICE_FILTER_RNNOISE4UNITY` to activate it
+* Mirror network:
+    * Just add the Mirror package to your project. UniVoice will detect it.
+* Fish Networking:
+    * Just install FishNet package in your project. UniVoice will detect it.
 
 ## License and Support
 This project is under the [MIT license](https://github.com/adrenak/univoice/blob/master/LICENSE).
 
 Community contributions are welcome.
+
+Commercial engagements with the author can be arranged, subject to schedule and availability.
+
+## Acknowledgements and contributors
+* [@metater](https://github.com/Metater/) for helping make improvements to audio streaming quality. [A related blog post](https://blog.vatsalambastha.com/2025/07/unimic-330-many-streamedaudiosource.html)
+* [@FrantisekHolubec](https://github.com/FrantisekHolubec) for [FishNet support code](https://github.com/adrenak/univoice/commit/fdc3424180d8991c92b3e092b3edb50b6110c863). Here's a [related blog post](https://blog.vatsalambastha.com/2025/09/univoice-480-fishnet-support.html)
+* [Masaryk University](https://www.muni.cz/en) for using UniVoice in their projects and providing helpful feedback
 
 ## Contact
-The
+The author can be reached at the following links:
 
 [Website](http://www.vatsalambastha.com)
 [LinkedIn](https://www.linkedin.com/in/vatsalAmbastha)
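The README above leans on the `IAudioClient`/`IAudioServer` abstraction without showing it. Purely for orientation, here is a skeleton of what a custom client built on another transport could look like. Every member name is inferred from the public surface of the `FishNetClient` added later in this diff, not from the package's interface definition, and `MyTransportClient` is a hypothetical name; the shipped interface is the source of truth.

using System;
using System.Collections.Generic;
using Adrenak.UniVoice;

// Hypothetical skeleton of a transport-specific client. The member list
// mirrors FishNetClient further down in this diff; the real IAudioClient<T>
// interface in the package may declare more or fewer members.
public class MyTransportClient : IAudioClient<int> {
    public int ID { get; private set; } = -1;
    public List<int> PeerIDs { get; private set; } = new List<int>();
    public VoiceSettings YourVoiceSettings { get; private set; } = new VoiceSettings();

    // Raise these from your transport's connection/message callbacks.
    public event Action<int, List<int>> OnJoined;
    public event Action OnLeft;
    public event Action<int> OnPeerJoined;
    public event Action<int> OnPeerLeft;
    public event Action<int, AudioFrame> OnReceivedPeerAudioFrame;

    // Serialize the frame and send it to the server over your transport.
    public void SendAudioFrame(AudioFrame frame) { /* transport specific */ }

    // Push YourVoiceSettings (mutes, deafens, tags) to the server.
    public void SubmitVoiceSettings() { /* transport specific */ }

    // Unsubscribe from transport callbacks and clear state.
    public void Dispose() { }
}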
package/Runtime/Adrenak.UniVoice.Runtime.asmdef
CHANGED

@@ -6,7 +6,8 @@
         "GUID:f87ecb857e752164ab814a3de8eb0262",
         "GUID:b118fd5a40c85ad4e9b38e8c4a42bbb1",
         "GUID:4653938bfdb5cf8409322ce17219d5f7",
-        "GUID:30817c1a0e6d646d99c048fc403f5979"
+        "GUID:30817c1a0e6d646d99c048fc403f5979",
+        "GUID:7c88a4a7926ee5145ad2dfa06f454c67"
     ],
     "includePlatforms": [],
     "excludePlatforms": [],
package/Runtime/Common/WavFileWriter.cs
ADDED

using System;
using System.IO;
using System.Text;

/*
Example script for usage with UniMic. Just add it to a scene,
play and then quit/exit play mode:

public class WavFileWriterTest : MonoBehaviour {
    WavFileWriter writer;

    void Start() {
        string path = string.Empty;
        if (Application.isEditor)
            path = Application.dataPath.Replace("Assets", "output.wav");
        else
            path = Path.Combine(Application.persistentDataPath, "output.wav");

        writer = new WavFileWriter(path);

        Mic.Init();

        Mic.AvailableDevices[0].OnFrameCollected += OnFrameCollected;
        Mic.AvailableDevices[0].StartRecording(20);
    }

    private void OnFrameCollected(int arg1, int arg2, float[] arg3) {
        writer.Write(arg1, arg2, arg3);
    }

    private void OnDestroy() {
        writer.Dispose();
    }
}
*/

namespace Adrenak.UniVoice {
    /// <summary>
    /// A utility to write audio samples to a file on disk.
    /// Construct using the path you want to store the audio file at.
    /// Invoke Write with the sampling frequency, channel count and PCM samples
    /// and it will lazily initialize.
    /// </summary>
    public class WavFileWriter : IDisposable {
        FileStream fileStream;
        int sampleRate;
        short channels;
        readonly short bitsPerSample = 16;

        long dataChunkPos;
        int totalSampleCount = 0;
        bool isInitialized = false;
        readonly string path;

        public WavFileWriter(string path) {
            this.path = path;
        }

        public void Write(int frequency, int channelCount, float[] samples) {
            if (!isInitialized) {
                sampleRate = frequency;
                channels = (short)channelCount;
                fileStream = new FileStream(path, FileMode.Create, FileAccess.Write);
                WriteWavHeader();
                isInitialized = true;
            }
            else {
                if (frequency != sampleRate || channelCount != channels)
                    throw new InvalidOperationException("Inconsistent frequency or channel count between calls.");
            }

            byte[] buffer = new byte[samples.Length * 2]; // 2 bytes per sample (16-bit PCM)
            for (int i = 0; i < samples.Length; i++) {
                short intSample = (short)Math.Clamp(samples[i] * short.MaxValue, short.MinValue, short.MaxValue);
                buffer[i * 2] = (byte)(intSample & 0xff);
                buffer[i * 2 + 1] = (byte)((intSample >> 8) & 0xff);
            }

            fileStream.Write(buffer, 0, buffer.Length);
            totalSampleCount += samples.Length;
        }

        void WriteWavHeader() {
            var writer = new BinaryWriter(fileStream, Encoding.ASCII, true);

            writer.Write(Encoding.ASCII.GetBytes("RIFF"));
            writer.Write(0); // placeholder for file size
            writer.Write(Encoding.ASCII.GetBytes("WAVE"));

            // fmt chunk
            writer.Write(Encoding.ASCII.GetBytes("fmt "));
            writer.Write(16); // PCM header size
            writer.Write((short)1); // PCM format
            writer.Write(channels);
            writer.Write(sampleRate);
            int byteRate = sampleRate * channels * bitsPerSample / 8;
            writer.Write(byteRate);
            short blockAlign = (short)(channels * bitsPerSample / 8);
            writer.Write(blockAlign);
            writer.Write(bitsPerSample);

            // data chunk
            writer.Write(Encoding.ASCII.GetBytes("data"));
            dataChunkPos = fileStream.Position;
            writer.Write(0); // placeholder for data chunk size
        }

        public void Dispose() {
            if (!isInitialized) return;

            long dataSize = totalSampleCount * bitsPerSample / 8;

            fileStream.Seek(dataChunkPos, SeekOrigin.Begin);
            fileStream.Write(BitConverter.GetBytes((int)dataSize), 0, 4);

            fileStream.Seek(4, SeekOrigin.Begin);
            int fileSize = (int)(fileStream.Length - 8);
            fileStream.Write(BitConverter.GetBytes(fileSize), 0, 4);

            fileStream.Dispose();
            isInitialized = false;
        }
    }
}
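The comment block at the top of WavFileWriter.cs shows the intended UniMic wiring. As a mic-free sanity check, the writer can also be driven with synthetic samples. The sketch below uses only the Write/Dispose API defined above; the output path, tone frequency and frame size are arbitrary choices for the example, not package conventions.

using System;

// Writes one second of a 440 Hz sine tone through WavFileWriter in 20 ms
// frames, roughly the way a mic callback would feed it.
public static class WavFileWriterToneTest {
    public static void Run() {
        const int frequency = 48000;   // sample rate passed to Write
        const int channels = 1;        // mono
        var writer = new Adrenak.UniVoice.WavFileWriter("tone.wav");

        int samplesPerFrame = frequency / 50; // 20 ms per frame
        var frame = new float[samplesPerFrame];
        for (int f = 0; f < 50; f++) {
            for (int i = 0; i < samplesPerFrame; i++) {
                int n = f * samplesPerFrame + i;
                frame[i] = (float)Math.Sin(2 * Math.PI * 440 * n / frequency);
            }
            writer.Write(frequency, channels, frame);
        }

        // Dispose patches the RIFF and data chunk sizes and closes the file.
        writer.Dispose();
    }
}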
package/Runtime/Impl/Networks/FishNet/FishNetBroadcast.cs
ADDED

#if FISHNET
using System;
using FishNet.Broadcast;

namespace Adrenak.UniVoice.Networks
{
    /// <summary>
    /// The messages exchanged between the server and client.
    /// To see how the Mirror implementation of UniVoice uses this struct
    /// find the references to the <see cref="data"/> object in the project.
    /// The gist is, it uses BRW (https://www.github.com/adrenak/brw) to
    /// write and read data. The data always starts with a tag. All the tags
    /// used for this UniVoice FishNet implementation are available in
    /// <see cref="FishNetBroadcastTags"/>
    /// </summary>
    [Serializable]
    public struct FishNetBroadcast : IBroadcast
    {
        public byte[] data;
    }
}
#endif
package/Runtime/Impl/Networks/FishNet/FishNetBroadcastTags.cs
ADDED

#if FISHNET
namespace Adrenak.UniVoice.Networks
{
    /// <summary>
    /// The different types of messages we send over FishNet
    /// to implement the <see cref="IAudioClient{T}"/> and <see cref="IAudioServer{T}"/>
    /// interfaces for FishNet
    /// </summary>
    public class FishNetBroadcastTags
    {
        public const string AUDIO_FRAME = "AUDIO_FRAME";
        public const string VOICE_SETTINGS = "VOICE_SETTINGS";
    }
}
#endif
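As the comment in FishNetBroadcast.cs describes, every payload is a BRW byte stream that begins with one of these tags, followed by tag-specific fields. The sketch below only illustrates that framing convention, using the same BytesWriter/BytesReader calls that FishNetClient.cs (next in this diff) uses; the fields written after the tag here are just an example, and the real layouts are defined in FishNetClient.cs and FishNetServer.cs.

#if FISHNET
using Adrenak.BRW;
using Adrenak.UniVoice.Networks;

// Illustration of the tag-prefixed framing used by the FishNet broadcasts.
public static class BroadcastFramingExample {
    public static FishNetBroadcast Pack(int senderId) {
        var writer = new BytesWriter();
        writer.WriteString(FishNetBroadcastTags.AUDIO_FRAME); // tag always comes first
        writer.WriteInt(senderId);                            // then tag-specific fields
        return new FishNetBroadcast { data = writer.Bytes };
    }

    public static void Unpack(FishNetBroadcast msg) {
        var reader = new BytesReader(msg.data);
        switch (reader.ReadString()) {
            case FishNetBroadcastTags.AUDIO_FRAME:
                int senderId = reader.ReadInt();
                // ...read the remaining fields in the same order they were written
                break;
        }
    }
}
#endif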
package/Runtime/Impl/Networks/FishNet/FishNetClient.cs
ADDED

#if FISHNET
using System;
using System.Collections.Generic;
using System.Linq;
using Adrenak.BRW;
using FishNet;
using FishNet.Managing;
using FishNet.Transporting;
using UnityEngine;

namespace Adrenak.UniVoice.Networks
{
    /// <summary>
    /// This is the implementation of <see cref="IAudioClient{T}"/> interface for FishNet.
    /// It uses the FishNet to send and receive UniVoice data to the server.
    /// </summary>
    public class FishNetClient : IAudioClient<int>
    {
        private const string TAG = "[FishNetClient]";
        public int ID { get; private set; } = -1;

        public List<int> PeerIDs { get; private set; }
        public VoiceSettings YourVoiceSettings { get; private set; }

        public event Action<int, List<int>> OnJoined;
        public event Action OnLeft;
        public event Action<int> OnPeerJoined;
        public event Action<int> OnPeerLeft;
        public event Action<int, AudioFrame> OnReceivedPeerAudioFrame;

        private NetworkManager _networkManager;

        public FishNetClient()
        {
            PeerIDs = new List<int>();
            YourVoiceSettings = new VoiceSettings();

            _networkManager = InstanceFinder.NetworkManager;
            _networkManager.ClientManager.OnClientConnectionState += OnClientConnectionStateChanged;
            _networkManager.ClientManager.OnAuthenticated += OnClientAuthenticated;
            _networkManager.ClientManager.OnRemoteConnectionState += OnRemoteConnectionStateChanged;
            _networkManager.ClientManager.RegisterBroadcast<FishNetBroadcast>(OnReceivedMessage);
        }

        public void Dispose()
        {
            if (_networkManager)
            {
                _networkManager.ClientManager.OnClientConnectionState -= OnClientConnectionStateChanged;
                _networkManager.ClientManager.OnAuthenticated -= OnClientAuthenticated;
                _networkManager.ClientManager.OnRemoteConnectionState -= OnRemoteConnectionStateChanged;
                _networkManager.ClientManager.UnregisterBroadcast<FishNetBroadcast>(OnReceivedMessage);
            }
            PeerIDs.Clear();
        }

        private void OnRemoteConnectionStateChanged(RemoteConnectionStateArgs args)
        {
            // Don't process connection state changes before the client is authenticated
            if (_networkManager.ClientManager.Connection.ClientId < 0)
                return;

            if (args.ConnectionState == RemoteConnectionState.Started)
            {
                var newPeerID = args.ConnectionId;
                if (!PeerIDs.Contains(newPeerID))
                {
                    PeerIDs.Add(newPeerID);
                    Debug.unityLogger.Log(LogType.Log, TAG,
                        $"Peer {newPeerID} joined. Peer list is now {string.Join(", ", PeerIDs)}");
                    OnPeerJoined?.Invoke(newPeerID);
                }
            }
            else if (args.ConnectionState == RemoteConnectionState.Stopped)
            {
                var leftPeerID = args.ConnectionId;
                if (PeerIDs.Contains(leftPeerID))
                {
                    PeerIDs.Remove(leftPeerID);
                    var log2 = $"Peer {leftPeerID} left. ";
                    if (PeerIDs.Count == 0)
                        log2 += "There are no peers anymore.";
                    else
                        log2 += $"Peer list is now {string.Join(", ", PeerIDs)}";

                    Debug.unityLogger.Log(LogType.Log, TAG, log2);
                    OnPeerLeft?.Invoke(leftPeerID);
                }
            }
        }

        private void OnClientAuthenticated()
        {
            // We need to use OnClientAuthenticated to ensure the client does have ClientId set
            ID = _networkManager.ClientManager.Connection.ClientId;
            PeerIDs = _networkManager.ClientManager.Clients.Keys.Where(x => x != ID).ToList();

            var log = $"Initialized with ID {ID}. ";
            if (PeerIDs.Count > 0)
                log += $"Peer list: {string.Join(", ", PeerIDs)}";
            else
                log += "There are currently no peers.";
            Debug.unityLogger.Log(LogType.Log, TAG, log);

            OnJoined?.Invoke(ID, PeerIDs);
            foreach (var peerId in PeerIDs)
                OnPeerJoined?.Invoke(peerId);
        }

        private void OnClientConnectionStateChanged(ClientConnectionStateArgs args)
        {
            // We check only for the stopped state here, as the started state is handled in OnClientAuthenticated
            if (args.ConnectionState == LocalConnectionState.Stopped)
            {
                YourVoiceSettings = new VoiceSettings();
                var oldPeerIds = PeerIDs.ToList();
                PeerIDs.Clear();
                ID = -1;
                foreach (var peerId in oldPeerIds)
                    OnPeerLeft?.Invoke(peerId);
                OnLeft?.Invoke();
            }
        }

        private void OnReceivedMessage(FishNetBroadcast msg, Channel channel)
        {
            var reader = new BytesReader(msg.data);
            var tag = reader.ReadString();
            switch (tag)
            {
                // When the server sends audio from a peer meant for this client
                case FishNetBroadcastTags.AUDIO_FRAME:
                    var sender = reader.ReadInt();
                    if (sender == ID || !PeerIDs.Contains(sender))
                        return;
                    var frame = new AudioFrame
                    {
                        timestamp = reader.ReadLong(),
                        frequency = reader.ReadInt(),
                        channelCount = reader.ReadInt(),
                        samples = reader.ReadByteArray()
                    };
                    OnReceivedPeerAudioFrame?.Invoke(sender, frame);
                    break;
            }
        }

        /// <summary>
        /// Sends an audio frame captured on this client to the server
        /// </summary>
        /// <param name="frame"></param>
        public void SendAudioFrame(AudioFrame frame)
        {
            if (ID == -1)
                return;
            var writer = new BytesWriter();
            writer.WriteString(FishNetBroadcastTags.AUDIO_FRAME);
            writer.WriteInt(ID);
            writer.WriteLong(frame.timestamp);
            writer.WriteInt(frame.frequency);
            writer.WriteInt(frame.channelCount);
            writer.WriteByteArray(frame.samples);

            var message = new FishNetBroadcast
            {
                data = writer.Bytes
            };

            if (_networkManager.ClientManager.Started)
                _networkManager.ClientManager.Broadcast(message, Channel.Unreliable);
        }

        /// <summary>
        /// Updates the server with the voice settings of this client
        /// </summary>
        public void SubmitVoiceSettings()
        {
            if (ID == -1)
                return;
            var writer = new BytesWriter();
            writer.WriteString(FishNetBroadcastTags.VOICE_SETTINGS);
            writer.WriteInt(YourVoiceSettings.muteAll ? 1 : 0);
            writer.WriteIntArray(YourVoiceSettings.mutedPeers.ToArray());
            writer.WriteInt(YourVoiceSettings.deafenAll ? 1 : 0);
            writer.WriteIntArray(YourVoiceSettings.deafenedPeers.ToArray());
            writer.WriteString(string.Join(",", YourVoiceSettings.myTags));
            writer.WriteString(string.Join(",", YourVoiceSettings.mutedTags));
            writer.WriteString(string.Join(",", YourVoiceSettings.deafenedTags));

            var message = new FishNetBroadcast() {
                data = writer.Bytes
            };
            _networkManager.ClientManager.Broadcast(message);
        }
    }
}
#endif
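To round off this file, here is a rough consumption sketch for FishNetClient. It is not taken from the package samples (UniVoiceFishNetSetupSample.cs, included in this release, is the authoritative reference), it omits the mic/encoder plumbing, and it only exercises members visible in the file above; the assumption that VoiceSettings fields can be set from outside is based on how SubmitVoiceSettings reads them.

#if FISHNET
using UnityEngine;
using Adrenak.UniVoice.Networks;

// Rough usage sketch for FishNetClient. Construct it only after FishNet's
// NetworkManager exists, since the constructor relies on InstanceFinder.
public class FishNetClientUsageSketch : MonoBehaviour {
    FishNetClient client;

    void Start() {
        client = new FishNetClient();

        client.OnJoined += (id, peers) =>
            Debug.Log($"Joined voice chat as {id} with {peers.Count} peer(s)");
        client.OnPeerJoined += peer => Debug.Log($"Peer {peer} joined");
        client.OnPeerLeft += peer => Debug.Log($"Peer {peer} left");
        client.OnReceivedPeerAudioFrame += (peer, frame) => {
            // Hand the frame to your audio output / decoder here.
        };
    }

    // Example: mute everyone and inform the server, assuming VoiceSettings
    // exposes these members publicly (as SubmitVoiceSettings above implies).
    public void MuteEveryone() {
        client.YourVoiceSettings.muteAll = true;
        client.SubmitVoiceSettings();
    }

    void OnDestroy() {
        client?.Dispose();
    }
}
#endif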