Add basic VAD support
streamer45 committed Apr 16, 2021
1 parent b767f30 commit ce32978
Showing 9 changed files with 296 additions and 38 deletions.
1 change: 0 additions & 1 deletion README.md
@@ -18,7 +18,6 @@ Password: password

- Code cleanup & linting.
- Configuration.
- [VAD](https://en.wikipedia.org/wiki/Voice_activity_detection) support.
- Proper [ICE](https://webrtcglossary.com/ice/) handling.
- [TURN](https://webrtcglossary.com/turn/) support.
- [HA](https://developers.mattermost.com/extend/plugins/server/ha/) support.
10 changes: 6 additions & 4 deletions server/message.go
@@ -10,10 +10,12 @@ type message struct {
}

const (
- messageTypeSignal = "signal"
- messageTypeICE    = "ice"
- messageTypeMute   = "mute"
- messageTypeUnmute = "unmute"
+ messageTypeSignal   = "signal"
+ messageTypeICE      = "ice"
+ messageTypeMute     = "mute"
+ messageTypeUnmute   = "unmute"
+ messageTypeVoiceOn  = "voice_on"
+ messageTypeVoiceOff = "voice_off"
)

func (m *message) ToJSON() ([]byte, error) {
45 changes: 34 additions & 11 deletions server/signal.go
@@ -18,16 +18,19 @@ const (
wsEventUserDisconnected = "user_disconnected"
wsEventUserMuted = "user_muted"
wsEventUserUnmuted = "user_unmuted"
+ wsEventUserVoiceOn = "user_voice_on"
+ wsEventUserVoiceOff = "user_voice_off"
)

type session struct {
- wsInCh    <-chan []byte
- wsOutCh   chan<- []byte
- outTrack  *webrtc.TrackLocalStaticRTP
- outConn   *webrtc.PeerConnection
- channelID string
- isMuted   bool
- mut       sync.RWMutex
+ wsInCh     <-chan []byte
+ wsOutCh    chan<- []byte
+ outTrack   *webrtc.TrackLocalStaticRTP
+ outConn    *webrtc.PeerConnection
+ channelID  string
+ isMuted    bool
+ isSpeaking bool
+ mut        sync.RWMutex
}

func (p *Plugin) handleWebSocket(w http.ResponseWriter, r *http.Request, channelID string) {
@@ -84,14 +87,23 @@ func (p *Plugin) handleWebSocket(w http.ResponseWriter, r *http.Request, channel
// notify connected user about other users state.
p.mut.RLock()
for id, session := range p.sessions {
- var evType string
+ var mutedEvType string
+ var voiceEvType string
if session.isMuted {
- evType = wsEventUserMuted
+ mutedEvType = wsEventUserMuted
} else {
- evType = wsEventUserUnmuted
+ mutedEvType = wsEventUserUnmuted
}
+ if session.isSpeaking {
+ voiceEvType = wsEventUserVoiceOn
+ } else {
+ voiceEvType = wsEventUserVoiceOff
+ }
if id != userID {
- p.API.PublishWebSocketEvent(evType, map[string]interface{}{
+ p.API.PublishWebSocketEvent(mutedEvType, map[string]interface{}{
"userID": id,
}, &model.WebsocketBroadcast{ChannelId: channelID, UserId: userID})
+ p.API.PublishWebSocketEvent(voiceEvType, map[string]interface{}{
+ "userID": id,
+ }, &model.WebsocketBroadcast{ChannelId: channelID, UserId: userID})
}
@@ -139,6 +151,17 @@ func (p *Plugin) handleWebSocket(w http.ResponseWriter, r *http.Request, channel
p.API.PublishWebSocketEvent(evType, map[string]interface{}{
"userID": userID,
}, &model.WebsocketBroadcast{ChannelId: channelID})
+ case messageTypeVoiceOn, messageTypeVoiceOff:
+ userSession.mut.Lock()
+ userSession.isSpeaking = (msg.Type == messageTypeVoiceOn)
+ userSession.mut.Unlock()
+ evType := wsEventUserVoiceOff
+ if msg.Type == messageTypeVoiceOn {
+ evType = wsEventUserVoiceOn
+ }
+ p.API.PublishWebSocketEvent(evType, map[string]interface{}{
+ "userID": userID,
+ }, &model.WebsocketBroadcast{ChannelId: channelID})
}
}
}()
2 changes: 2 additions & 0 deletions webapp/src/action_types.ts
@@ -6,6 +6,8 @@ export const VOICE_CHANNEL_USER_CONNECTED = pluginId + '_voice_channel_user_conn
export const VOICE_CHANNEL_USER_DISCONNECTED = pluginId + '_voice_channel_user_disconnected';
export const VOICE_CHANNEL_USER_MUTED = pluginId + '_voice_channel_user_muted';
export const VOICE_CHANNEL_USER_UNMUTED = pluginId + '_voice_channel_user_unmuted';
+ export const VOICE_CHANNEL_USER_VOICE_ON = pluginId + '_voice_channel_user_voice_on';
+ export const VOICE_CHANNEL_USER_VOICE_OFF = pluginId + '_voice_channel_user_voice_off';
export const VOICE_CHANNEL_USERS_CONNECTED = pluginId + '_voice_channel_users_connected';
export const VOICE_CHANNEL_PROFILES_CONNECTED = pluginId + '_voice_channel_profiles_connected';
export const VOICE_CHANNEL_PROFILE_CONNECTED = pluginId + '_voice_channel_profile_connected';
8 changes: 5 additions & 3 deletions webapp/src/components/right_hand_sidebar/component.tsx
@@ -13,13 +13,15 @@ export default class RHSView extends React.PureComponent {
}

render() {
+ // console.log(this.props.statuses);
const listItems = this.props.profiles.map((user) => {
- const muteIcon = this.props.statuses[user.id] === false ? faMicrophoneAlt : faMicrophoneAltSlash;
- const muteStyle = this.props.statuses[user.id] === false ? {color: 'inherit'} : {color: '#E00000'};
+ const muteIcon = this.props.statuses[user.id] && this.props.statuses[user.id].unmuted === true ? faMicrophoneAlt : faMicrophoneAltSlash;
+ const muteStyle = this.props.statuses[user.id] && this.props.statuses[user.id].unmuted === true ? {color: 'inherit'} : {color: '#E00000'};
+ const voiceStyle = this.props.statuses[user.id] && this.props.statuses[user.id].voice === true ? {fontWeight: 'bold'} : {fontWeight: 'normal'};
return (
<li key={user.id}>
<div style={style.user}>
- {user.username}
+ <span style={voiceStyle}>{user.username}</span>
<FontAwesomeIcon
icon={muteIcon}
style={muteStyle}
63 changes: 48 additions & 15 deletions webapp/src/connection.ts
@@ -5,6 +5,8 @@ import {getCurrentUserId} from 'mattermost-redux/selectors/entities/users';
import {getWSConnectionURL} from './utils';
import {VOICE_CHANNEL_USER_CONNECTED} from './action_types';

+ import VoiceActivityDetector from './vad';

export async function newClient(store, channelID: string, closeCb) {
let peer = null;
let receiver = null;
@@ -14,12 +16,35 @@ export async function newClient(store, channelID: string, closeCb) {
video: false,
audio: true,
});

const audioTrack = stream.getAudioTracks()[0];
audioTrack.enabled = false;
streams.push(stream);

+ const AudioContext = window.AudioContext || window.webkitAudioContext;
+ if (!AudioContext) {
+ throw new Error('AudioCtx unsupported');
+ }
+ const audioCtx = new AudioContext();
+ const voiceDetector = new VoiceActivityDetector(audioCtx, stream);

const ws = new WebSocket(getWSConnectionURL(channelID));

+ voiceDetector.on('start', () => {
+ if (ws) {
+ ws.send(JSON.stringify({
+ type: 'voice_on',
+ }));
+ }
+ });
+ voiceDetector.on('stop', () => {
+ if (ws) {
+ ws.send(JSON.stringify({
+ type: 'voice_off',
+ }));
+ }
+ });

const disconnect = () => {
streams.forEach((s) => {
s.getTracks().forEach((track) => {
@@ -42,6 +67,9 @@ export async function newClient(store, channelID: string, closeCb) {
};

const mute = () => {
+ if (voiceDetector) {
+ voiceDetector.stop();
+ }
const audioTrack = stream.getAudioTracks()[0];
audioTrack.enabled = false;
if (ws) {
@@ -52,6 +80,9 @@ export async function newClient(store, channelID: string, closeCb) {
};

const unmute = () => {
+ if (voiceDetector) {
+ voiceDetector.start();
+ }
const audioTrack = stream.getAudioTracks()[0];
audioTrack.enabled = true;
if (ws) {
@@ -64,28 +95,28 @@ export async function newClient(store, channelID: string, closeCb) {
ws.onerror = (err) => console.log(err);

ws.onopen = () => {
- console.log('ws connected');
+ // console.log('ws connected');

peer = new Peer({initiator: true, stream, trickle: true});
peer.on('connect', () => {
- console.log('connected!');
+ // console.log('connected!');
});
peer.on('signal', (data) => {
- console.log(data);
+ // console.log(data);
if (data.type === 'offer') {
- console.log('sending offer');
+ // console.log('sending offer');
ws.send(JSON.stringify({
type: 'signal',
data,
}));
} else if (data.type === 'answer') {
- console.log('sending answer');
+ // console.log('sending answer');
ws.send(JSON.stringify({
type: 'signal',
data,
}));
} else if (data.type === 'candidate') {
- console.log('sending candidate');
+ // console.log('sending candidate');
ws.send(JSON.stringify({
type: 'ice',
data,
@@ -94,32 +125,33 @@ export async function newClient(store, channelID: string, closeCb) {
});
peer.on('error', (err) => console.log(err));
ws.onmessage = ({data}) => {
- console.log('ws msg');
+ // console.log('ws msg');

const msg = JSON.parse(data);
if (msg.type === 'answer') {
peer.signal(data);
} else if (msg.type === 'offer') {
- console.log('offer!');
+ // console.log('offer!');

if (receiver) {
receiver.signal(data);
return;
}

receiver = new Peer({trickle: true});
- receiver.on('connect', () => console.log('receiver connected!'));
+
+ // receiver.on('connect', () => console.log('receiver connected!'));
receiver.on('error', (err) => console.log(err));
receiver.on('signal', (data) => {
- console.log(data);
+ // console.log(data);
if (data.type === 'offer') {
- console.log('rcv sending offer');
+ // console.log('rcv sending offer');
ws.send(JSON.stringify({
type: 'signal',
data,
}));
} else if (data.type === 'answer') {
- console.log('rcv sending answer');
+ // console.log('rcv sending answer');
ws.send(JSON.stringify({
type: 'signal',
data,
@@ -128,7 +160,7 @@ export async function newClient(store, channelID: string, closeCb) {
});
receiver.signal(data);
receiver.on('stream', (stream) => {
- console.log('receiver stream');
+ // console.log('receiver stream');

streams.push(stream);

@@ -142,12 +174,13 @@

document.body.appendChild(audio);
receiver.on('close', () => {
- console.log('receiver closed!');
+ // console.log('receiver closed!');
audio.remove();
});
});
}
- console.log(data);
+
+ // console.log(data);
};
};

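The VoiceActivityDetector imported above comes from webapp/src/vad.ts, one of the changed files that is not expanded in this view. Based only on how connection.ts uses it (constructed with an AudioContext and a MediaStream, started on unmute, stopped on mute, and emitting 'start'/'stop' events), a minimal hypothetical sketch could look like the following — the AnalyserNode approach, threshold value, and polling interval are assumptions, not the commit's actual implementation:

```ts
import {EventEmitter} from 'events';

// Hypothetical sketch of webapp/src/vad.ts: a simple volume-threshold
// detector over an AnalyserNode. The real file may differ substantially.
export default class VoiceActivityDetector extends EventEmitter {
    private analyser: AnalyserNode;
    private data: Uint8Array;
    private intervalID: ReturnType<typeof setInterval> | null = null;
    private speaking = false;

    constructor(audioCtx: AudioContext, stream: MediaStream) {
        super();
        const source = audioCtx.createMediaStreamSource(stream);
        this.analyser = audioCtx.createAnalyser();
        this.analyser.fftSize = 512;
        source.connect(this.analyser);
        this.data = new Uint8Array(this.analyser.frequencyBinCount);
    }

    start() {
        if (this.intervalID) {
            return;
        }
        this.intervalID = setInterval(() => {
            // Average the frequency bins and compare against a fixed threshold.
            this.analyser.getByteFrequencyData(this.data);
            const avg = this.data.reduce((sum, v) => sum + v, 0) / this.data.length;
            const active = avg > 20; // Arbitrary threshold; would need tuning.
            if (active && !this.speaking) {
                this.speaking = true;
                this.emit('start');
            } else if (!active && this.speaking) {
                this.speaking = false;
                this.emit('stop');
            }
        }, 200);
    }

    stop() {
        if (this.intervalID) {
            clearInterval(this.intervalID);
            this.intervalID = null;
        }
        if (this.speaking) {
            this.speaking = false;
            this.emit('stop');
        }
    }
}
```

A production detector would typically add smoothing and a hangover period so brief pauses don't toggle voice_on/voice_off messages rapidly.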
24 changes: 22 additions & 2 deletions webapp/src/index.tsx
@@ -39,6 +39,8 @@ import {
VOICE_CHANNEL_PROFILE_CONNECTED,
VOICE_CHANNEL_USER_MUTED,
VOICE_CHANNEL_USER_UNMUTED,
+ VOICE_CHANNEL_USER_VOICE_OFF,
+ VOICE_CHANNEL_USER_VOICE_ON,
} from './action_types';

// eslint-disable-next-line import/no-unresolved
@@ -123,7 +125,6 @@ export default class Plugin {
store.subscribe(async () => {
const currentChannelId = getCurrentChannelId(store.getState());
if (currChannelId !== currentChannelId) {
- console.log('channel switched');
currChannelId = currentChannelId;
registry.unregisterComponent(actionID);
try {
@@ -138,7 +139,6 @@
channelID: currChannelId,
},
});
- console.log(store.getState());

if (resp.data.enabled) {
actionID = registerChannelHeaderButtonAction();
@@ -231,6 +231,26 @@
},
});
});

+ registry.registerWebSocketEventHandler(`custom_${manifest.id}_user_voice_on`, (ev) => {
+ store.dispatch({
+ type: VOICE_CHANNEL_USER_VOICE_ON,
+ data: {
+ channelID: ev.broadcast.channel_id,
+ userID: ev.data.userID,
+ },
+ });
+ });

+ registry.registerWebSocketEventHandler(`custom_${manifest.id}_user_voice_off`, (ev) => {
+ store.dispatch({
+ type: VOICE_CHANNEL_USER_VOICE_OFF,
+ data: {
+ channelID: ev.broadcast.channel_id,
+ userID: ev.data.userID,
+ },
+ });
+ });
}

uninitialize() {
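The VOICE_CHANNEL_USER_VOICE_ON/OFF actions dispatched above have to end up in the statuses map that the RHS component reads as this.props.statuses[user.id].unmuted and .voice. The reducer doing that lives in one of the changed files not expanded in this view; a hypothetical sketch, assuming a per-user {unmuted, voice} record keyed by userID (the setFlag helper and exact state shape are assumptions, not the commit's actual code):

```ts
import {combineReducers, AnyAction} from 'redux';

import {
    VOICE_CHANNEL_USER_MUTED,
    VOICE_CHANNEL_USER_UNMUTED,
    VOICE_CHANNEL_USER_VOICE_ON,
    VOICE_CHANNEL_USER_VOICE_OFF,
} from './action_types';

type UserStatus = {unmuted?: boolean; voice?: boolean};
type StatusesState = {[userID: string]: UserStatus};

// Hypothetical helper: merge one flag into a user's status entry.
const setFlag = (state: StatusesState, userID: string, flag: Partial<UserStatus>): StatusesState => ({
    ...state,
    [userID]: {...state[userID], ...flag},
});

const voiceUsersStatuses = (state: StatusesState = {}, action: AnyAction): StatusesState => {
    switch (action.type) {
    case VOICE_CHANNEL_USER_MUTED:
        return setFlag(state, action.data.userID, {unmuted: false});
    case VOICE_CHANNEL_USER_UNMUTED:
        return setFlag(state, action.data.userID, {unmuted: true});
    case VOICE_CHANNEL_USER_VOICE_ON:
        return setFlag(state, action.data.userID, {voice: true});
    case VOICE_CHANNEL_USER_VOICE_OFF:
        return setFlag(state, action.data.userID, {voice: false});
    default:
        return state;
    }
};

export default combineReducers({
    voiceUsersStatuses,
});
```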