I'm trying to create a realtime voice chat. While a client is holding a button and talking, I want the sound to be sent over the socket to the Node.js backend, and then I want to stream that data to another client.
Here is the sender client code:
socket.on('connect', function() {
    var session = {
        audio: true,
        video: false
    };
    navigator.getUserMedia(session, function(stream) {
        var audioInput = context.createMediaStreamSource(stream);
        var bufferSize = 2048;
        recorder = context.createScriptProcessor(bufferSize, 1, 1);
        recorder.onaudioprocess = onAudio;
        audioInput.connect(recorder);
        recorder.connect(context.destination);
    }, function(e) {
    });

    function onAudio(e) {
        if (!broadcast) return;
        var mic = e.inputBuffer.getChannelData(0);
        var converted = convertFloat32ToInt16(mic);
        socket.emit('broadcast', converted);
    }
});
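For reference, the pieces not shown above (context, the broadcast flag and convertFloat32ToInt16) are assumed to look roughly like this; talkButton is just a placeholder for whatever element drives the push-to-talk, and the conversion simply clamps each Float32 sample and scales it into the signed 16-bit range:

// Assumed setup (not shown in the snippet above)
var context = new AudioContext();
var broadcast = false;
var recorder;

// Hypothetical push-to-talk wiring: this flag gates onAudio above
talkButton.onmousedown = function() { broadcast = true; };
talkButton.onmouseup = function() { broadcast = false; };

function convertFloat32ToInt16(buffer) {
    var result = new Int16Array(buffer.length);
    for (var i = 0; i < buffer.length; i++) {
        // clamp to [-1, 1], then scale to the 16-bit signed range
        var s = Math.max(-1, Math.min(1, buffer[i]));
        result[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
    }
    return result;
}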
The server then receives this buffer and streams it to another client (in this example, back to the same client).
Server code:
socket.on('broadcast', function(buffer) {
    socket.emit('broadcast', new Int16Array(buffer));
});
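If the goal is to relay the audio to every other connected client rather than echo it back, a minimal sketch (assuming io is the usual socket.io server instance) would be:

io.on('connection', function(socket) {
    socket.on('broadcast', function(buffer) {
        // broadcast.emit sends to every connected socket except the sender
        socket.broadcast.emit('broadcast', buffer);
    });
});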
Then, in order to play the sound on the other side (the receiver), the client code looks like this:
socket.on('broadcast', function(raw) {
    var buffer = convertInt16ToFloat32(raw);
    var src = context.createBufferSource();
    var audioBuffer = context.createBuffer(1, buffer.byteLength, context.sampleRate);
    audioBuffer.getChannelData(0).set(buffer);
    src.buffer = audioBuffer;
    src.connect(context.destination);
    src.start(0);
});
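The convertInt16ToFloat32 helper isn't shown either; it's assumed to be the inverse of the sender-side conversion, roughly:

function convertInt16ToFloat32(raw) {
    // reinterpret the incoming binary chunk as 16-bit samples,
    // then scale each one back into the [-1, 1] Float32 range
    var input = raw instanceof Int16Array ? raw : new Int16Array(raw);
    var result = new Float32Array(input.length);
    for (var i = 0; i < input.length; i++) {
        result[i] = input[i] / (input[i] < 0 ? 0x8000 : 0x7FFF);
    }
    return result;
}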
My expected result is that the sound from client A will be heard by client B. I can see the buffer on the server, and I can see the buffer arrive back at the client, but I hear nothing.
I know socket.io 1.x supports binary data, but I can't find any example of building a voice chat with it. I also tried BinaryJS, but the results were the same. I know this would be a simple task with WebRTC, but I don't want to use WebRTC. Can anyone point me to a good resource, or tell me what I'm missing?