For a few days now I have been trying to visualize an audio stream that arrives over WebRTC. We already wrote some visualizations that work fine with a normal local stream (Web Audio, using the microphone as input).
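For reference, the working local setup is essentially the standard getUserMedia-plus-AnalyserNode pattern; this is only a minimal sketch, not our actual visualization code:

var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
var analyser = audioContext.createAnalyser();
analyser.fftSize = 64;

// promise-based getUserMedia; the microphone is the input source
navigator.mediaDevices.getUserMedia({ audio: true }).then(function(stream) {
    var source = audioContext.createMediaStreamSource(stream);
    source.connect(analyser);

    var data = new Uint8Array(analyser.frequencyBinCount);
    (function draw() {
        requestAnimationFrame(draw);
        analyser.getByteFrequencyData(data);
        // data contains non-zero values here, so the visuals work
    })();
});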
Then I found some really interesting work at https://github.com/muaz-khan/WebRTC-Experiment/tree/master/ for streaming microphone input between different browsers. We need this so that every client in the frontend receives the same audio data from one backend.
Everything works fine, and some tests showed that we can hear each other, so I assumed it would also be no problem to visualize the incoming stream.
But: all frequency data is empty (all zeros), even though we can hear each other.
Does anybody have a solution or a hint for this? Thanks in advance!
This is my test for analysing the remote frequency data:
Include these files first:
webrtc-experiment.com/firebase.js
webrtc-experiment.com/one-to-many-audio-broadcasting/meeting.js
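In the test page they are loaded as ordinary script tags, roughly like this:

<script src="https://webrtc-experiment.com/firebase.js"></script>
<script src="https://webrtc-experiment.com/one-to-many-audio-broadcasting/meeting.js"></script>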
var meeting = new Meeting('test');

// standard AudioContext where available, otherwise the prefixed one
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
var analyser = audioContext.createAnalyser();

// fired for both local and remote streams
meeting.onaddstream = function(e) {
    console.log(e.type);
    console.log(e.audio);
    console.log(e.stream);

    if (e.type === 'local') {
        // with the local stream this works!
    } else {
        // feed the remote stream into the analyser and pass it on to the speakers
        var source = audioContext.createMediaStreamSource(e.stream);
        source.connect(analyser);
        analyser.connect(audioContext.destination);

        console.log(analyser.fftSize);
        console.log(analyser.frequencyBinCount);
        analyser.fftSize = 64;
        console.log(analyser.frequencyBinCount);

        var frequencyData = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(frequencyData);

        function update() {
            requestAnimationFrame(update);
            analyser.getByteFrequencyData(frequencyData);
            console.log(frequencyData); // always all zeros for the remote stream
        }
        update();
    }
};

meeting.check();
meeting.setup('test');