audio - 通过 websocket 从麦克风流式传输音频。我可以看到正在发送的数据,但在接收客户端听不到它
问题描述
我正在尝试通过 websocket 广播捕获的麦克风音频。我可以看到缓冲区数组正在被发送,并且该数组包含实际的有效数据,但接收客户端听不到任何声音。我很确定我的播放函数是正确的,因为当我用随机数填充数组并传给播放函数时,可以听到白噪声。我猜想播放的音频可能太安静而听不见,因为数组中的数值似乎大多在 0.000### 的范围内。有什么想法吗?捕获麦克风音频并广播它似乎过于复杂了…… :/
//broadcasting side
// Capture the mic, tap the raw samples with a ScriptProcessorNode, and push
// each 1024-sample chunk to the server over the websocket as JSON.
navigator.mediaDevices.getUserMedia({audio: true,video: false}) // request cam
.then(stream => {
vid.srcObject = stream;
context = new AudioContext();
var source = context.createMediaStreamSource(stream);
// 1024-sample buffer, 2 input channels, 2 output channels.
var processor = context.createScriptProcessor(1024, 2, 2);
source.connect(processor);
processor.connect(context.destination);
processor.onaudioprocess = function(e) {
// NOTE(review): getChannelData(1) reads the SECOND channel; if the mic
// stream is effectively mono this channel may be silent — channel 0 is
// the safer choice (the accepted solution below switches to it).
audiodata = e.inputBuffer.getChannelData(1);
// NOTE(review): JSON.stringify serializes a Float32Array as an object of
// index keys ({"0":0.01,...}), not as a JSON array — the receiver must
// rebuild a numeric array from it.
socket.send(JSON.stringify({sound: audiodata, to: to, from: '$username', text:''}));
};
return vid.play(); // returns a Promise
});
//receiving side object to array
// Rebuild the JSON-serialized sample object into an array and play it.
if(typeof (message.sound) != "undefined"){
//$('#video_stream_btn').trigger('click');
var json_sound = message.sound;
var array_sound = [];
// NOTE(review): this pushes [index, value] PAIRS, not the sample values
// themselves — play_sound then receives nested arrays instead of numbers,
// which is one reason the audio is inaudible.
for(var i in json_sound){
array_sound.push([i, json_sound [i]]);
}
// Lazily create a single shared AudioContext for playback.
if(typeof(context) == 'undefined'){
context = new AudioContext();
}
play_sound(array_sound, context);
return;
}
// Receiving side: play one chunk of raw PCM float samples through the speakers.
function play_sound(raw, context) {
  // Copy the samples into a mono AudioBuffer at the context's sample rate.
  const chunk = context.createBuffer(1, raw.length, context.sampleRate);
  chunk.getChannelData(0).set(raw);

  // One-shot source node: wire it straight to the output and start at once.
  const player = context.createBufferSource();
  player.buffer = chunk;
  player.connect(context.destination);
  player.start(0);
}
解决方案
对于任何试图弄清楚这一点的人:我最终将音频编码为一个 Int16Array,然后通过套接字发送它,客户端再将它解码回一个 Float32Array 并传给 play_sound 函数。我基本上只是从 Stack Overflow 上拼凑了一堆东西,不断尝试直到成功为止,因为我没那么聪明 :)
捕获麦克风并转换为 int16array,然后通过套接字发送
// Capture the mic, convert each chunk of channel 0 to 16-bit PCM, and send
// it over the websocket as JSON.
navigator.mediaDevices.getUserMedia({audio: {sampleSize: 16, channelCount: 2},video: true}) // request cam
.then(stream => {
vid.srcObject = stream; // don't use createObjectURL(MediaStream)
context = new AudioContext();
var source = context.createMediaStreamSource(stream);
// 1024-sample buffer, 2 in / 2 out channels.
// NOTE(review): ScriptProcessorNode is deprecated; AudioWorklet is the
// modern replacement.
var processor = context.createScriptProcessor(1024, 2, 2);
source.connect(processor);
processor.connect(context.destination);
processor.onaudioprocess = function(e) {
// Do something with the data, i.e Convert this to WAV
// Channel 0's float samples -> Int16 PCM; JSON.stringify serializes the
// Int16Array as an object of index keys, which the receiver rebuilds.
audiodata = new Int16Array(convertFloat32ToInt16(e.inputBuffer.getChannelData(0)));
console.log(audiodata);
socket.send(JSON.stringify({sound: audiodata, to: to, from: '$username', text:''}));
};
return vid.play(); // returns a Promise
});
将捕获的麦克风转换为 int16array 的相关函数:
/**
 * Convert float audio samples (nominally in [-1, 1]) to 16-bit signed PCM.
 *
 * @param {Float32Array|number[]} buffer - input samples.
 * @returns {ArrayBuffer} the backing buffer of the resulting Int16Array
 *   (callers wrap it with `new Int16Array(...)`).
 */
function convertFloat32ToInt16(buffer){
  let l = buffer.length;          // was an implicit global; keep it local
  const buf = new Int16Array(l);
  while (l--)
  {
    // Clamp to [-1, 1] on BOTH ends before scaling. The original only
    // capped the upper bound (Math.min(1, x)), so samples below -1
    // scaled past -32767 and wrapped around in the Int16Array.
    const sample = Math.max(-1, Math.min(1, buffer[l]));
    buf[l] = sample * 0x7FFF;
  }
  return buf.buffer;
}
将客户端 json 对象接收到 int16array,然后将 int16array 返回到 float32array:
// Receiving side: rebuild the serialized Int16 samples, convert back to
// floats, and play them.
if(typeof (message.sound) != "undefined"){
//$('#video_stream_btn').trigger('click');
//var json_sound = message.sound;
// Lazily create a single shared AudioContext for playback.
if(typeof(context) == 'undefined'){
context = new AudioContext();
}
// JSON.stringify turned the Int16Array into an object of index keys
// ({"0":123,...}); copy the values back into a plain array by index.
// NOTE(review): sound_array / i / sound_array32 are implicit globals here.
sound_array = [];
for (i in message.sound)
{
sound_array[i] = (message.sound [i]);
}
//sound_array16 = new Int16Array(sound_array);
// Re-normalize the 16-bit PCM to floats in [-1, 1] for the audio buffer.
sound_array32 = int16ToFloat32(sound_array);
play_sound(sound_array32, context);
return;
}
相关接收方 int16array 到 float32array 函数:
/**
 * Convert 16-bit signed PCM samples back to floats in [-1, 1].
 *
 * @param {number[]|Int16Array|ArrayBuffer} inputArray - raw 16-bit samples
 *   (anything the Int16Array constructor accepts).
 * @returns {Float32Array} normalized samples.
 */
function int16ToFloat32(inputArray) {
  // Coercing through Int16Array yields SIGNED values in [-32768, 32767].
  const int16arr = new Int16Array(inputArray);
  const output = new Float32Array(int16arr.length);
  for (let i = 0; i < int16arr.length; i++) {
    const sample = int16arr[i];
    // Negative samples divide by 0x8000 and positive by 0x7FFF so the result
    // spans exactly [-1, 1]. The original tested `sample >= 0x8000`, which is
    // never true for a signed Int16Array element — that branch was dead, and
    // -32768 / 0x7FFF fell slightly below -1.
    output[i] = sample < 0 ? sample / 0x8000 : sample / 0x7FFF;
  }
  return output;
}
推荐阅读
- html - 我找不到导致水平滚动的原因
- reactjs - 查询修剪不是函数(如何调试)
- sql - SQL 查询 - 如何输出重复值
- css - 为什么 SASS @each 语句在 Next.js 中不起作用?
- r - 将日期时间变量离散化为“in-hours”和“after-hours”
- r - 如何根据空白行从 df 分区到多个 .csv?
- c++ - Pytorch C++ API:CMake 问题
- java - JDBI -> 将 MySql DATETIME 读入 java.sql.Date 或类似的
- dart - Dart 是否具有尾调用优化 (TCO) 功能?
- python - 如何使用自定义内核导出 SVM 模型?