首页 > 解决方案 > Web 音频 API 和 WebRTC:使用 AudioContext 播放远程/对等音频流时,会在远程音频旁边听到自己的声音

问题描述

我正在尝试使用 WebRTC 实现简单的点对点连接,并有可能为远程音频流增加增益,以使其他点的声音更大。

P2P 连接部分目前工作正常,但音频部分让我头疼。我已经绕过了 Chrome 中的一个错误:当远程流未附加到(静音的)音频元素时,AudioContext 不会播放。 https://bugs.chromium.org/p/chromium/issues/detail?id=933677
https://bugs.chromium.org/p/chromium/issues/detail?id=952700

但是现在我遇到了一个问题:在我当前的解决方案中,我将远程流附加到 AudioContext 后,可以在来自远程对等方的音频旁边听到自己(localStream)说话的声音。奇怪的是我从来没有将 localStream 接入 AudioContext。也许我在这里遗漏了一些明显的东西。

下面我将附上我的客户端代码。
我希望有人可以帮助我。

先感谢您。

import './style.css'

import { io } from "socket.io-client";

// ICE configuration: two public Google STUN servers,
// pre-gathering up to 10 candidates before they are needed.
const servers = {
  iceCandidatePoolSize: 10,
  iceServers: [
    {
      urls: [
        'stun:stun1.l.google.com:19302',
        'stun:stun2.l.google.com:19302',
      ],
    },
  ],
};

// Global State
// Single RTCPeerConnection for the whole page, using the STUN config above.
const pc = new RTCPeerConnection(servers);
console.log("connection to Signal Server");
// Signalling channel (socket.io) used to exchange SDP offers/answers and ICE candidates.
const socket = io("https://chat.psps-apps.de");
// Both assigned in webcamButton.onclick: the local microphone capture, and the
// (initially empty) stream that remote tracks are added to in pc.ontrack.
let localStream: MediaStream;
let remoteStream: MediaStream;

// Basic signalling-connection feedback for the user.
socket.on("connect", function () {
  console.log("Connected");
  alert("Connected to Server");
});

socket.on("error", function (err) {
  console.log("Error ", err);
  alert("Error " + err);
});

// HTML elements
// FIX: the original looked up 'remoteAudio ' (note the trailing space), which
// returns null — every later remoteAudio.srcObject / .muted access then throws.
const webcamButton = document.getElementById('webcamButton') as HTMLButtonElement;
const callButton = document.getElementById('callButton') as HTMLButtonElement;
const remoteAudio = document.getElementById('remoteAudio') as HTMLAudioElement;
const hangupButton = document.getElementById('hangupButton') as HTMLButtonElement;
const range = document.getElementById("lautReg") as HTMLInputElement;
const label = document.getElementById("lautLabel") as HTMLLabelElement;

// Created lazily in webcamButton.onclick (AudioContext must be created after a
// user gesture to satisfy autoplay policies).
let gainNode: GainNode;
let context: AudioContext;

// Initialise the volume slider to gain 1 and mirror its value in the label.
range.value = "1";
label.innerText = range.value;


range.onchange = (ev) => {
  const inp = ev.target as HTMLInputElement;
  label.innerText = inp.value;

  // FIX: gainNode only exists after webcamButton was clicked; without this
  // guard, moving the slider before that throws a TypeError.
  if (!gainNode) return;

  // NOTE(review): the applied gain is twice the displayed slider value —
  // presumably to allow amplification above 1.0; confirm this is intended.
  gainNode.gain.value = parseFloat(inp.value) * 2;
  console.log("Gain ", gainNode.gain.value);
  console.log(context);
}

// Callee side: apply the forwarded offer, then create and send back an answer.
socket.on("offer-fw", async (offer) => {
  console.log("ON offer-fw", offer);
  await pc.setRemoteDescription(new RTCSessionDescription(offer));

  const localDesc = await pc.createAnswer();
  await pc.setLocalDescription(localDesc);

  // Only sdp + type are needed on the wire.
  const answer = { type: localDesc.type, sdp: localDesc.sdp };

  console.log("EMIT answer", answer);
  socket.emit("answer", answer);
})

// Get candidates for caller: forward each locally gathered ICE candidate
// to the peer via the signalling server.
pc.onicecandidate = (event) => {
  const candidate = event.candidate;
  console.log("EMIT new-ice-offer-candidate", candidate?.toJSON());
  if (candidate) {
    socket.emit("new-ice-offer-candidate", candidate.toJSON());
  }
}

// Listen for remote answer (caller side).
// FIX: the handler is now async and awaits setRemoteDescription — the original
// left a floating promise, so rejections were silently dropped.
socket.on("answer-fw", async (answer) => {
  console.log("ON answer-fw", answer);
  const answerDescription = new RTCSessionDescription(answer);
  await pc.setRemoteDescription(answerDescription);

  // From here on, emit gathered candidates under the answer-candidate event.
  // NOTE(review): this replaces the handler installed at top level that emitted
  // "new-ice-offer-candidate" — confirm this handover matches the server protocol.
  pc.onicecandidate = event => {
    console.log("EMIT new-ice-answer-candidate", event.candidate?.toJSON());
    event.candidate && socket.emit("new-ice-answer-candidate", event.candidate.toJSON());
  }
})

// When answered, add the candidate forwarded from the answering peer
// to our peer connection.
socket.on("new-ice-answer-candidate-fw", (candidateData) => {
  console.log("ON new-ice-answer-candidate-fw", candidateData);
  pc.addIceCandidate(new RTCIceCandidate(candidateData));
});

//################################################################
// When this client gets called: add the candidate forwarded from the
// calling peer to our peer connection.
socket.on("new-ice-offer-candidate-fw", (candidate) => {
  console.log("ON new-ice-offer-candidate-fw");
  console.log("Candidate", candidate);
  const iceCandidate = new RTCIceCandidate(candidate);
  pc.addIceCandidate(iceCandidate);
})

// Capture the microphone, wire up the peer connection and build the
// Web Audio graph that plays the remote stream through a gain node.
webcamButton.onclick = async () => {
  localStream = await navigator.mediaDevices.getUserMedia({ audio: true });

  //!!!!The remote stream must be added to the audio HTMLElement and must be muted.
  //ONLY IF this is the case then the AudioContext is able to play.
  // (Workaround for Chromium issues 933677 / 952700.)
  remoteStream = new MediaStream();
  remoteAudio.srcObject = remoteStream;
  remoteAudio.muted = true;

  context = new AudioContext();
  gainNode = context.createGain();
  gainNode.gain.value = 1;

  // Push tracks from local stream to peer connection
  localStream.getTracks().forEach((track) => {
    pc.addTrack(track, localStream);
  });

  // FIX: ontrack fires once per remote track; the original created a fresh
  // MediaStreamSource and re-connected gainNode -> destination on every
  // firing, stacking duplicate connections in the audio graph. Build the
  // graph exactly once.
  // NOTE(review): audio played via an AudioContext may bypass Chrome's echo
  // cancellation reference signal — worth verifying as the cause of the
  // "hearing myself" symptom.
  let graphConnected = false;

  // Pull tracks from the remote stream and route them through the gain node.
  pc.ontrack = (event) => {
    event.streams[0].getTracks().forEach((track) => {
      remoteStream.addTrack(track);
    });

    if (!graphConnected) {
      graphConnected = true;
      const mediaStreamSource = context.createMediaStreamSource(remoteStream);

      //this will apply the gain to the stream
      mediaStreamSource.connect(gainNode);

      // this will play the audio with the gain applied
      gainNode.connect(context.destination);
    }
  };

  callButton.disabled = false;
  webcamButton.disabled = true;
};

// Caller side: create an SDP offer, apply it locally and hand it to the
// signalling server for forwarding to the peer.
callButton.onclick = async () => {
  const offerDescription = await pc.createOffer();
  await pc.setLocalDescription(offerDescription);

  // Only sdp + type go over the wire.
  socket.emit("offer", {
    sdp: offerDescription.sdp,
    type: offerDescription.type,
  });

  hangupButton.disabled = false;
}

/**
 * Routes a MediaStream through the shared gain node and returns a new
 * stream carrying the gain-adjusted audio. (Currently unused.)
 */
function addGain(stream: MediaStream): MediaStream {
  const source = context.createMediaStreamSource(stream);
  const sink = context.createMediaStreamDestination();

  source.connect(gainNode);
  gainNode.connect(sink);

  return sink.stream;
}

标签: javascript、webrtc、web-audio-api、audiocontext

解决方案


推荐阅读