首页 > 解决方案 > 如何接收连续的视频块作为 blob 数组并在 Websocket 中动态设置为视频标签

问题描述

我正在尝试制作自己的广播架构。在这个系统中,我使用 Websocket 传输数据,因为我知道适合连续数据传输。


在我的系统中,有一个主机发起网络摄像头直播视频。我使用MediaStreamRecorder.js记录每 5 秒的视频块并通过 websocket 作为 blob 数组发送到服务器。

服务器简单地接收并发送给在该会话中连接的所有客户端。

当客户端连接时,它通过 Websocket 接收连续 5 秒的视频块作为 blob 数组。

我的主要问题是在客户端如何将视频 blob 数组设置为html video每 5 秒动态源,以便它可以播放每 5 秒的视频数据块。

我在主机和客户端使用 Glassfish 4.0 作为服务器和 JavaScript。浏览器:Chrome 源代码:

ServerBroadCast.java

    package websocket1;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import javax.websocket.OnClose;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;

@ServerEndpoint(value = "/liveStreamMulticast")
public class LiveStreamMultiCast {
    private static final Set<Session> sessions = Collections.synchronizedSet(new HashSet<Session>());

    @OnOpen
    public void whenOpening(Session session) {
        // session.setMaxBinaryMessageBufferSize(1024*512); // 512 KB
        sessions.add(session);
        System.out.println("You are Connected!");
        System.out.println("Total Connection are connected: " + sessions.size());

    }

    @OnMessage
    public void handleVideo(byte[] videoData, Session HostSession) {
        // System.out.println("Insite process video");

        try {
            if (videoData != null) {
                sendVideo(videoData, HostSession);
            }

        } catch (Throwable e) {
            System.out.println("Error sending message " + e.getMessage());
        }
    }


    @OnClose
    public void onClosing(Session session) {
        System.out.println("Goodbye!");
        sessions.remove(session);
    }

    private void sendVideo(byte[] videoData, Session hostSession) throws IOException {

        Iterator<Session> iterator = sessions.iterator();
        Session tempSession = null;

        while (iterator.hasNext()) {
            tempSession = iterator.next();

            // System.out.println("Sever send data to "+ tempSession);
            if (!tempSession.equals(hostSession))
                tempSession.getBasicRemote().sendBinary(ByteBuffer.wrap(videoData));

        }

    }
}

主机.html

<html>
<head>
    <title>Demo</title>
    <script type="text/javascript" src="js/required/mediastream.js"></script>
</head>
<body>

<video id="video" autoplay=""></video>

<button id="stopButton" onclick="stop()">Stop</button>
<script type="text/javascript">

var url = "ws://localhost:8080/LiveTraining3Demo/liveStreamMulticast"; // 8080/application_name/value_given_in_annotation

var socket = new WebSocket(url);
    var video = document.querySelector('video');

socket.onopen = function(){

    console.log("Connected to Server!!");

}
socket.onmessage = function(msg){
    console.log("Message come from server");

}
/////////////////////////////////
var wholeVideo =[];
var chunks = [];
var mediaRecorder;
//////////////////////////////////////

  function gotMedia(stream) {
    video.srcObject = stream;
    mediaRecorder = new MediaStreamRecorder(stream);
    console.log("mediaRecorderCalled");
    mediaRecorder.mimeType = 'video/webm';
    mediaRecorder.start(5000);//
    console.log("recorder started");

    mediaRecorder.ondataavailable = (event) =>{
        chunks.push(event.data);
        console.log("push  B");
        wholeVideo.push(event.data);
        console.log("WholeVideo Size:");
        setTimeout(sendData(),5010);
    }



  }


  function sendData(){ 
    //var byteArray = new Uint8Array(recordedTemp);
    const superBuffer =  new Blob(chunks, {
        type: 'video/webm'
        });

     socket.send(superBuffer);
     console.log("Send Data");
      console.table(superBuffer);
      chunks = [];

  }


  navigator.getUserMedia  = navigator.getUserMedia || 
                                     navigator.webkitGetUserMedia ||
                                      navigator.mozGetUserMedia || 
                                       navigator.msGetUserMedia;

  navigator.mediaDevices.getUserMedia({video: true , audio: true})
      .then(gotMedia)
      .catch(e => { console.error('getUserMedia() failed: ' + e); });
    </script>

</body>
</html>

客户端.html

<html>
<head>

<title>Recieve Video</title>

</head>
<body>
<video id="video" autoplay controls loop
    style="width: 700; height: 500; margin: auto">
    <source src="" type="video/webm">
</video>
<script>
    var url = "ws://localhost:8080/LiveTraining3Demo/liveStreamMulticast"; // 8080/application_name/value_given_in_annotation
    var check = true;
    var socket = new WebSocket(url);
    var videoData = [];
    var superBuffer = null;
    //var videoUrl;

    //socket.binaryType = 'arraybuffer';
    socket.onopen = function() {
        console.log("Connected!!");

    }

    var check = true;
    socket.onmessage = function(videoStream) {

        var video = document.querySelector('video');
        var videoUrl = window.URL.createObjectURL(videoStream.data);
        video.src = videoUrl;
        video.load();
        video.onloadeddata = function() {
            URL.revokeObjectURL(video.src);
            video.play();
        }
        //video.srcObject

        //video.play();

        console.table(videoStream);

    }
    socket.onerror = function(err) {
        console.log("Error: " + err);
    }
</script>
</body>
</html>


当我尝试运行所有其他看起来都很好但client.html仅显示视频标签源时,没有任何视频播放。

一个星期以来我一直在努力。可能是我的一些实现出了问题,我也知道WebRTCMauz Webrtc Broadcast但如果有另一种简单的方法可以做到这一点,我不喜欢经历那个复杂的过程。我不喜欢使用node.js服务器,因为我必须用 spring 制作这个 web 应用程序。任何想法都可以赞赏。提前致谢!!。

标签: javascriptjavavideowebsocketbroadcast

解决方案


在客户端将获得数组缓冲区。因此,您需要将数组缓冲区转换为 blob 数组。

 let video = document.querySelector('video'); 
  let blobArray = [];
 socket.on('message',data=>{
  blobArray.push(new Blob([new Uint8Array(data)],{'type':'video/mp4'}));
  let currentTime = video.currentTime;
  let blob = new Blob(blobArray,{'type':'video/mp4'});
  video.src = window.URL.createObjectURL(blob);
  video.currentTime = currentTime;
  video.play();
 });

推荐阅读