web音頻流轉發之音視頻直播

前言

通過前面兩篇文章的講解,你們已經瞭解了audio的基本使用方法,下面咱們就根據咱們瞭解的api作一個直播。
web音頻流轉發之AudioNode
web音頻流轉發之音頻源

原理

  • 視頻直播:採集一幀一幀的視頻,轉換爲base64轉發,接收到base64後,設置爲img的src,而後不停的修改img的src造成視頻
  • 音頻直播:採集一幀一幀的音頻二進制數據,轉發2進制數據,在接收端對2進制原始音頻數據進行播放

採集和推流

  1. 獲取攝像頭和麥克風須要 https
  2. navigator.getUserMedia已經廢棄,使用navigator.mediaDevices.getUserMedia,固然須要作兼容
// Capture side: grab camera + microphone, push video frames (base64 JPEG)
// and raw mono PCM blocks over the WebSocket. Requires an HTTPS page.
mediaDevices = navigator.mediaDevices.getUserMedia({ audio: true, video: { width: 320, height: 240 } });
mediaDevices.then(stream => {
    // Play the raw stream in the (hidden) <video> element.
    video.srcObject = stream;
    video.play();
    // Route the audio track into the Web Audio graph for sampling.
    let source = audioCtx.createMediaStreamSource(stream);
    recorder = audioCtx.createScriptProcessor(2048, 1, 1);
    source.connect(recorder);
    recorder.connect(audioCtx.destination);
    recorder.onaudioprocess = function (ev) {
        // Mono PCM samples for this 2048-frame block.
        let inputBuffer = ev.inputBuffer.getChannelData(0);
        // Send the current video frame as a base64 JPEG.
        ws.send(canvas.toDataURL('image/jpeg'));
        // Copy the samples before sending: getChannelData() returns a view
        // into a buffer the audio engine may reuse for the next block.
        ws.send(new Float32Array(inputBuffer).buffer);
    };
}).catch(err => {
    // Surface capture failures (permission denied, no devices, non-HTTPS page).
    console.error('getUserMedia failed:', err);
});
video.onplay = function () {
    // Mirror the video into the canvas so frames can be serialized to JPEG.
    interval = setInterval(function () {
        ctx.drawImage(video, 0, 0);
    }, 30);
};

接收流文件

對接收的文件進行一個緩存,以達到一個好的用戶體驗

// Receiving side: collect incoming frames and buffer a few before playback.
let ws = new WebSocket("wss://192.168.3.102"),
    imgChuncks = [],
    audioChuncks = [],
    img = null;
// Receive binary messages as ArrayBuffer instead of the default Blob.
ws.binaryType = 'arraybuffer';
ws.onmessage = function (evt) {
    if (evt.data.byteLength === undefined) {
        // Text message: a base64-encoded JPEG frame.
        imgChuncks.push(evt.data);
    } else {
        // Binary message: raw mono PCM samples.
        audioChuncks.push(new Float32Array(evt.data));
    }
    // Start playback once more than two audio blocks are buffered.
    if (!img && audioChuncks.length > 2) {
        myplay();
    }
};

處理流

// Build the playback pipeline for the received image + audio frames.
function myplay() {
    // An <img> whose src is swapped once per audio block acts as the "video".
    img = new Image();
    document.body.appendChild(img);
    // A ScriptProcessor paces playback: one callback per 2048-sample block.
    // NOTE(review): the buffer source is never start()ed; it only exists so
    // the processor has an input connected.
    let source = audioCtx.createBufferSource(),
        recorder = audioCtx.createScriptProcessor(2048, 1, 1);
    source.connect(recorder);
    recorder.connect(audioCtx.destination);
    recorder.onaudioprocess = function (ev) {
        // Advance the video only when a frame is available; assigning
        // undefined to img.src would request the page URL + "undefined".
        let frame = imgChuncks.shift();
        if (frame) {
            img.src = frame;
        }
        // Play the next buffered PCM block, or silence on underrun.
        ev.outputBuffer.copyToChannel(audioChuncks.shift() || new Float32Array(2048), 0, 0);
    };
}

注意

  1. 這只是一個實例程序,未進行任何優化
  2. 在測試時請給揚聲器插上耳機收聽,或者讓揚聲器和麥克風放置到不一樣的房間。由於沒有作迴音消除,和破音處理,這樣聽上去會很爽。
  3. 本身生成一個https文件作測試

完整代碼

index.html

<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <title></title>
    <link rel="stylesheet" href="">
    <style type="text/css" media="screen">
        video, canvas {
            background-color: #e9e9e9;
            margin: 0 auto;
            display: block;
        }
        body {
            text-align: center;
        }
        video {
            display: none;
        }
    </style>
</head>
<body>
    <canvas width="320px" height="240px">

    </canvas>
    <video src="" width="320px" height="240px" controls muted></video>
    <button type="button" class="start">開始</button>
</body>
<script type="text/javascript">
    // Receiving side: collect incoming frames and buffer a few before playback.
    let ws = new WebSocket("wss://192.168.3.102"),
        imgChuncks = [],
        audioChuncks = [],
        img = null;
    // Receive binary messages as ArrayBuffer instead of the default Blob.
    ws.binaryType = 'arraybuffer';
    ws.onmessage = function (evt) {
        if (evt.data.byteLength === undefined) {
            // Text message: a base64-encoded JPEG frame.
            imgChuncks.push(evt.data);
        } else {
            // Binary message: raw mono PCM samples.
            audioChuncks.push(new Float32Array(evt.data));
        }
        // Start playback once more than two audio blocks are buffered.
        if (!img && audioChuncks.length > 2) {
            myplay();
        }
    };
    // Build the playback pipeline for the received image + audio frames.
    function myplay() {
        // An <img> whose src is swapped once per audio block acts as the "video".
        img = new Image();
        document.body.appendChild(img);
        // A ScriptProcessor paces playback: one callback per 2048-sample block.
        // NOTE(review): the buffer source is never start()ed; it only exists so
        // the processor has an input connected.
        let source = audioCtx.createBufferSource(),
            playback = audioCtx.createScriptProcessor(2048, 1, 1);
        source.connect(playback);
        playback.connect(audioCtx.destination);
        playback.onaudioprocess = function (ev) {
            // Advance the video only when a frame is available; assigning
            // undefined to img.src would request the page URL + "undefined".
            let frame = imgChuncks.shift();
            if (frame) {
                img.src = frame;
            }
            // Play the next buffered PCM block, or silence on underrun.
            ev.outputBuffer.copyToChannel(audioChuncks.shift() || new Float32Array(2048), 0, 0);
        };
    }
    let video = document.querySelector('video'),
        start = document.querySelector('.start'),
        canvas = document.querySelector('canvas'),
        ctx = canvas.getContext('2d'),
        audioCtx = new (window.AudioContext || window.webkitAudioContext)(),
        interval = null,
        mediaDevices = null,
        recorder = null; // declared explicitly instead of leaking an implicit global
    // Capture side: on click, grab camera + microphone and start pushing frames.
    start.onclick = function () {
        mediaDevices = navigator.mediaDevices.getUserMedia({ audio: true, video: { width: 320, height: 240 } });
        mediaDevices.then(stream => {
            // Play the raw stream in the (hidden) <video> element.
            video.srcObject = stream;
            video.play();
            // Route the audio track into the Web Audio graph for sampling.
            let source = audioCtx.createMediaStreamSource(stream);
            recorder = audioCtx.createScriptProcessor(2048, 1, 1);
            source.connect(recorder);
            recorder.connect(audioCtx.destination);
            recorder.onaudioprocess = function (ev) {
                // Mono PCM samples for this 2048-frame block.
                let inputBuffer = ev.inputBuffer.getChannelData(0);
                // Send the current video frame as a base64 JPEG.
                ws.send(canvas.toDataURL('image/jpeg'));
                // Copy the samples before sending: getChannelData() returns a
                // view into a buffer the audio engine may reuse next block.
                ws.send(new Float32Array(inputBuffer).buffer);
            };
        }).catch(err => {
            // Surface capture failures (permission denied, no devices, non-HTTPS).
            console.error('getUserMedia failed:', err);
        });
    };
    video.onplay = function () {
        // Mirror the video into the canvas so frames can be serialized to JPEG.
        interval = setInterval(function () {
            ctx.drawImage(video, 0, 0);
        }, 30);
    };
</script>
</html>

servers.js

// HTTPS static server + WebSocket relay: every message received from one
// client is forwarded unchanged to every other connected client.
let https = require('https'),
    fs = require('fs'),
    WebSocket = require('ws'),
    options = {
        key: fs.readFileSync('./key.pem'),
        cert: fs.readFileSync('./key-cert.pem')
    },
    server = https.createServer(options, function (req, res) {
        fs.readFile('./index.html', function (err, data) {
            if (err) {
                // Fail explicitly instead of sending an undefined body.
                res.writeHead(500, { 'Content-Type': 'text/plain' });
                res.end('index.html not found');
                return;
            }
            res.writeHead(200, { 'Content-Type': 'text/html' });
            res.end(data);
        });
    }).listen(443, function () {
        console.log('服務啓動成功');
    });
const wss = new WebSocket.Server({ server });
wss.on('connection', (ws) => {
    // binaryType is a per-socket property; setting it on the Server object
    // (as the original did) has no effect.
    ws.binaryType = 'arraybuffer';
    ws.on('message', function (data) {
        // Broadcast to every other open client.
        wss.clients.forEach(function each(client) {
            if (client.readyState === WebSocket.OPEN && client !== ws) {
                client.send(data);
            }
        });
    });
});
相關文章
相關標籤/搜索