websocket傳輸canvas圖像數據給C++服務端opencv圖像實現web在線實時圖像處理

先後端的耦合想了好久,上下課都在思考怎麼作,而後終於憋出來了。這是以前搞的一個視覺計算的項目,boss叫對接到前端,因而就產生了這樣一個詭異的需求,就是前端打開攝像頭,同時須要把攝像頭的數據回傳到後端進行圖像處理(好比美顏啊腦殼上加個裝飾品之類),這就須要涉及到前端和服務端的數據編碼耦合,想了想既然任何圖像在內存裏面都是一個uchar矩陣,因而琢磨了這個東西出來。

通常狀況下,圖像在內存裏的表達都是個uchar串,或者說byte流,由於我常常須要寫跨語言調用的玩意兒,因此通常在內存裏我都是用字符串和比特流進行交互,這裏我採用了一樣的思想,咱們把opencv的圖像進行編碼爲png,而後再一次編碼爲base64,經過websocket傳輸給前端。大致過程以下。

首先假設咱們的前端打開websocket鏈接後端,鏈接上了之後前端打開攝像頭取攝像頭數據傳輸給後端,後端經過一系列的圖像處理機器學習之後編碼圖像回傳給前端。

前端代碼:

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Title</title>
</head>
<body>
<!-- Hidden <video> receives the camera stream; frames are copied onto the canvas. -->
<video id="video" style="display: none" width="480" height="320" controls></video>

<canvas id="canvas" width="480" height="320"></canvas>
<!-- Shows the processed frame returned by the server. <img> is a void element: no closing tag. -->
<img id="target" width="480" height="320">
<script>
    var video = document.getElementById('video');
    var canvas = document.getElementById('canvas');
    var image = document.getElementById('target');
    var context = canvas.getContext('2d');
    var timer = null;   // explicit declaration (was an implicit global)
    var ws = new WebSocket("ws://127.0.0.1:9002");
    ws.binaryType = "arraybuffer";

    ws.onopen = function() {
        ws.send("I'm client");
    };

    ws.onmessage = function (evt) {
        console.log("receive");
        try {
            // Display the base64 image echoed back by the server.
            image.src = "data:image/png;base64," + evt.data;
            console.log(evt.data);
        } catch (e) {
            // Keep the page alive even if one frame fails to render.
            console.log("failed to display frame:", e);
        }
    };

    ws.onclose = function() {
        // Stop pushing frames once the socket is gone.
        if (timer) clearInterval(timer);
        alert("Closed");
    };

    ws.onerror = function(err) {
        alert("Error: " + err);
    };

    function getUserMedia(constraints, success, error) {
        if (navigator.mediaDevices.getUserMedia) {
            navigator.mediaDevices.getUserMedia(constraints).then(success).catch(error);
        }
    }

    // Success callback: attach the camera stream to the hidden <video>.
    function success(stream) {
        video.srcObject = stream;
        // play() returns a promise; surface autoplay-policy rejections
        // instead of silently ignoring them.
        var p = video.play();
        if (p && p.catch) p.catch(error);
    }

    function error(error) {
        console.log('訪問用戶媒體失敗:', error.name, error.message);
    }

    // Convert a canvas data URI (base64 or URL-encoded) into a Blob so the
    // raw image bytes can be sent over the websocket.
    function dataURItoBlob(dataURI) {
        // Decode the data component into a raw binary string.
        var byteString;
        if (dataURI.split(',')[0].indexOf('base64') >= 0)
            byteString = atob(dataURI.split(',')[1]);
        else
            byteString = unescape(dataURI.split(',')[1]);

        // Separate out the mime component, e.g. "image/jpeg".
        var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

        // Copy the bytes of the string into a typed array.
        var ia = new Uint8Array(byteString.length);
        for (var i = 0; i < byteString.length; i++) {
            ia[i] = byteString.charCodeAt(i);
        }

        return new Blob([ia], {type: mimeString});
    }

    if (navigator.mediaDevices.getUserMedia || navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia) {
        // Access the user's camera.
        getUserMedia({video: {width: 480, height: 320}}, success, error);
        timer = setInterval(
            function () {
                // send() on a CONNECTING socket throws InvalidStateError,
                // so wait until the connection is actually open.
                if (ws.readyState !== WebSocket.OPEN) return;
                context.drawImage(video, 0, 0, 480, 320);
                var data = canvas.toDataURL('image/jpeg', 1.0);
                var newblob = dataURItoBlob(data);
                // Ship the encoded frame to the backend.
                ws.send(newblob);
            }, 100); // 100 ms between frames; increase if backend processing is slow

    } else {
        alert('不支持訪問用戶媒體');
    }

</script>
</body>
</html>

C++服務器端(這裏須要使用到websocket++,讀者請自行編譯)

opencv_websocket_server.h

//
// Created by Pulsar on 2019/4/16.
//

#ifndef WEBSOCKETPP_OPENCV_WEBSOCKET_H
#define WEBSOCKETPP_OPENCV_WEBSOCKET_H

#include <opencv2/opencv.hpp>

#include <boost/thread/thread.hpp>
//#include <boost/bind.hpp>
#include <boost/thread/mutex.hpp>
#include <websocketpp/config/asio_no_tls.hpp>
#include <websocketpp/server.hpp>


// Plain (no-TLS) websocketpp server endpoint and its message handle type.
typedef websocketpp::server<websocketpp::config::asio> WebsocketServer;
typedef WebsocketServer::message_ptr message_ptr;

// Thin wrapper that loads a Haar cascade model and runs a websocket server
// which face-detects incoming frames and echoes them back as base64 images.
class opencv_websocket {
public:
    // file_path: path to the trained cascade XML (loaded into a global classifier).
    opencv_websocket(std::string file_path)	;
    // Blocks: listens on `port` and runs the Asio event loop until stopped.
    void Run(int port);
    ~opencv_websocket();
};


#endif //WEBSOCKETPP_OPENCV_WEBSOCKET_H

opencv_websocket_server.cpp

//
// Created by Pulsar on 2019/4/16.
//

#include <opencv_websocket.h>
//using websocketpp::lib::placeholders::_1;
//using websocketpp::lib::placeholders::_2;
//using websocketpp::lib::bind;
// File-scope shared state. `lock` serialises access to `cascade`, since
// OpenCV cascade detection is not safe to call concurrently from multiple
// websocket handler threads. `read_write_mutex` is currently unused here.
boost::shared_mutex  read_write_mutex;
boost::mutex lock;
cv::CascadeClassifier cascade;
//解碼base64數據
// Decode a base64 string (RFC 4648 alphabet, '+' and '/').
//
// Data/DataByte: pointer to and length of the encoded text. CR/LF line
// breaks, '=' padding and any non-alphabet bytes are skipped, so both
// wrapped (MIME-style) and unwrapped input decode correctly.
// Returns the decoded bytes as a std::string.
//
// The original table-based version indexed the table with a (possibly
// signed) char — undefined behaviour for bytes > 127 — and mishandled
// padding; this bit-buffer implementation avoids both problems.
static std::string base64Decode(const char *Data, int DataByte) {
    // Map one base64 character to its 6-bit value, or -1 for padding,
    // whitespace and anything outside the alphabet.
    auto decodeChar = [](unsigned char c) -> int {
        if (c >= 'A' && c <= 'Z') return c - 'A';
        if (c >= 'a' && c <= 'z') return c - 'a' + 26;
        if (c >= '0' && c <= '9') return c - '0' + 52;
        if (c == '+') return 62;
        if (c == '/') return 63;
        return -1;
    };

    std::string strDecode;
    strDecode.reserve((DataByte / 4) * 3);

    int buffer = 0;  // accumulated bits, most-significant first
    int bits = 0;    // number of valid bits currently in `buffer`
    for (int i = 0; i < DataByte; ++i) {
        const int v = decodeChar(static_cast<unsigned char>(Data[i]));
        if (v < 0) continue;  // skip '=', '\r', '\n' and invalid bytes
        buffer = (buffer << 6) | v;
        bits += 6;
        if (bits >= 8) {
            bits -= 8;
            strDecode += static_cast<char>((buffer >> bits) & 0xFF);
        }
    }
    return strDecode;
}

//編碼base64數據
// Encode DataByte bytes at Data as base64 (RFC 4648, '+' and '/', with
// '=' padding).
//
// Unlike the original, the output is NOT wrapped with "\r\n" every 76
// characters: the frontend pastes this string directly into a
// "data:image/...;base64," URI, and raw CR/LF control characters corrupt
// such URIs. Unwrapped output is what a data URI requires.
static std::string base64Encode(const unsigned char *Data, int DataByte) {
    const char EncodeTable[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    std::string strEncode;
    strEncode.reserve(((DataByte + 2) / 3) * 4);

    // Full 3-byte groups -> 4 output characters each.
    int i = 0;
    for (; i + 2 < DataByte; i += 3) {
        const unsigned b0 = Data[i];
        const unsigned b1 = Data[i + 1];
        const unsigned b2 = Data[i + 2];
        strEncode += EncodeTable[b0 >> 2];
        strEncode += EncodeTable[((b0 << 4) | (b1 >> 4)) & 0x3F];
        strEncode += EncodeTable[((b1 << 2) | (b2 >> 6)) & 0x3F];
        strEncode += EncodeTable[b2 & 0x3F];
    }

    // Trailing 1 or 2 bytes -> padded 4-character group.
    const int Mod = DataByte - i;
    if (Mod == 1) {
        const unsigned b0 = Data[i];
        strEncode += EncodeTable[b0 >> 2];
        strEncode += EncodeTable[(b0 << 4) & 0x3F];
        strEncode += "==";
    } else if (Mod == 2) {
        const unsigned b0 = Data[i];
        const unsigned b1 = Data[i + 1];
        strEncode += EncodeTable[b0 >> 2];
        strEncode += EncodeTable[((b0 << 4) | (b1 >> 4)) & 0x3F];
        strEncode += EncodeTable[(b1 << 2) & 0x3F];
        strEncode += "=";
    }

    return strEncode;
}

//imgType 包括png bmp jpg jpeg等opencv可以進行編碼解碼的文件
static std::string Mat2Base64(const cv::Mat &img, std::string imgType) {
    //Mat轉base64
    std::string img_data;
    std::vector<uchar> vecImg;
    std::vector<int> vecCompression_params;
    vecCompression_params.push_back(CV_IMWRITE_JPEG_QUALITY);
    vecCompression_params.push_back(90);
    imgType = "." + imgType;
    //重點來了,它是負責把圖像從opencv的Mat變成編碼好的圖像比特流的重要函數
    cv::imencode(imgType, img, vecImg, vecCompression_params);
    img_data = base64Encode(vecImg.data(), vecImg.size());
    return img_data;
}

//base64轉Mat
static cv::Mat Base2Mat(std::string &base64_data) {
    cv::Mat img;
    std::string s_mat;
    s_mat = base64Decode(base64_data.data(), base64_data.size());
    std::vector<char> base64_img(s_mat.begin(), s_mat.end());
    img = cv::imdecode(base64_img, CV_LOAD_IMAGE_COLOR);
    return img;
}


// websocketpp open handler: fires when a client completes the handshake.
// Neither parameter is needed — we only log the event.
void OnOpen(WebsocketServer *server, websocketpp::connection_hdl hdl) {
    (void) server;
    (void) hdl;
    std::cout << "have client connected" << std::endl;
}

// websocketpp close handler: fires when a client connection goes away.
// Neither parameter is needed — we only log the event.
void OnClose(WebsocketServer *server, websocketpp::connection_hdl hdl) {
    (void) server;
    (void) hdl;
    std::cout << "have client disconnected" << std::endl;
}

// Message handler: decode the client's frame, (optionally) run face
// detection, re-encode the result as base64 BMP and send it back as a
// text frame.
//
// The original called lock.lock()/lock.unlock() manually — if detection
// threw, the mutex stayed locked forever. An RAII scoped_lock releases it
// on every exit path.
void OnMessage(WebsocketServer *server, websocketpp::connection_hdl hdl, message_ptr msg) {
    const std::string &image_str = msg->get_payload();
    std::vector<char> img_vec(image_str.begin(), image_str.end());
    try {
        // Decode the raw image bytes the frontend sent.
        cv::Mat img = cv::imdecode(img_vec, CV_LOAD_IMAGE_COLOR);
        if (img.empty()) return;

        std::vector<cv::Rect> faces;
        {
            // Serialise access to the shared cascade classifier.
            boost::mutex::scoped_lock guard(lock);
//            cascade.detectMultiScale(img, faces, 1.1, 3, 0, cv::Size(30, 30));
//            for (size_t t = 0; t < faces.size(); t++){
//                cv::rectangle(img, faces[t], cv::Scalar(0, 0, 255), 2, 8);
//            }
        }

        cv::Mat output = img;
        if (!output.empty()) {
            // Return the processed frame to the frontend as base64 text.
            std::string strRespon = Mat2Base64(output, "bmp");
            server->send(hdl, strRespon, websocketpp::frame::opcode::text);
        }
    }
    catch (const std::exception &) {
        std::cout << " 解碼異常" << std::endl;
    }
}

// Load the trained cascade model (expected next to the executable) into
// the shared global classifier.
//
// The original reported failure with perror(), which appends a message
// based on errno — but cascade.load() is not a C call and does not set
// errno, so that message was misleading. Report via std::cerr instead.
opencv_websocket::opencv_websocket(std::string file_path) {
    if (!cascade.load(file_path)) {
        std::cerr << "Load Model Error: failed to load cascade file '"
                  << file_path << "'" << std::endl;
    }
}

// Nothing to clean up: the class owns no resources.
opencv_websocket::~opencv_websocket() = default;

void opencv_websocket::Run(int port) {
    WebsocketServer server;
    server.set_access_channels(websocketpp::log::alevel::all);
    server.clear_access_channels(websocketpp::log::alevel::frame_payload);

    // Initialize Asio
    server.init_asio();

    // Register our message handler
    server.set_open_handler(websocketpp::lib::bind(&OnOpen, &server, ::websocketpp::lib::placeholders::_1));
    server.set_close_handler(websocketpp::lib::bind(&OnClose, &server, websocketpp::lib::placeholders::_1));
    server.set_message_handler(websocketpp::lib::bind(OnMessage, &server, websocketpp::lib::placeholders::_1, websocketpp::lib::placeholders::_2));
    // Listen on port 9002
    server.listen(port);

    // Start the server accept loop
    server.start_accept();

    // Start the ASIO io_service run loop
    server.run();
}

// Entry point: load the face-detection model, then serve on port 9002.
int main(int argc, char **argv) {
    (void) argc;
    (void) argv;

    std::cout << "[INFO] load model" << std::endl;
    opencv_websocket opencv_websocket_server("haarcascade_frontalface_alt.xml");

    std::cout << "[INFO] start server" << std::endl;
    // NOTE: Run() blocks inside the Asio event loop, so the lines below
    // are only reached after the server shuts down.
    opencv_websocket_server.Run(9002);

    std::cout << "[INFO] listen" << std::endl;
    getchar();
    return 0;
}

上述工程地址:

https://gitee.com/Luciferearth/websocketpp
example\opencv_websocket_server下

注意websocket++在Windows下須要改動編譯依賴

去掉

iostream_server

testee_server

testee_client

utility_client

的Cmake(直接所有註釋)

CmakeLists.txt

set (WEBSOCKETPP_LIB ${WEBSOCKETPP_BUILD_ROOT}/lib)

後面加入如下編譯命令

######################################### OpenSSL ######################################
set(OPENSSL_INCLUDE_DIR D:/pgsql/include)
set(OPENSSL_LIBRARIES D:/pgsql/lib/ssleay32MD.lib;D:/pgsql/lib/libeay32MD.lib)
#######################################################################################
########################## Boost configuration on Windows ############################
# Build the bundled examples (the original set this twice; once is enough).
set(BUILD_EXAMPLES ON)

set(Boost_FOUND TRUE)
set(Boost_INCLUDE_DIRS E:/local/boost_1_67_0)
set(Boost_INCLUDE_DIR E:/local/boost_1_67_0)
set(Boost_LIBRARY_DIRS E:/local/boost_1_67_0/lib64-msvc-14.0 )
set(Boost_LIBRARIES
        boost_filesystem-vc140-mt-x64-1_67.lib
        boost_filesystem-vc140-mt-gd-x64-1_67.lib

        libboost_zlib-vc140-mt-gd-x64-1_67.lib
        libboost_zlib-vc140-mt-x64-1_67.lib

        boost_system-vc140-mt-gd-x64-1_67.lib
        boost_system-vc140-mt-x64-1_67.lib

        libboost_chrono-vc140-mt-s-x64-1_67.lib
        libboost_chrono-vc140-mt-gd-x64-1_67.lib

        boost_thread-vc140-mt-gd-x64-1_67.lib
        boost_thread-vc140-mt-x64-1_67.lib
        )
###################################################
###################################################

opencv-server

file (GLOB SOURCE_FILES *.cpp)
# Pick up both .hpp and .h headers — the project header is opencv_websocket.h,
# which the original *.hpp-only glob missed.
file (GLOB HEADER_FILES *.hpp *.h)

set(OPENCV_INCLUDE_DIR F:/Smart_Classroom/3rdparty/ALLPLATHFORM/opencv/include)
message(${OPENCV_INCLUDE_DIR})
set(OPENCV_LIB_DIR F:/Smart_Classroom/3rdparty/ALLPLATHFORM/opencv/x64/vc14/lib)
message(${OPENCV_LIB_DIR})
include_directories(${OPENCV_INCLUDE_DIR})
link_directories(${OPENCV_LIB_DIR})
init_target (opencv_websocket_server)

build_executable (${TARGET_NAME} ${SOURCE_FILES} ${HEADER_FILES})
file(COPY haarcascade_frontalface_alt.xml DESTINATION ${CMAKE_BINARY_DIR}/bin/)
#
link_boost ()
final_target ()
# Link the release OpenCV lib only in Release builds and the debug lib only
# in Debug builds; linking both unconditionally (as before) mixes runtimes
# and causes linker conflicts on MSVC.
target_link_libraries(opencv_websocket_server
        optimized opencv_world341.lib
        debug opencv_world341d.lib
        )
#
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "examples")

代碼不免在打字的時候打錯,有什麼問題聯繫筆者。整個服務端的實現難點無非在於編碼與解碼的方法保持客戶端和服務端數據耦合性,這個東西也琢磨了我好幾天才琢磨透,再接再厲吧,io真的是一個神奇的東西,當你把它深入的理解到內存的時候,它就像個聽話的孩子。

相關文章
相關標籤/搜索