Integrating the ArcSoft SDK with Node.js

==ArcSoft official website==

http://www.arcsoft.com.cn

Register an account on the official site and apply for a face recognition activation code. Choose the SDK version and target platform (windows/linux/android/ios); here we use windows for testing. For the application type choose 1:N, and for the feature modules select face detection, face tracking and face recognition. Once the application is approved you receive an APP_ID and an SDK_Key, both of which are used in the code.

==Purpose of the ArcSoft SDK face detection==

The main goal is to compare it with Face++ face detection and see whether ArcSoft can be used as a preliminary detection pass before calling Face++.

==C++ implementation==


Use Qt Creator 4.2.1 and create a new C++ library project.
Then set up the Qt .pro file:

# Do not link the Qt libraries
QT       -= core gui
# Name of the generated library
TARGET = detect_lib
# Build as a library
TEMPLATE = lib

DEFINES += DETECT_LIB_LIBRARY
SOURCES += detect_lib.cpp
# ArcSoft SDK headers
HEADERS += detect_lib.h \
    inc/amcomdef.h \
    inc/ammem.h \
    inc/arcsoft_fsdk_face_detection.h \
    inc/asvloffscreen.h \
    inc/merror.h

unix {
    target.path = /usr/lib
    INSTALLS += target
}

unix|win32: LIBS += -L$$PWD/lib/ -llibarcsoft_fsdk_face_detection

INCLUDEPATH += $$PWD/.
DEPENDPATH += $$PWD/.

The .pro file above is mostly build configuration: the name of the generated library, linking against the ArcSoft SDK, the header file paths, and so on.


Below is detect_lib.h, the interface header that Node.js will call.

#ifndef DETECT_LIB_H
#define DETECT_LIB_H

#   ifdef __cplusplus
#   define EXTERN_NAME extern "C"
#   else
#   define EXTERN_NAME extern
#   endif

#if defined(WIN32)
#   define Q_DECL_EXPORT __declspec(dllexport)
#   define Q_DECL_IMPORT __declspec(dllimport)
#if defined(DETECT_LIB_LIBRARY)
#   define DETECT_LIBSHARED_EXPORT EXTERN_NAME Q_DECL_EXPORT
#   else
#   define DETECT_LIBSHARED_EXPORT EXTERN_NAME Q_DECL_IMPORT
#endif
#else
#   define DETECT_LIBSHARED_EXPORT EXTERN_NAME
#endif

DETECT_LIBSHARED_EXPORT int add(int a,int b);

DETECT_LIBSHARED_EXPORT int detect(unsigned char * data,int width,int height);

#endif // DETECT_LIB_H

The add function is only there as a simple test of the binding.

int detect(unsigned char * data,int width,int height);

The face detection function. data: raw pixel data (4 bytes per pixel, as delivered from a Node.js Buffer; it is converted to a 3-byte-per-pixel format internally), width: image width in pixels, height: image height in pixels.


detect_lib.cpp

#include "detect_lib.h"
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <Windows.h>
#include "inc/arcsoft_fsdk_face_detection.h"
#include "inc/merror.h"

// Working buffer for the detection engine
#define WORKBUF_SIZE        (40*1024*1024)

char* APPID = const_cast<char*>("4KjUsRayGjxVv2UrajQBcgB2RKB3JFxGu5HZCrv7T6no");
char* SDKKey = const_cast<char*>("BdFg8U29aDvUKdScmJcCZpep2xqYmrx4NouJ7iGm5BuX");

// Convert the 4-byte-per-pixel input buffer (as received from Node.js) into a
// tightly packed 3-byte-per-pixel buffer for the SDK: the first three channels
// of each pixel are copied in reversed order and the 4th channel is dropped.
void changeFormat(unsigned char * data,unsigned char ** destData,int width,int height)
{
    *destData = new unsigned char[width * height * 3];
    int num = 0;
    for(int i = 0 ;i < width * height ;i ++)
    {
        int index = 4 * i;

        (*destData) [num ++]  = data[index +2];
        (*destData) [num ++]  = data[index +1];
        (*destData) [num ++]  = data[index +0];

    }
}

// Alternative conversion (unused in detect): drops every 4th byte and copies
// the rows bottom-up, as for BMP pixel data read from disk.
void charToUint (char* data,uint8_t ** imageData,int height,int width){

    int lineByte = (width * 24 / 8 + 3) / 4 * 4;
    *imageData = (uint8_t *)malloc(lineByte  * (height));
    char * tImageData = (char *)malloc(lineByte  * (height));
    int num = 0;
    for(int i = 0 ;i < width * height*4 ;i++){
        if((i+1) % 4 != 0){
            tImageData[num] = data[i];
            num ++;
        }
    }
    for (int i = 0; i < height; i++)
    {
        for (int j = 0; j < width; j++)
        {
            memcpy((*imageData) + i * width * 3 + j * 3, (uint8_t *)tImageData + ((height - 1) - i) * lineByte + j * 3, 3);
        }
    }
    free(tImageData);
}

int add(int a,int b){
    return a + b;
}



int detect(unsigned char * data,int width,int height)
{
    /* Initialize the engine and variables */
    MRESULT nRet = MERR_UNKNOWN;
    MHandle hEngine = nullptr;
    MInt32 nScale = 32;
    MInt32 nMaxFace = 10;
    MByte *pWorkMem = (MByte *)malloc(WORKBUF_SIZE);
    if (pWorkMem == nullptr)
    {
        return -1;
    }
    nRet = AFD_FSDK_InitialFaceEngine(APPID, SDKKey, pWorkMem, WORKBUF_SIZE, &hEngine, AFD_FSDK_OPF_0_HIGHER_EXT, nScale, nMaxFace);
    if (nRet != MOK)
    {
        free(pWorkMem);
        return -1;
    }
    /* Print version information */
    const AFD_FSDK_Version * pVersionInfo = nullptr;
    pVersionInfo = AFD_FSDK_GetVersion(hEngine);
    fprintf(stdout, "%d %d %d %d\n", pVersionInfo->lCodebase, pVersionInfo->lMajor, pVersionInfo->lMinor, pVersionInfo->lBuild);
    fprintf(stdout, "%s\n", pVersionInfo->Version);
    fprintf(stdout, "%s\n", pVersionInfo->BuildDate);
    fprintf(stdout, "%s\n", pVersionInfo->CopyRight);

    /* Fill the ASVLOFFSCREEN structure with the image data (here in ASVL_PAF_RGB24_B8G8R8 format) */
    ASVLOFFSCREEN offInput = { 0 };
    //offInput.u32PixelArrayFormat = ASVL_PAF_RGB32_R8G8B8;
    offInput.u32PixelArrayFormat = ASVL_PAF_RGB24_B8G8R8;

//    offInput.ppu8Plane[0] = nullptr;
    offInput.i32Width = width;
    offInput.i32Height = height;
//    charToUint(data,(uint8_t**)&offInput.ppu8Plane[0],height,width);
    //readBmp24(INPUT_IMAGE_PATH, (uint8_t**)&offInput.ppu8Plane[0], &offInput.i32Width, &offInput.i32Height);
    unsigned char *dstdata = nullptr;
    changeFormat(data,&dstdata,width,height);
    offInput.ppu8Plane[0] = (MUInt8*) dstdata;
    if (!offInput.ppu8Plane[0])
    {
        //fprintf(stderr, "Fail to ReadBmp(%s)\n", INPUT_IMAGE_PATH);
        AFD_FSDK_UninitialFaceEngine(hEngine);
        free(pWorkMem);
        return -1;
    }
    else
    {
        fprintf(stdout, "Picture width : %d , height : %d \n", offInput.i32Width, offInput.i32Height);
    }
    offInput.pi32Pitch[0] = offInput.i32Width * 3;

    /* Face detection */
    LPAFD_FSDK_FACERES    FaceRes = nullptr;
    nRet = AFD_FSDK_StillImageFaceDetection(hEngine, &offInput, &FaceRes);
    int nface = 0;
    if (nRet != MOK)
    {
        fprintf(stderr, "Face Detection failed, error code: %d\n", nRet);
    }
    else
    {
        nface = FaceRes->nFace;
        fprintf(stdout, "The number of face: %d\n", FaceRes->nFace);
        for (int i = 0; i < FaceRes->nFace; ++i)
        {
            fprintf(stdout, "Face[%d]: rect[%d,%d,%d,%d], Face orient: %d\n", i, FaceRes->rcFace[i].left, FaceRes->rcFace[i].top, FaceRes->rcFace[i].right, FaceRes->rcFace[i].bottom, FaceRes->lfaceOrient[i]);
        }
    }

    /* Release the engine and working memory */
    nRet = AFD_FSDK_UninitialFaceEngine(hEngine);
    if (nRet != MOK)
    {
        fprintf(stderr, "UninitialFaceEngine failed , errorcode is %d \n", nRet);
    }
    //free(offInput.ppu8Plane[0]);
    delete[] offInput.ppu8Plane[0];
    free(pWorkMem);
    return nface;
}

2. Node.js integration

Node.js integrates the C++ module through node-gyp.
1. Create a binding.gyp file:

{
  "targets": [
    {
      "target_name": "detect",
      "sources": [ "detect_lib.h","detect.cpp" ],
      'library_dirs': ['./'],
      "include_dirs" : ["<!(node -e \"require('nan')\")","./"],
      'libraries': ['-ldetect_lib',]
    }
  ]
}

 

This is the gyp configuration file.

target_name is the name of the generated .node file.

sources lists the C++ source files to compile (.h, .cpp, .c).

library_dirs: directories searched for libraries.

include_dirs: header search paths; besides the directory holding our own headers, this also pulls in the nan module's headers (the nan package must be installed for the node -e "require('nan')" expansion to resolve).

libraries: the names of the libraries to link against.

Create ArcDetect.cpp, the interface file that bridges Node's V8 engine and the C++ library.

 

#include <nan.h>
#include "detect_lib.h"
using namespace Nan;
using namespace v8;


class DetectWorker : public AsyncWorker {
public:
    DetectWorker(Callback *callback, unsigned char* buffer,int width,int height)
        : AsyncWorker(callback), p_buffer(buffer), m_width(width),m_height(height) {m_num = 0;}
    ~DetectWorker() {}

    // This runs on a worker thread, not the V8 thread, so V8 data must not be touched here
    void Execute () {

        //m_num = add(12,3);
        m_num = detect(p_buffer,m_width,m_height);
        // m_num = 5;

    }

    // libuv callback, executed back on the V8 thread, so V8 data can be used here
    void HandleOKCallback () {

        Local<Object> bmpData = NewBuffer(m_num).ToLocalChecked();
        Local<Value> argv[] = {
            Nan::Null()
            ,Uint32::New(v8::Isolate::GetCurrent(),m_num)
        };


        callback->Call(2, argv);
    };

private:
    unsigned char * p_buffer;
    int m_width;
    int m_height;
    int m_num;
};


NAN_METHOD(detect){
    unsigned char * buffer = (unsigned char*) node::Buffer::Data(info[0]->ToObject());
    int width = info[1]->Uint32Value();
    int height = info[2]->Uint32Value();

    Callback *callback = new Callback(info[3].As<Function>());
    AsyncQueueWorker(new DetectWorker(callback, buffer,width ,height));
}

NAN_MODULE_INIT(Init)
{
    Nan::Set(target,New<String>("detect").ToLocalChecked(),
        GetFunction(New<FunctionTemplate>(detect)).ToLocalChecked());
}

NODE_MODULE(detect, Init)

 

 

NAN_METHOD(detect) defines the detect interface, which JavaScript can call directly.
The key point is that a Node.js Buffer is handed to C++ as raw bytes; this is the main way Node.js and C++ exchange data here.

Copy the compiled DLL, the ArcSoft SDK DLLs and detect_lib.h into the current directory, then run node-gyp configure and node-gyp build to generate the .node addon.

At this point the .node addon is built and can be loaded directly with require, e.g.: var detect = require('./build/Release/detect.node');
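
To round this off, here is a minimal JavaScript usage sketch (not from the original project). It assumes the exported detect(buffer, width, height, callback) signature defined above, and uses the jimp package purely as an assumed way to decode an image into a 4-byte-per-pixel Buffer; the callback receives the number of detected faces.

// usage sketch, assuming the addon built above and an assumed image decoder (jimp)
var detect = require('./build/Release/detect.node');
var Jimp = require('jimp'); // assumption: any source of 4-byte-per-pixel data will do

Jimp.read('./test.jpg', function (err, image) {
    if (err) throw err;
    // image.bitmap.data holds 4 bytes per pixel, which is what changeFormat() expects
    detect.detect(image.bitmap.data, image.bitmap.width, image.bitmap.height, function (err, nFace) {
        if (err) return console.error(err);
        console.log('faces detected:', nFace);
    });
});

Since changeFormat() simply reverses the first three channels of every 4-byte pixel, the decoder's channel order determines whether the SDK really receives BGR24 data, so check that against your own image source.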
