Qualcomm Camera HAL 2.0

咱們知道在HAL的Vendor實現當中會動態去load一個名字爲camera.$platform$.so的檔案,而後去加載Android HAL當中定義的方法,這裏以Camera HAL 2.0而且Qualcomm msm8960爲例子看下,結合以前的一篇文章(http://guoh.org/lifelog/2013/07/glance-at-camera-hal-2-0/)。

(注:這篇文章已經草稿比較久了,可是一直沒有發出來,由於手裏的這版代碼沒有設備能夠跑,另外也沒法肯定代碼是否徹底正確,至少發現了一些地方都是stub實現,文中可能存在一些錯誤,如發現不正確的地方歡迎指出,我也會盡可能發現錯誤並修正!)

咱們知道在camera2.h當中定義了不少方法,那麼在msm8960 HAL就是在以下地方
/path/to/qcam-hal/QCamera/HAL2
這編譯出來就是一個camera.$platform$.so,請看它的實現
首先是HAL2/wrapper/QualcommCamera.h|cpp

/**
 * The functions need to be provided by the camera HAL.
 *
 * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
 * and openCameraHardware() is 0 to N-1.
 */
 
/* Module open hook table: the framework opens a camera device through
 * hw_module_methods_t::open, bound here to camera_device_open()
 * (GNU-style "name:" designated initializer). */
static hw_module_methods_t camera_module_methods = {
    open: camera_device_open,
};
 
/* Common module info reported to the Android hardware module loader. */
static hw_module_t camera_common  = {
    tag: HARDWARE_MODULE_TAG,
    module_api_version: CAMERA_MODULE_API_VERSION_2_0, // so Camera Service will bring up the Camera2Client path
    hal_api_version: HARDWARE_HAL_API_VERSION,
    id: CAMERA_HARDWARE_MODULE_ID,
    name: "Qcamera",
    author:"Qcom",
    methods: &camera_module_methods,
    dso: NULL,
    reserved:  {0},
};
 
camera_module_t HAL_MODULE_INFO_SYM = { // the HMI symbol every HAL module must export
    common: camera_common,
    get_number_of_cameras: get_number_of_cameras,
    get_camera_info: get_camera_info,
};
 
camera2_device_ops_t camera_ops = { // note the functions bound into the camera2 device ops table

    set_request_queue_src_ops:           android::set_request_queue_src_ops,
    notify_request_queue_not_empty:      android::notify_request_queue_not_empty,
    set_frame_queue_dst_ops:             android::set_frame_queue_dst_ops,
    get_in_progress_count:               android::get_in_progress_count,
    flush_captures_in_progress:          android::flush_captures_in_progress,
    construct_default_request:           android::construct_default_request,

    allocate_stream:                     android::allocate_stream,
    register_stream_buffers:             android::register_stream_buffers,
    release_stream:                      android::release_stream,

    allocate_reprocess_stream:           android::allocate_reprocess_stream,
    allocate_reprocess_stream_from_stream: android::allocate_reprocess_stream_from_stream,
    release_reprocess_stream:            android::release_reprocess_stream,

    trigger_action:                      android::trigger_action,
    set_notify_callback:                 android::set_notify_callback,
    get_metadata_vendor_tag_ops:         android::get_metadata_vendor_tag_ops,
    dump:                                android::dump,
};
 
typedef struct { // note: this is Qualcomm's own wrapper structure
  camera2_device_t hw_dev; // the standard camera2 device; first member, so the
                           // device pointer given to the framework aliases this wrapper
  QCameraHardwareInterface *hardware;
  int camera_released;
  int cameraId;
} camera_hardware_t;
 
/* HAL should return NULL if it fails to open camera hardware. */
/* HAL entry point bound into camera_module_methods.
 * On success, stores the embedded camera2_device_t in *hw_device and
 * returns 0; on any failure *hw_device is set to NULL and -1 is returned.
 * (The original unconditionally executed
 *  `*hw_device = (hw_device_t*)&device->common;` even on failure paths where
 *  `device` was NULL -- a NULL dereference / UB -- and wrote *hw_device
 *  before validating the arguments.) */
extern "C" int  camera_device_open(
  const struct hw_module_t* module, const char* id,
          struct hw_device_t** hw_device)
{
    int rc = -1;
    int mode = 0;

    if (!module || !id || !hw_device) {
        ALOGE("%s: NULL module/id/hw_device", __func__);
        return rc;
    }
    *hw_device = NULL; /* stays NULL unless every step below succeeds */

    if (strcmp(module->name, camera_common.name)) {
        return rc; /* not our module */
    }

    int cameraId = atoi(id);
    camera_hardware_t *camHal =
        (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
    if (!camHal) {
        ALOGE("%s:  end in no mem", __func__);
        return rc;
    }
    /* we have the camera_hardware obj malloced */
    memset(camHal, 0, sizeof (camera_hardware_t));
    camHal->hardware = new QCameraHardwareInterface(cameraId, mode);
    if (camHal->hardware && camHal->hardware->isCameraReady()) {
        camHal->cameraId = cameraId;
        camera2_device_t *device = &camHal->hw_dev; // initialize the embedded camera2_device_t
        device->common.close = close_camera_device;
        device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
        device->ops = &camera_ops;
        device->priv = (void *)camHal;
        /* pass actual hw_device ptr to framework: the classic kernel /
         * Android-native trick of exposing the first member of a wrapper */
        *hw_device = &device->common;
        rc = 0;
    } else {
        delete camHal->hardware; /* delete of NULL is a safe no-op */
        camHal->hardware = NULL;
        free(camHal);
    }
    return rc;
}


看看allocate stream 數據結構

/* camera2_device_ops_t::allocate_stream glue: forwards the request to the
 * QCameraHardwareInterface attached to this device and propagates its
 * status to the framework.
 * (The original returned an undeclared `rc` -- a compile error -- and
 * discarded the HAL object's actual return value; it also dereferenced
 * `hardware` without a NULL check.) */
int allocate_stream(const struct camera2_device *device,
        uint32_t width,
        uint32_t height,
        int      format,
        const camera2_stream_ops_t *stream_ops,
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers)
{
    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
    if (!hardware) {
        return -1; /* no HAL object attached to this device */
    }
    int rc = hardware->allocate_stream(width, height, format, stream_ops,
            stream_id, format_actual, usage, max_buffers);
    return rc;
}



這裏注意QCameraHardwareInterface在QCameraHWI.h|cpp當中
int QCameraHardwareInterface::allocate_stream(
    uint32_t width,
    uint32_t height, int format,
    const camera2_stream_ops_t *stream_ops,
    uint32_t *stream_id,
    uint32_t *format_actual,
    uint32_t *usage,
    uint32_t *max_buffers)
{
    int ret = OK;
    QCameraStream *stream = NULL;
    camera_mode_t myMode = (camera_mode_t)(CAMERA_MODE_2D|CAMERA_NONZSL_MODE);
 
    stream = QCameraStream_preview::createInstance(
                        mCameraHandle->camera_handle,
                        mChannelId,
                        width,
                        height,
                        format,
                        mCameraHandle,
                        myMode);
 
    stream->setPreviewWindow(stream_ops); // 這裏,也就是隻要經過該方法建立的stream,都會有對應的ANativeWindow進來
    *stream_id = stream->getStreamId();
    *max_buffers= stream->getMaxBuffers(); // 從HAL獲得的
    *usage = GRALLOC_USAGE_HW_CAMERA_WRITE | CAMERA_GRALLOC_HEAP_ID
        | CAMERA_GRALLOC_FALLBACK_HEAP_ID;
    /* Set to an arbitrary format SUPPORTED by gralloc */
    *format_actual = HAL_PIXEL_FORMAT_YCrCb_420_SP;
 
    return ret;
}



QCameraStream_preview::createInstance直接調用本身的構造方法,也就是下面
(相關class在QCameraStream.h|cpp和QCameraStream_Preview.cpp)
// Preview-stream constructor: forwards the channel/geometry parameters to
// the QCameraStream base, then allocates a stream id and derives the
// buffer-queue depth from the requested pixel format.
QCameraStream_preview::QCameraStream_preview(uint32_t CameraHandle,
                        uint32_t ChannelId,
                        uint32_t Width,
                        uint32_t Height,
                        int requestedFormat,
                        mm_camera_vtbl_t *mm_ops,
                        camera_mode_t mode) :
                 QCameraStream(CameraHandle,
                        ChannelId,
                        Width,
                        Height,
                        mm_ops,
                        mode),
                 mLastQueuedFrame(NULL),
                 mDisplayBuf(NULL),
                 mNumFDRcvd(0)
{
    // Grab a free stream id (tracked via mStreamTable).
    mStreamId = allocateStreamId();

    // Max buffer count depends on the requested format.
    if (requestedFormat == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) {
        mMaxBuffers = 5;
    } else if (requestedFormat == HAL_PIXEL_FORMAT_BLOB) {
        mMaxBuffers = 1;
    } else {
        ALOGE("Unsupported requested format %d", requestedFormat);
        mMaxBuffers = 1;
    }
    /*TODO: There has to be a better way to do this*/
}



再看看
/path/to/qcam-hal/QCamera/stack/mm-camera-interface/
mm_camera_interface.h
當中
/* Per-camera virtual table: bundles the opened camera's handle, its static
 * info, and the ops call table handed back to the HAL layer. */
typedef struct {
    uint32_t camera_handle;        /* camera object handle */
    mm_camera_info_t *camera_info; /* reference pointer of camera info */
    mm_camera_ops_t *ops;          /* API call table */
} mm_camera_vtbl_t;



mm_camera_interface.c
當中
/* camera ops v-table: binds every mm_camera_ops_t entry point to its
 * mm_camera_intf_* implementation in mm_camera_interface.c. */
static mm_camera_ops_t mm_camera_ops = {
    .sync = mm_camera_intf_sync,
    .is_event_supported = mm_camera_intf_is_event_supported,
    .register_event_notify = mm_camera_intf_register_event_notify,
    .qbuf = mm_camera_intf_qbuf,
    .camera_close = mm_camera_intf_close,
    .query_2nd_sensor_info = mm_camera_intf_query_2nd_sensor_info,
    .is_parm_supported = mm_camera_intf_is_parm_supported,
    .set_parm = mm_camera_intf_set_parm,
    .get_parm = mm_camera_intf_get_parm,
    .ch_acquire = mm_camera_intf_add_channel,
    .ch_release = mm_camera_intf_del_channel,
    .add_stream = mm_camera_intf_add_stream,
    .del_stream = mm_camera_intf_del_stream,
    .config_stream = mm_camera_intf_config_stream,
    .init_stream_bundle = mm_camera_intf_bundle_streams,
    .destroy_stream_bundle = mm_camera_intf_destroy_bundle,
    .start_streams = mm_camera_intf_start_streams,
    .stop_streams = mm_camera_intf_stop_streams,
    .async_teardown_streams = mm_camera_intf_async_teardown_streams,
    .request_super_buf = mm_camera_intf_request_super_buf,
    .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
    .start_focus = mm_camera_intf_start_focus,
    .abort_focus = mm_camera_intf_abort_focus,
    .prepare_snapshot = mm_camera_intf_prepare_snapshot,
    .set_stream_parm = mm_camera_intf_set_stream_parm,
    .get_stream_parm = mm_camera_intf_get_stream_parm
};



以start stream爲例子

mm_camera_intf_start_streams(mm_camera_interface
    mm_camera_start_streams(mm_camera
        mm_channel_fsm_fn(mm_camera_channel
            mm_channel_fsm_fn_active(mm_camera_channel
                mm_channel_start_streams(mm_camera_channel
                    mm_stream_fsm_fn(mm_camera_stream
                        mm_stream_fsm_reg(mm_camera_stream
                            mm_camera_cmd_thread_launch(mm_camera_data
                            mm_stream_streamon(mm_camera_stream



注意:本文當中,如上這種梯度擺放,表示是調用關係,若是梯度是同樣的,就表示這些方法是在上層同一個方法裏面被調用的

/* Turn the stream on: register the stream's fd with the channel's data
 * poll thread, then issue VIDIOC_STREAMON on the V4L2 device. If the
 * ioctl fails, the fd is removed from the poll thread again.
 * Returns 0 on success, negative on error. */
int32_t mm_stream_streamon(mm_stream_t *my_obj)
{
    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int32_t ret;

    /* Add fd to the data poll thread so frame notifications can fire */
    ret = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
                                            my_obj->my_hdl,
                                            my_obj->fd,
                                            mm_stream_data_notify,
                                            (void*)my_obj);
    if (ret < 0)
        return ret;

    ret = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
    if (ret < 0) {
        CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
                   __func__, ret);
        /* remove fd from data poll thread in case of failure */
        mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl);
    }
    return ret;
}



看到ioctl,VIDIOC_STREAMON,能夠高興一下了,這就是V4L2規範當中用戶空間和內核空間通訊的方法,V4L2(Video for Linux Two)是一種經典並且成熟的視頻通訊協議,以前是V4L,不清楚的能夠去下載它的規範,另外The Video4Linux2(http://lwn.net/Articles/203924/)也是很好的資料。
這裏簡單介紹下:

open(VIDEO_DEVICE_NAME, ...) // 開啓視頻設備,通常在程序初始化的時候調用

ioctl(...) // 主要是一些須要傳輸數據量很小的控制操做
這裏能夠用的參數不少,而且一般來講咱們會按照如下方式來使用,好比
VIDIOC_QUERYCAP // 查詢設備能幹什麼
VIDIOC_CROPCAP // 查詢設備crop能力
VIDIOC_S_* // set/get方法,設置/獲取參數
VIDIOC_G_*
VIDIOC_REQBUFS // 分配buffer,能夠有多種方式
VIDIOC_QUERYBUF // 查詢分配的buffer的信息
VIDIOC_QBUF // QUEUE BUFFER 把buffer壓入DRV緩存隊列(這時候buffer是空的)
VIDIOC_STREAMON // 開始視頻數據傳輸
VIDIOC_DQBUF // DEQUEUE BUFFER 把buffer從DRV緩存隊列中取出(這時候buffer是有數據的)

[0...n]
QBUF -> DQBUF // 能夠一直重複這個動做

VIDIOC_STREAMOFF // 中止視頻數據傳輸

close(VIDEO_DEVICE_FD) // 關閉設備
上面就是主要的函數和簡單的調用順序,另外還有幾個函數

select() // 等待事件發生,主要用在咱們把存frame的buffer推給DRV之後,等待它的反應
mmap/munmap // 主要處理咱們request的buffer的,buffer分配在設備的內存空間的時候須要

而且看看mm_camera_stream這個文件裏面也都是這麼實現的。

看完這裏,咱們回過頭來繼續看QCam HAL,固然它實現的細節也不是我上面start stream所列的那麼簡單,可是其實也不算複雜,以爲重要的就是狀態和用到的結構。

首先是channel狀態,目前只支持1個channel,可是能夠有多個streams(後面會介紹,並且目前最多支持8個streams)

/* mm_channel */
typedef enum { /* channel life-cycle states */
    MM_CHANNEL_STATE_NOTUSED = 0,   /* not used */
    MM_CHANNEL_STATE_STOPPED,       /* stopped */
    MM_CHANNEL_STATE_ACTIVE,        /* active, at least one stream active */
    MM_CHANNEL_STATE_PAUSED,        /* paused */
    MM_CHANNEL_STATE_MAX
} mm_channel_state_type_t;

它能夠執行的事件

/* Events the channel state machine (mm_channel_fsm_fn) can process. */
typedef enum {
    MM_CHANNEL_EVT_ADD_STREAM,
    MM_CHANNEL_EVT_DEL_STREAM,
    MM_CHANNEL_EVT_START_STREAM,
    MM_CHANNEL_EVT_STOP_STREAM,
    MM_CHANNEL_EVT_TEARDOWN_STREAM,
    MM_CHANNEL_EVT_CONFIG_STREAM,
    MM_CHANNEL_EVT_PAUSE,
    MM_CHANNEL_EVT_RESUME,
    MM_CHANNEL_EVT_INIT_BUNDLE,
    MM_CHANNEL_EVT_DESTROY_BUNDLE,
    MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
    MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
    MM_CHANNEL_EVT_START_FOCUS,
    MM_CHANNEL_EVT_ABORT_FOCUS,
    MM_CHANNEL_EVT_PREPARE_SNAPSHOT,
    MM_CHANNEL_EVT_SET_STREAM_PARM,
    MM_CHANNEL_EVT_GET_STREAM_PARM,
    MM_CHANNEL_EVT_DELETE,
    MM_CHANNEL_EVT_MAX
} mm_channel_evt_type_t;


/* mm_stream */
typedef enum { // NB: track these carefully -- every operation advances the stream's state
    MM_STREAM_STATE_NOTUSED = 0,      /* not used */
    MM_STREAM_STATE_INITED,           /* inited  */
    MM_STREAM_STATE_ACQUIRED,         /* acquired, fd opened  */
    MM_STREAM_STATE_CFG,              /* fmt & dim configured */
    MM_STREAM_STATE_BUFFED,           /* buf allocated */
    MM_STREAM_STATE_REG,              /* buf regged, stream off */
    MM_STREAM_STATE_ACTIVE_STREAM_ON, /* active with stream on */
    MM_STREAM_STATE_ACTIVE_STREAM_OFF, /* active with stream off */
    MM_STREAM_STATE_MAX
} mm_stream_state_type_t;

一樣,stream能夠執行的事件

/* Events the stream state machine (mm_stream_fsm_fn) can process. */
typedef enum {
    MM_STREAM_EVT_ACQUIRE,
    MM_STREAM_EVT_RELEASE,
    MM_STREAM_EVT_SET_FMT,
    MM_STREAM_EVT_GET_BUF,
    MM_STREAM_EVT_PUT_BUF,
    MM_STREAM_EVT_REG_BUF,
    MM_STREAM_EVT_UNREG_BUF,
    MM_STREAM_EVT_START,
    MM_STREAM_EVT_STOP,
    MM_STREAM_EVT_QBUF,
    MM_STREAM_EVT_SET_PARM,
    MM_STREAM_EVT_GET_PARM,
    MM_STREAM_EVT_MAX
} mm_stream_evt_type_t;

這裏每次執行函數的時候都須要檢查channel/stream的狀態,只有狀態正確的時候纔會去執行

好比你能夠觀察到
mm_channel的mm_channel_state_type_t state;
mm_stream的mm_stream_state_type_t state;
均表示這個結構當前的狀態

另外
struct mm_camera_obj
struct mm_channel
struct mm_stream
這三個也是自上而下包含的,而且stream和channel還會持有父結構(暫且這麼稱呼,實際爲container關係)的引用。

實際上Vendor的HAL每一個都有本身實現的方法,也可能包含不少特有的東西,好比這裏它會餵給ioctl一些特有的命令或者數據結構,這些咱們就只有在作特定平臺的時候去考慮了。這些均可能變幻無窮,好比OMAP4它同DRV溝通是透過rpmsg,並用OpenMAX的一套規範來實現的。

理論就這麼多,接着看一個實例,好比咱們在Camera Service要去start preview:

Camera2Client::startPreviewL
    StreamingProcessor->updatePreviewStream
        Camera2Device->createStream
            StreamAdapter->connectToDevice
                camera2_device_t->ops->allocate_stream // 上面有分析
                native_window_api_*或者native_window_*
 
    StreamingProcessor->startStream
        Camera2Device->setStreamingRequest
            Camera2Device::RequestQueue->setStreamSlot // 建立一個stream slot
                Camera2Device::RequestQueue->signalConsumerLocked

// Wakes up the queue's consumers; called with mMutex held.
// The mutex is dropped around the HAL callback to avoid holding it across
// the vendor call, then re-acquired before returning.
status_t Camera2Device::MetadataQueue::signalConsumerLocked() {
    status_t res = OK;
    notEmpty.signal();
    if (mSignalConsumer && mDevice != NULL) {
        mSignalConsumer = false;
        mMutex.unlock();
        res = mDevice->ops->notify_request_queue_not_empty(mDevice); // tell the vendor HAL's command thread to run.
                                                                     // notify_request_queue_not_empty is not raised for every request:
                                                                     // only on initialization, or after the command thread got NULL
                                                                     // from dequeue and Camera Service later enqueued a new request.
                                                                     // This keeps the thread from idling with no requests pending
                                                                     // (the usual alternative is parking the thread on a lock).
        mMutex.lock();
    }
    return res;
}
然而在Qualcomm HAL當中

int notify_request_queue_not_empty(const struct camera2_device *device) // 這個方法註冊到camera2_device_ops_t當中
    QCameraHardwareInterface->notify_request_queue_not_empty()
        pthread_create(&mCommandThread, &attr, command_thread, (void *)this) != 0)
void *command_thread(void *obj)
{
    ...
    pme->runCommandThread(obj);
}
void QCameraHardwareInterface::runCommandThread(void *data)
{
    /**
     * This function implements the main service routine for the incoming
     * frame requests, this thread routine is started everytime we get a
     * notify_request_queue_not_empty trigger, this thread makes the
     * assumption that once it receives a NULL on a dequest_request call
     * there will be a fresh notify_request_queue_not_empty call that is
     * invoked thereby launching a new instance of this thread. Therefore,
     * once we get a NULL on a dequeue request we simply let this thread die
     */
    int res;
    camera_metadata_t *request=NULL;
    mPendingRequests=0;

    while (mRequestQueueSrc) { // mRequestQueueSrc is installed via set_request_queue_src_ops;
                               // see Camera2Device::MetadataQueue::setConsumerDevice,
                               // called from Camera2Device::initialize
        ALOGV("%s:Dequeue request using mRequestQueueSrc:%p",__func__,mRequestQueueSrc);
        mRequestQueueSrc->dequeue_request(mRequestQueueSrc, &request); // pull the next framework request
        if (request==NULL) {
            ALOGE("%s:No more requests available from src command \
                    thread dying",__func__);
            return;
        }
        mPendingRequests++;

        /* Set the metadata values */

        /* Wait for the SOF for the new metadata values to be applied */

        /* Check the streams that need to be active in the stream request */
        sort_camera_metadata(request);

        camera_metadata_entry_t streams;
        res = find_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                &streams);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading output stream tag", __FUNCTION__);
            // NOTE(review): returns without free_request(request) or
            // decrementing mPendingRequests -- looks like a leak; confirm.
            return;
        }

        res = tryRestartStreams(streams); // prepareStream + streamOn; detailed code appears later
        if (res != NO_ERROR) {
            ALOGE("error tryRestartStreams %d", res);
            // NOTE(review): same as above -- request is not freed on this path.
            return;
        }

        /* 3rd pass: Turn on all streams requested */
        for (uint32_t i = 0; i < streams.count; i++) {
            int streamId = streams.data.u8[i];
            QCameraStream *stream = QCameraStream::getStreamAtId(streamId);

            /* Increment the frame pending count in each stream class */

            /* Assuming we will have the stream obj in had at this point may be
             * may be multiple objs in which case we loop through array of streams */
            stream->onNewRequest();
        }
        ALOGV("%s:Freeing request using mRequestQueueSrc:%p",__func__,mRequestQueueSrc);
        /* Free the request buffer */
        mRequestQueueSrc->free_request(mRequestQueueSrc,request);
        mPendingRequests--;
        ALOGV("%s:Completed request",__func__);
    }

    QCameraStream::streamOffAll();
}
下面這個方法解釋mRequestQueueSrc來自何處

// Connect to camera2 HAL as consumer (input requests/reprocessing):
// registers this queue as the device's request source and remembers the
// device on success.
status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) {
    ATRACE_CALL();
    const status_t res = d->ops->set_request_queue_src_ops(d, this);
    if (res != OK) {
        return res;
    }
    mDevice = d;
    return OK;
}

由於

QCameraStream_preview->prepareStream
    QCameraStream->initStream
        mm_camera_vtbl_t->ops->add_stream(... stream_cb_routine ...) // 這是用來返回數據的callback,帶mm_camera_super_buf_t*和void*兩參數
            mm_camera_add_stream
                mm_channel_fsm_fn(..., MM_CHANNEL_EVT_ADD_STREAM, ..., mm_evt_paylod_add_stream_t)
                    mm_channel_fsm_fn_stopped
                        mm_channel_add_stream(..., mm_camera_buf_notify_t, ...)
                            mm_stream_fsm_inited

在mm_channel_add_stream當中有把mm_camera_buf_notify_t包裝到mm_stream_t

mm_stream_t *stream_obj = NULL;
/* initialize stream object */
memset(stream_obj, 0, sizeof(mm_stream_t));
/* cd through intf always palced at idx 0 of buf_cb */
stream_obj->buf_cb[0].cb = buf_cb; // callback
stream_obj->buf_cb[0].user_data = user_data;
stream_obj->buf_cb[0].cb_count = -1; /* infinite by default */ // 默認無限次數

而且mm_stream_fsm_inited,傳進來的event參數也是MM_STREAM_EVT_ACQUIRE

/* State handler for a stream in MM_STREAM_STATE_INITED.
 * Only MM_STREAM_EVT_ACQUIRE is legal here: open the /dev video node, set
 * the V4L2 extended mode and fetch the stream instance handle, moving the
 * stream to MM_STREAM_STATE_ACQUIRED on success.
 * Returns 0 on success, -1 on any failure. */
int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
                             mm_stream_evt_type_t evt,
                             void * in_val,
                             void * out_val)
{
    int32_t rc = 0;
    char dev_name[MM_CAMERA_DEV_NAME_LEN];

    switch (evt) {
    case MM_STREAM_EVT_ACQUIRE:
        if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
            CDBG_ERROR("%s: NULL channel or camera obj\n", __func__);
            rc = -1;
            break;
        }

        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
                 mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl));

        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK); // open the video device
        if (my_obj->fd <= 0) {
            CDBG_ERROR("%s: open dev returned %d\n", __func__, my_obj->fd);
            rc = -1;
            break;
        }
        rc = mm_stream_set_ext_mode(my_obj);
        if (0 == rc) {
            my_obj->state = MM_STREAM_STATE_ACQUIRED; // mm_stream_state_type_t
        } else {
            /* failed setting ext_mode
             * close fd */
            if(my_obj->fd > 0) {
                close(my_obj->fd);
                my_obj->fd = -1;
            }
            break;
        }
        rc = get_stream_inst_handle(my_obj);
        if(rc) {
            if(my_obj->fd > 0) {
                close(my_obj->fd);
                my_obj->fd = -1;
            }
        }
        break;
    default:
        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
                   __func__,evt,my_obj->state);
        rc = -1;
        break;
    }
    return rc;
}

還有

QCameraStream->streamOn
    mm_camera_vtbl_t->ops->start_streams
        mm_camera_intf_start_streams
            mm_camera_start_streams
                mm_channel_fsm_fn(..., MM_CHANNEL_EVT_START_STREAM, ...)
                    mm_stream_fsm_fn(..., MM_STREAM_EVT_START, ...)
                        mm_camera_cmd_thread_launch // 啓動CB線程
                        mm_stream_streamon(mm_stream_t)
                            mm_camera_poll_thread_add_poll_fd(..., mm_stream_data_notify , ...)

/* Poll-thread callback fired when the stream's fd becomes readable:
 * dequeues one frame from the driver, updates buffer book-keeping and
 * reference counts under buf_lock, then hands the frame to the command
 * thread's queue for callback dispatch. */
static void mm_stream_data_notify(void* user_data)
{
    mm_stream_t *my_obj = (mm_stream_t*)user_data;
    int32_t idx = -1, i, rc;
    uint8_t has_cb = 0;
    mm_camera_buf_info_t buf_info;

    if (NULL == my_obj) {
        return;
    }

    if (MM_STREAM_STATE_ACTIVE_STREAM_ON != my_obj->state) {
        /* this Cb will only received in active_stream_on state
         * if not so, return here */
        CDBG_ERROR("%s: ERROR!! Wrong state (%d) to receive data notify!",
                   __func__, my_obj->state);
        return;
    }

    memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));

    pthread_mutex_lock(&my_obj->buf_lock);
    rc = mm_stream_read_msm_frame(my_obj, &buf_info); // read the frame via ioctl(..., VIDIOC_DQBUF, ...)
    if (rc != 0) {
        pthread_mutex_unlock(&my_obj->buf_lock);
        return;
    }
    idx = buf_info.buf->buf_idx;

    /* update buffer location */
    my_obj->buf_status[idx].in_kernel = 0;

    /* update buf ref count */
    if (my_obj->is_bundled) {
        /* need to add into super buf since bundled, add ref count */
        my_obj->buf_status[idx].buf_refcnt++;
    }

    for (i=0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
        if(NULL != my_obj->buf_cb[i].cb) {
            /* for every CB, add ref count */
            my_obj->buf_status[idx].buf_refcnt++;
            has_cb = 1;
        }
    }
    pthread_mutex_unlock(&my_obj->buf_lock);

    mm_stream_handle_rcvd_buf(my_obj, &buf_info); // mm_camera_queue_enq: enqueue the frame
                                                  // (provided a callback is registered) and
                                                  // sem_post the queue; the thread started by
                                                  // mm_camera_cmd_thread_launch then dequeues
                                                  // the data and runs the callbacks
}

這樣就會致使在stream on的時候stream_cb_routine(實如今QCameraStream當中)就會一直執行

/* Per-stream callback registered at add_stream time: dispatches the
 * received super-buf according to the stream's image mode. */
void stream_cb_routine(mm_camera_super_buf_t *bufs,
                       void *userdata)
{
    QCameraStream *p_obj=(QCameraStream*) userdata;
    switch (p_obj->mExtImgMode) { // this mode was already fixed at prepareStream time
    case MM_CAMERA_PREVIEW:
        ALOGE("%s : callback for MM_CAMERA_PREVIEW", __func__);
        ((QCameraStream_preview *)p_obj)->dataCallback(bufs); // are CAMERA_PREVIEW and CAMERA_VIDEO
                                                              // really handled identically?
        break;
    case MM_CAMERA_VIDEO:
        ALOGE("%s : callback for MM_CAMERA_VIDEO", __func__);
        ((QCameraStream_preview *)p_obj)->dataCallback(bufs);
        break;
    case MM_CAMERA_SNAPSHOT_MAIN:
        ALOGE("%s : callback for MM_CAMERA_SNAPSHOT_MAIN", __func__);
        p_obj->p_mm_ops->ops->qbuf(p_obj->mCameraHandle, // snapshot main: re-queue the buffer to the driver
                                   p_obj->mChannelId,
                                   bufs->bufs[0]);
        break;
    case MM_CAMERA_SNAPSHOT_THUMBNAIL:
        break;
    default:
        break;
    }
}

void QCameraStream::dataCallback(mm_camera_super_buf_t *bufs)
{
    if (mPendingCount != 0) { // Does this dataCallback really keep firing continuously?
                               // The callback count registered at add_stream time defaults
                               // to -1, i.e. infinite, which seems the only explanation:
                               // without a continuous callback, nobody would notice
                               // mPendingCount being bumped in onNewRequest.
        ALOGD("Got frame request");
        pthread_mutex_lock(&mFrameDeliveredMutex);
        mPendingCount--;
        ALOGD("Completed frame request");
        pthread_cond_signal(&mFrameDeliveredCond);
        pthread_mutex_unlock(&mFrameDeliveredMutex);
        processPreviewFrame(bufs);
    } else {
        p_mm_ops->ops->qbuf(mCameraHandle,
                mChannelId, bufs->bufs[0]); // nobody needs the data: push the buffer straight
                                            // back into the DRV queue (ends up as a V4L2 QBUF)
    }
}

比較好奇的是在手裏這版QCam HAL的code當中camera2_frame_queue_dst_ops_t沒有被用到

/* camera2_device_ops_t::set_frame_queue_dst_ops hook: just remembers the
 * framework's frame-queue destination ops. */
int QCameraHardwareInterface::set_frame_queue_dst_ops(
    const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    mFrameQueueDst = frame_dst_ops; // seemingly never used anywhere in this HAL version
    return OK;
}

這樣Camera Service的FrameProcessor的Camera2Device->getNextFrame就永遠也獲取不到數據,不知道是否是我手裏的這版代碼的問題,並且在最新的Qualcomm Camera HAL代碼也不在AOSP樹當中了,而是直接以proprietary形式給的so檔,這只是題外話。

因此整體來看,這裏可能有幾個QCameraStream,每一個stream負責本身的事情。
他們之間也有相互關係,好比有可能新的stream進來會致使其餘已經stream-on的stream從新啓動。

在Camera HAL 2.0當中咱們還有個重點就是re-process stream
簡單的說就是把output stream做爲input stream再次添加到BufferQueue中,讓其餘的consumer來處理,就相似一個chain同樣。
目前在ZslProcessor當中有用到。

ZslProcessor->updateStream
    Camera2Device->createStream
    Camera2Device->createReprocessStreamFromStream // release的時候是先delete re-process
        new ReprocessStreamAdapter
        ReprocessStreamAdapter->connectToDevice
            camera2_device_t->ops->allocate_reprocess_stream_from_stream

這裏ReprocessStreamAdapter實際就是camera2_stream_in_ops_t,負責管理re-process的stream。

可是這版的代碼Qualcomm也彷佛沒有去實現,因此暫時到此爲止,若是後面找到相應的代碼,再來看。

因此看完這麼多沒必要以爲驚訝,站在Camera Service的立場,它持有兩個MetadataQueue,mRequestQueue和mFrameQueue。
app請求的動做,好比set parameter/start preview/start recording會直接轉化爲request,放到mRequestQueue,而後去重啓preview/recording stream。
好比capture也會轉換爲request,放到mRequestQueue。
若是有必要,會經過notify_request_queue_not_empty去通知QCam HAL有請求須要處理,而後QCam HAL會啓動一個線程(QCameraHardwareInterface::runCommandThread)去作處理。直到全部request處理完畢退出線程。
在這個處理的過程中會分別調用到每一個stream的processPreviewFrame,有必要的話它每一個都會調用本身後續的callback。
還有一個實現的細節就是,stream_cb_routine是從start stream就有開始註冊在同一個channel上的,而stream_cb_routine間接調用QCameraStream::dataCallback(固然stream_cb_routine有去指定這個callback回來的緣由是什麼,就好調用對應的dataCallback),這個callback是一直都在回來,因此每次new request讓mPendingCount加1以後,dataCallback回來纔會調用processPreviewFrame,不然就直接把buffer再次壓回DRV隊列當中。

void QCameraStream::dataCallback(mm_camera_super_buf_t *bufs)
{
    if (mPendingCount != 0) { // Does this dataCallback really keep firing continuously?
                               // The callback count registered at add_stream time defaults
                               // to -1, i.e. infinite, which seems the only explanation:
                               // without a continuous callback, nobody would notice
                               // mPendingCount being bumped in onNewRequest.
        ALOGD("Got frame request");
        pthread_mutex_lock(&mFrameDeliveredMutex);
        mPendingCount--;
        ALOGD("Completed frame request");
        pthread_cond_signal(&mFrameDeliveredCond);
        pthread_mutex_unlock(&mFrameDeliveredMutex);
        processPreviewFrame(bufs);
    } else {
        p_mm_ops->ops->qbuf(mCameraHandle,
                mChannelId, bufs->bufs[0]); // nobody needs the data: push the buffer straight
                                            // back into the DRV queue (ends up as a V4L2 QBUF)
    }
}


void QCameraStream::onNewRequest()
{
    ALOGI("%s:E",__func__);
    pthread_mutex_lock(&mFrameDeliveredMutex);
    ALOGI("Sending Frame request");
    mPendingCount++;
    pthread_cond_wait(&mFrameDeliveredCond, &mFrameDeliveredMutex); // block until this request has been
                                                                    // delivered before taking the next one.
                                                                    // NOTE(review): cond_wait without a
                                                                    // predicate loop is vulnerable to
                                                                    // spurious wakeups -- confirm intent.
    ALOGV("Got frame");
    pthread_mutex_unlock(&mFrameDeliveredMutex);
    ALOGV("%s:X",__func__);
}

processPreviewFrame會調用到建立這個stream的時候關聯進來的那個BufferQueue的enqueue_buffer方法,把數據塞到BufferQueue中,而後對應的consumer就會收到了。
好比在Android Camera HAL 2.0當中目前有
camera2/BurstCapture.h
camera2/CallbackProcessor.h
camera2/JpegProcessor.h
camera2/StreamingProcessor.h
camera2/ZslProcessor.h
實現了對應的Consumer::FrameAvailableListener,可是burst-capture如今能夠不考慮,由於都還只是stub實現。

ZslProcessor.h和CaptureSequencer.h都有去實現FrameProcessor::FilteredListener的onFrameAvailable(...)
可是咱們以前講過這版QCam HAL沒有實現,因此FrameProcessor是沒法獲取到meta data的。
因此這樣來看onFrameAbailable都不會獲得通知。(我相信是我手裏的這版代碼的問題啦)

以前咱們說過QCam HAL有部分東西沒有實現,因此mFrameQueue就不會有數據,可是它原本應該是DRV回來的元數據會queue到這裏面。

另外
CaptureSequencer.h還有去實現onCaptureAvailable,當JpegProcessor處理完了會通知它。

好奇?多個stream(s)不是同時返回的,這樣若是CPU處理快慢不一樣就會有時間差?還有很好奇DRV是如何處理Video snapshot的,若是buffer是順序的,就會存在Video少一個frame,若是不是順序的,那就是DRV一次返回多個buffer?之前真沒有想過這個問題@_@

相關文章
相關標籤/搜索