1、Camera 架構
NOTE:這是 Android Camera API 1 ,Camera 的架構與 Android 總體架構是保持一致的: Framework : Camera.java Android Runtime : android_hardware_Camera.cpp Library : Client (Camera.cpp, ICameraClient.cpp, etc...) Server (CameraService.cpp, ICameraService.cpp, etc...) HAL : CameraHardwareInterface.h
架構簡圖:
根據架構簡圖能夠看到,實際上 Camera 的架構與 Android 架構是一一對應的,上層應用調用 Camera 相關的方法後,指令依次經過框架層、運行時環境、本地庫、硬件抽象層,最終到達具體設備。設備執行動做後,得到的數據又會沿着反方向依次發送到最上層。
須要注意的是,在本地庫這一層中,涉及到一個 C/S 結構:
相關源碼位置(Android 7.1 源碼):
Application:(這部分不是學習的重點) packages/apps/Camera2/src/com/android/camera/*** Framework: /frameworks/base/core/java/android/hardware/Camera.java Android Runtime: frameworks/base/core/jni/android_hardware_Camera.cpp C/C++ Libraries: Client: frameworks/av/camera/CameraBase.cpp frameworks/av/camera/Camera.cpp frameworks/av/camera/ICamera.cpp frameworks/av/camera/aidl/android/hardware/ICamera.aidl frameworks/av/camera/aidl/android/hardware/ICameraClient.aidl Server: frameworks/av/camera/cameraserver/main_cameraserver.cpp frameworks/av/services/camera/libcameraservice/CameraService.cpp frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp frameworks/av/camera/aidl/android/hardware/ICameraService.aidl HAL: HAL 1:(此篇文章分析) frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h HAL 3:(參考:http://www.javashuo.com/article/p-sleuwkox-hu.html ) frameworks/av/services/camera/libcameraservice/device3/***
2、Camera Open 調用流程
frameworks/base/core/java/android/hardware/Camera.java
/*** * Creates a new Camera object to access * the first back-facing camera on the * device. If the device does not have a back-facing camera, * this returns null. * @see #open(int) */ public static Camera open() { int numberOfCameras = getNumberOfCameras(); //獲取 Camera 設備的個數。 CameraInfo cameraInfo = new CameraInfo(); for (int i = 0; i < numberOfCameras; i++) { getCameraInfo(i, cameraInfo); //依次獲取設備信息, if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) { //若是是獲取到後置攝像頭(默認),則調用 new Camera(int) 構造對應的攝像頭實例。 return new Camera(i); } } return null; }
Camera(int cameraId)
:
/** used by Camera#open, Camera#open(int) */ Camera(int cameraId) { int err = cameraInitNormal(cameraId); //經過調用 cameraInitNormal(Id) 方法對指定攝像頭進行初始化。 if (checkInitErrors(err)) { if (err == -EACCES) { throw new RuntimeException("Fail to connect to camera service"); } else if (err == -ENODEV) { throw new RuntimeException("Camera initialization failed"); } // Should never hit this. throw new RuntimeException("Unknown camera error"); } }
cameraInitNormal(int cameraId)
:
private int cameraInitNormal(int cameraId) { return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT); //調用 cameraInitVersion(int cameraId, int halVersion),指定 halVersion 參數 }
cameraInitVersion(int cameraId, int halVersion)
:
private int cameraInitVersion(int cameraId, int halVersion) { //將各個回調函數置空 mShutterCallback = null; mRawImageCallback = null; mJpegCallback = null; mPreviewCallback = null; mPostviewCallback = null; mUsingPreviewAllocation = false; mZoomListener = null; Looper looper; //經過 Looper 對事件處理對象進行實例化後,就調用 native_setup 方法進入 JNI(Java Native Interface) 庫中調用對應的函數。 if ((looper = Looper.myLooper()) != null) { mEventHandler = new EventHandler(this, looper); } else if ((looper = Looper.getMainLooper()) != null) { mEventHandler = new EventHandler(this, looper); } else { mEventHandler = null; } return native_setup(new WeakReference<Camera>(this), cameraId, halVersion, ActivityThread.currentOpPackageName()); }
至此,open()
方法開始進入 Android Runtime
層。
簡圖總結:
2. android_hardware_Camera.cpp(Android Runtime):frameworks/base/core/jni/android_hardware_Camera.cpp
// connect to camera service static jint android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz, jobject weak_this, jint cameraId, jint halVersion, jstring clientPackageName) { // convert jstring to String16(clientPackageName -> clientName) //剛開始要先把 作一個類型轉換,變成 ...... ...... sp<Camera> camera; //創建一個 類型的 if (halVersion == CAMERA_HAL_API_VERSION_NORMAL_CONNECT) { /***** NOTE THIS *****/ // Default path: hal version is don't care, do normal camera connect. camera = Camera::connect(cameraId, clientName, //進入 的 C/S 結構中,而 則屬於 Camera::USE_CALLING_UID, Camera::USE_CALLING_PID); } else { jint status = Camera::connectLegacy(cameraId, halVersion, clientName, Camera::USE_CALLING_UID, camera); if (status != NO_ERROR) { return status; } } if (camera == NULL) { return -EACCES; } //最後對返回的實例進行一些基本的檢查,並保存上下文 // make sure camera hardware is alive if (camera->getStatus() != NO_ERROR) { return NO_INIT; } // save context in opaque field ...... ...... }clientPackageNameclientNameCameraStrongPointer(sp),而後經過Camera::connect()
或Camera::connectLegacy()
,讓客戶端與服務端進行鏈接,並返回相應的Camera
實例Camera::connect()Camera::connectLegacy()CameraC/C++ LibrariesCameraClient
簡圖總結:
3.Camera(C/C++ Libraries):
(1)frameworks/av/include/camera/Camera.h
template <> struct CameraTraits<Camera> { typedef CameraListener TCamListener; typedef ::android::hardware::ICamera TCamUser; typedef ::android::hardware::ICameraClient TCamCallbacks; typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService) (const sp<::android::hardware::ICameraClient>&, int, const String16&, int, int, /*out*/ sp<::android::hardware::ICamera>*); static TCamConnectService fnConnectService; };
(2)framework/av/camera/Camera.cpp
CameraTraits<Camera>::TCamConnectService CameraTraits<Camera>::fnConnectService = //注: 是對應到 函數的 &::android::hardware::ICameraService::connect;fnConnectServiceICameraService::connect
Camera::connect
:
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName, int clientUid, int clientPid) { return CameraBaseT::connect(cameraId, //直接調用了 這是定義在 中的函數。 clientPackageName, clientUid, clientPid); }CameraBaseT::connect()CameraBase.cpp
(3)frameworks/av/include/camera/CameraBase.h
template <typename TCam, typename TCamTraits = CameraTraits<TCam> > // 對應 ; 對應 TCamCameraTCamTraitsCameraTraits<Camera>
注意類成員變量聲明部分,便可知道 CameraBaseT
對應 CameraBase<Camera>
。
sp<TCamUser> mCamera; status_t mStatus; sp<TCamListener> mListener; const int mCameraId; /***** NOTE THIS *****/ typedef CameraBase<TCam> CameraBaseT;
(4)framework/av/camera/CameraBase.cpp
template <typename TCam, typename TCamTraits> sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId, const String16& clientPackageName, int clientUid, int clientPid) { ALOGV("%s: connect", __FUNCTION__); /***** NOTE THIS *****/ sp<TCam> c = new TCam(cameraId); //實例化一個 經過 獲取 指針。Camera,CameraICameraClient
sp<TCamCallbacks> cl = c; const sp<::android::hardware::ICameraService> cs = getCameraService(); binder::Status ret; if (cs != nullptr) { /***** NOTE THIS *****/ TCamConnectService fnConnectService = TCamTraits::fnConnectService; // ret = (cs.get()->*fnConnectService)(cl, cameraId, //經過 函數得到一個 , 即 實例。 clientPackageName, clientUid, clientPid, /*out*/ &c->mCamera); } if (ret.isOk() && c->mCamera != nullptr) { /***** NOTE THIS *****/ IInterface::asBinder(c->mCamera)->linkToDeath(c); //將 實例與 創建聯繫。 c->mStatus = NO_ERROR; } else { ALOGW("An error occurred while connecting to camera %d: %s", cameraId, (cs != nullptr) ? "Service not available" : ret.toString8().string()); c.clear(); } return c; }經過getCameraService()
函數獲取ICameraService
。getCameraService()ICameraServiceICameraService::connect()mCameraICameraICameraBinder
// establish binder interface to camera service template <typename TCam, typename TCamTraits> const sp<::android::hardware::ICameraService> CameraBase<TCam, TCamTraits>::getCameraService() { Mutex::Autolock _l(gLock); /***** NOTE THIS *****/ if (gCameraService.get() == 0) { // 是一個 char value[PROPERTY_VALUE_MAX]; property_get("config.disable_cameraservice", value, "0"); if (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0) { return gCameraService; //首先調用 } /***** NOTE THIS *****/ //若沒有返回,則經過 來獲取一個 ,這個過程當中主要是經過 來進行數據獲取。 sp<IServiceManager> sm = defaultServiceManager(); sp<IBinder> binder; do { binder = sm->getService(String16(kCameraServiceName)); if (binder != 0) { break; } ALOGW("CameraService not published, waiting..."); usleep(kCameraServicePollDelay); } while(true); if (gDeathNotifier == NULL) { gDeathNotifier = new DeathNotifier(); } binder->linkToDeath(gDeathNotifier); /***** NOTE THIS *****/ gCameraService = interface_cast<::android::hardware::ICameraService>(binder); } ALOGE_IF(gCameraService == 0, "no CameraService!?"); return gCameraService; }gCameraServiceICameraService,調用get
函數,若是能獲取到ICameraService
則返回。getICameraServiceICameraServiceIServiceManagerICameraServiceIBinder
4.ICameraService:
CameraBase
中,所調用的 connect
對應的是 CameraService::connect()
。 (1)frameworks/av/camera/aidl/android/hardware/ICameraService.aidl
/** * Open a camera device through the old camera API */ ICamera connect(ICameraClient client, int cameraId, String opPackageName, int clientUid, int clientPid);
(2)out/target/product/generic/obj/SHARED_LIBRARIES/libcamera_client_intermediates/aidl-generated/src/aidl/android/hardware/ICameraService.cpp
//這個 以及其頭文件 都是根據其對應的 文件自動生成的。 ::android::binder::Status BpCameraService::connect(const ::android::sp<::android::hardware::ICameraClient>& client, //這裏是 ,它繼承了 ,同時也繼承了 。 int32_t cameraId, const ::android::String16& opPackageName, int32_t clientUid, int32_t clientPid, ::android::sp<::android::hardware::ICamera>* _aidl_return) { ::android::Parcel _aidl_data; // 能夠當作是 通信中的信息傳遞中介,後面將相應的數據寫入Parcel ::android::Parcel _aidl_reply; //返回的 數據判斷是否有 ::android::status_t _aidl_ret_status = ::android::OK; ::android::binder::Status _aidl_status; _aidl_ret_status = _aidl_data.writeInterfaceToken(getInterfaceDescriptor()); //首先把。 /***** NOTE THIS *****/ if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_data.writeStrongBinder(::android::hardware::ICameraClient::asBinder(client)); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_data.writeInt32(cameraId); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_data.writeString16(opPackageName); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_data.writeInt32(clientUid); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_data.writeInt32(clientPid); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } /***** NOTE THIS *****/ _aidl_ret_status = remote()->transact(ICameraService::CONNECT, _aidl_data, &_aidl_reply); //調用遠程接口 中的處理函數 。 if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_ret_status = _aidl_status.readFromParcel(_aidl_reply); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } if (!_aidl_status.isOk()) { return _aidl_status; } _aidl_ret_status = _aidl_reply.readStrongBinder(_aidl_return); if (((_aidl_ret_status) != (::android::OK))) { goto _aidl_error; } _aidl_error: _aidl_status.setFromStatusT(_aidl_ret_status); return _aidl_status; 
}ICameraService.cppICameraService.haidlBpCameraserviceICameraServiceBpInterfaceParcelBinderreplyerrorremote()transact()
BnCameraService::onTransact()
:消息處理函數
case Call::CONNECT: { ::android::sp<::android::hardware::ICameraClient> in_client; int32_t in_cameraId; ::android::String16 in_opPackageName; int32_t in_clientUid; int32_t in_clientPid; /***** NOTE THIS *****/ ::android::sp<::android::hardware::ICamera> _aidl_return; if (!(_aidl_data.checkInterface(this))) { _aidl_ret_status = ::android::BAD_TYPE; break; } //接收 傳來的數據 _aidl_ret_status = _aidl_data.readStrongBinder(&in_client); if (((_aidl_ret_status) != (::android::OK))) { break; } _aidl_ret_status = _aidl_data.readInt32(&in_cameraId); if (((_aidl_ret_status) != (::android::OK))) { break; } _aidl_ret_status = _aidl_data.readString16(&in_opPackageName); if (((_aidl_ret_status) != (::android::OK))) { break; } _aidl_ret_status = _aidl_data.readInt32(&in_clientUid); if (((_aidl_ret_status) != (::android::OK))) { break; } _aidl_ret_status = _aidl_data.readInt32(&in_clientPid); if (((_aidl_ret_status) != (::android::OK))) { break; } /***** NOTE THIS *****/ ::android::binder::Status _aidl_status(connect(in_client, in_cameraId, in_opPackageName, in_clientUid, in_clientPid, &_aidl_return)); //調用了具體的 函數獲取 而且返 _aidl_ret_status = _aidl_status.writeToParcel(_aidl_reply); if (((_aidl_ret_status) != (::android::OK))) { break; } if (!_aidl_status.isOk()) { break; } /***** NOTE THIS *****/ _aidl_ret_status = _aidl_reply->writeStrongBinder(::android::hardware::ICamera::asBinder(_aidl_return)); if (((_aidl_ret_status) != (::android::OK))) { break; } } break;BpconnectICamera
5.ICamera:
(1)frameworks/av/camera/ICamera.cpp
virtual status_t connect(const sp<ICameraClient>& cameraClient) // 類中的 函數 { Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); data.writeStrongBinder(IInterface::asBinder(cameraClient)); remote()->transact(CONNECT, data, &reply); return reply.readInt32(); }BpCameraconnect()
BnCamera
類中,onTransact
函數則有相應的處理:
case CONNECT: { CHECK_INTERFACE(ICamera, data, reply); sp<ICameraClient> cameraClient = interface_cast<ICameraClient>(data.readStrongBinder()); reply->writeInt32(connect(cameraClient)); return NO_ERROR; } break;
frameworks/av/services/camera/libcameraservice/CameraService.cpp
Status CameraService::connect( const sp<ICameraClient>& cameraClient, int cameraId, const String16& clientPackageName, int clientUid, int clientPid, /*out*/ sp<ICamera>* device) { ATRACE_CALL(); Status ret = Status::ok(); String8 id = String8::format("%d", cameraId); sp<Client> client = nullptr; //真正實現邏輯是在 函數中 ret = connectHelper<ICameraClient,Client>(cameraClient, id, CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, clientPid, API_1, /*legacyMode*/ false, /*shimUpdateOnly*/ false, /*out*/client); if(!ret.isOk()) { logRejected(id, getCallingPid(), String8(clientPackageName), ret.toString8()); return ret; } *device = client; //得到一個客戶端實例而且經過 返回 return ret; }connectHelper()*device
(3)frameworks/av/services/camera/libcameraservice/CameraService.h
sp<BasicClient> clientTmp = nullptr; std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial; if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel, IInterface::asBinder(cameraCb), clientName8, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) { /***** do something *****/ } /***** NOTE THIS *****/ if (clientTmp.get() != nullptr) { //若是客戶端實例已經存在於 ,則直接將其取出返回 // Handle special case for API1 MediaRecorder where the existing client is returned device = static_cast<CLIENT*>(clientTmp.get()); return ret; } // give flashlight a chance to close devices if necessary. mFlashlight->prepareDeviceOpen(cameraId); // TODO: Update getDeviceVersion + HAL interface to use strings for Camera IDs int id = cameraIdToInt(cameraId); if (id == -1) { ALOGE("%s: Invalid camera ID %s, cannot get device version from HAL.", __FUNCTION__, cameraId.string()); return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Bad camera ID \"%s\" passed to camera open", cameraId.string()); } int facing = -1; /***** NOTE THIS *****/ int deviceVersion = getDeviceVersion(id, /*out*/&facing); //獲取 ,而後再調用 函數建立一個客戶端。 sp<BasicClient> tmp = nullptr; if(!(ret = makeClient(this, cameraCb, clientPackageName, id, facing, clientPid, clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel, /*out*/&tmp)).isOk()) { return ret; } client = static_cast<CLIENT*>(tmp.get()); LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state", __FUNCTION__); /***** NOTE THIS *****/ if ((err = client->initialize(mModule)) != OK) { //調用其 函數進行初始化,注意其傳入的參數是 ,這個參數是鏈接 與 的關鍵參數。 /***** do somthing *****/ } // Update shim paremeters for legacy clients if (effectiveApiLevel == API_1) { // Assume we have always received a Client subclass for API1 sp<Client> shimClient = reinterpret_cast<Client*>(client.get()); String8 rawParams = shimClient->getParameters(); CameraParameters params(rawParams); auto cameraState = getCameraState(cameraId); if (cameraState != 
nullptr) { cameraState->setShimParams(params); } else { ALOGE("%s: Cannot update shim parameters for camera %s, no such device exists.", __FUNCTION__, cameraId.string()); } } if (shimUpdateOnly) { // If only updating legacy shim parameters, immediately disconnect client mServiceLock.unlock(); client->disconnect(); mServiceLock.lock(); } else { // Otherwise, add client to active clients list finishConnectLocked(client, partial); } } // lock is destroyed, allow further connect calls // Important: release the mutex here so the client can call back into the service from its // destructor (can be at the end of the call) device = client;MediaRecorderdeviceVersionmakeClient()initialize()mModuleLibrariesHAL
(4)frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
status_t CameraClient::initialize(CameraModule *module) { int callingPid = getCallingPid(); status_t res; LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId); // Verify ops permissions res = startCameraOps(); if (res != OK) { return res; } char camera_device_name[10]; snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId); /***** NOTE THIS *****/ mHardware = new CameraHardwareInterface(camera_device_name); //獲取 實例。 res = mHardware->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); mHardware.clear(); return res; } //設置三個回調函數(這裏與數據流密切相關) mHardware->setCallbacks(notifyCallback, dataCallback, dataCallbackTimestamp, (void *)(uintptr_t)mCameraId); // Enable zoom, error, focus, and metadata messages by default enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS | CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE); LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId); return OK; }CameraHardwareInterface
6.HAL:frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h
status_t initialize(CameraModule *module) { ALOGI("Opening camera %s", mName.string()); camera_info info; status_t res = module->getCameraInfo(atoi(mName.string()), &info); //經過 ,從 層的庫中調用相關的函數獲取 設備信息 if (res != OK) { return res; } int rc = OK; //根據模塊 的版本,判斷是用 函數仍是用 調用 後,經過 庫與 if (module->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 && info.device_version > CAMERA_DEVICE_API_VERSION_1_0) { // Open higher version camera device as HAL1.0 device. rc = module->openLegacy(mName.string(), CAMERA_DEVICE_API_VERSION_1_0, (hw_device_t **)&mDevice); } else { rc = module->open(mName.string(), (hw_device_t **)&mDevice); } if (rc != OK) { ALOGE("Could not open camera %s: %d", mName.string(), rc); return rc; } initHalPreviewWindow(); return rc; }moduleHALCameraAPIopenopenLegacy,openHALLinux Kernel交互
至此, Camera1的Open
調用流程分析完畢。
3、Camera hw_get_module() 相關邏輯
本節由hw_get_module()
函數入手,去探究 Libraries
層是如何調用 HAL
層的庫中的函數的,CameraService
是在開機時就會啓動,會調用一個名爲 onFirstRef()
的成員函數,下面就從這裏開始分析。
1. CameraService:framework/av/services/camera/libcameraservice/CameraService.cpp
BnCameraService::onFirstRef(); //首先調用其基類的 函數 // Update battery life tracking if service is restarting BatteryNotifier& notifier(BatteryNotifier::getInstance()); //更新 (從看應該跟電池有關,估計是檢測電池電量太低就不開啓閃光燈或者相機)。 notifier.noteResetCamera(); notifier.noteResetFlashlight(); camera_module_t *rawModule; /*** NOTE THIS ***/ int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID, //經過 函數獲取 。 (const hw_module_t **)&rawModule); if (err < 0) { ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err)); logServiceError("Could not load camera HAL module", err); return; } /*** NOTE THIS ***/ mModule = new CameraModule(rawModule); //利用 建立 的實例, 是 類。 err = mModule->init();onFirstRefnotifierBatteryNotifierhw_get_modulerawModulerawModulemModulemModuleCameraModule
2. hardware:
(1)hardware/libhardware/include/hardware/hardware.h
/** * Name of the hal_module_info */ #define HAL_MODULE_INFO_SYM HMI /** * Name of the hal_module_info as a string */ #define HAL_MODULE_INFO_SYM_AS_STR "HMI"
/** * Get the module info associated with a module by id. * * @return: 0 == success, <0 == error and *module == NULL */ int hw_get_module(const char *id, const struct hw_module_t **module); //做用是經過傳入的 來獲取模塊相關的信息(成功則返回 ,出錯則返回值小於 且 ) /** * Get the module info associated with a module instance by class 'class_id' * and instance 'inst'. * * Some modules types necessitate multiple instances. For example audio supports * multiple concurrent interfaces and thus 'audio' is the module class * and 'primary' or 'a2dp' are module interfaces. This implies that the files * providing these modules would be named audio.primary.<variant>.so and * audio.a2dp.<variant>.so * * @return: 0 == success, <0 == error and *module == NULL */ int hw_get_module_by_class(const char *class_id, const char *inst, //做用是經過 獲取與模塊實例相關的信息。 const struct hw_module_t **module);id00*module == NULLclass_id
(2)hardware/libhardware/hardware.c
static const char *variant_keys[] = { "ro.hardware", /* This goes first so that it can pick up a different file on the emulator. */ "ro.product.board", "ro.board.platform", "ro.arch" };
int hw_get_module(const char *id, const struct hw_module_t **module) { return hw_get_module_by_class(id, NULL, module); //讀取庫文件,嘗試的順序是: } ... //經過 函數加載模塊。 /* First try a property specific to the class and possibly instance */ snprintf(prop_name, sizeof(prop_name), "ro.hardware.%s", name); if (property_get(prop_name, prop, NULL) > 0) { if (hw_module_exists(path, sizeof(path), name, prop) == 0) { goto found; } } /* Loop through the configuration variants looking for a module */ for (i=0 ; i<HAL_VARIANT_KEYS_COUNT; i++) { if (property_get(variant_keys[i], prop, NULL) == 0) { continue; } if (hw_module_exists(path, sizeof(path), name, prop) == 0) { goto found; } } /* Nothing found, try the default */ if (hw_module_exists(path, sizeof(path), name, "default") == 0) { goto found; } return -ENOENT; found: /* load the module, if this fails, we're doomed, and we should not try * to load a different variant. */ /*** NOTE THIS ***/ return load(class_id, path, module);ro.hardware、ro.product.board、
ro.board.platform、ro.arch,若以上屬性對應的庫文件均不存在,則回退到 default;找到庫文件後,經過 load() 函數加載模塊,若最終沒有找到任何庫文件則返回 -ENOENT。
load()
:
NOTE:
爲了獲取動態連接庫中的結構體,咱們須要用到一個字符串 sym,sym 對應宏 HAL_MODULE_INFO_SYM_AS_STR,即 「HMI」。
動態連接庫 .so 文件,是一個 ELF 文件。
ELF:Executable and Linkable Format,可執行連接格式,ELF 文件頭保存了一個路線圖,用於描述文件的組織結構。
經過 readelf -s 命令,咱們能夠查看對應的 .so 文件描述,能夠看到其中有一個 Name 屬性爲 HMI ,其對應的位置就是咱們所須要的結構體 hw_module_t。
因而經過 HMI 字段,就能夠從動態連接庫中讀取出相應的結構體,從而得以在 Libraries 層中調用 HAL 層的庫函數。
static int load(const char *id, const char *path, const struct hw_module_t **pHmi) { int status = -EINVAL; void *handle = NULL; struct hw_module_t *hmi = NULL; /* * load the symbols resolving undefined symbols before * dlopen returns. Since RTLD_GLOBAL is not or'd in with * RTLD_NOW the external symbols will not be global */ /*** NOTE THIS ***/ handle = dlopen(path, RTLD_NOW); //調用 函數獲取一個 。 if (handle == NULL) { char const *err_str = dlerror(); ALOGE("load: module=%s\n%s", path, err_str?err_str:"unknown"); status = -EINVAL; goto done; } /* Get the address of the struct hal_module_info. */ /*** NOTE THIS ***/ const char *sym = HAL_MODULE_INFO_SYM_AS_STR; hmi = (struct hw_module_t *)dlsym(handle, sym); //調用 函數從動態連接庫中獲取 類型的 。 if (hmi == NULL) { ALOGE("load: couldn't find symbol %s", sym); status = -EINVAL; goto done; } /* Check that the id matches */ if (strcmp(id, hmi->id) != 0) { ALOGE("load: id=%s != hmi->id=%s", id, hmi->id); status = -EINVAL; goto done; } hmi->dso = handle; /* success */ status = 0; /*** NOTE THIS ***/ done: if (status != 0) { hmi = NULL; if (handle != NULL) { dlclose(handle); handle = NULL; } } else { ALOGV("loaded HAL id=%s path=%s hmi=%p handle=%p", id, path, *pHmi, handle); } *pHmi = hmi; return status; }dlopen()handledlsym()hw_module_thmi
至此,得到了最終的 rawModule
,而後回到 onFirstRef()
中繼續分析。
(1)frameworks/av/services/camera/libcameraservice/common/CameraModule.cpp
CameraModule::CameraModule(camera_module_t *module) { if (module == NULL) { ALOGE("%s: camera hardware module must not be null", __FUNCTION__); assert(0); } mModule = module; // 是 類型。 }mModulecamera_module_t
init()
:
mModule
的 init()
函數。init()
函數,則 init
流程到這裏就能夠結束了。int CameraModule::init() { ATRACE_CALL(); int res = OK; if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 && mModule->init != NULL) { ATRACE_BEGIN("camera_module->init"); res = mModule->init(); ATRACE_END(); } mCameraInfoMap.setCapacity(getNumberOfCameras()); return res; }
(2)相關代碼:
hardware/libhardware/include/hardware/camera_common.h hardware/qcom/camera/QCamera2/QCamera2Factory.h hardware/qcom/camera/QCamera2/QCamera2Factory.cpp hardware/qcom/camera/QCamera2/QCamera2Hal.cpp
簡圖總結:
上面從 CameraService::onFirstRef() 入手,逐漸理順了以 hw_get_module() 爲中心的一個調用邏輯。實際上,Android HAL 層有一個通用的入口,即宏 HAL_MODULE_INFO_SYM,經過它,能夠獲取 HAL 層中的模塊實例,從而調用 HAL 層所提供的函數。理解了 HAL 層的入口,接下來就能夠去對 Camera.startPreview() 的控制流程進行分析,從而再次加深對 Camera 控制流的理解。
1.Frameworks:frameworks/base/core/java/android/hardware/Camera.java
/** * Starts capturing and drawing preview frames to the screen. * Preview will not actually start until a surface is supplied * with {@link #setPreviewDisplay(SurfaceHolder)} or * {@link #setPreviewTexture(SurfaceTexture)}. * * <p>If {@link #setPreviewCallback(Camera.PreviewCallback)}, * {@link #setOneShotPreviewCallback(Camera.PreviewCallback)}, or * {@link #setPreviewCallbackWithBuffer(Camera.PreviewCallback)} were * called, {@link Camera.PreviewCallback#onPreviewFrame(byte[], Camera)} * will be called when preview data becomes available. */ public native final void startPreview(); //給上層 application 提供一個接口, 進入 Runtime 層。
2.Android Runtime:frameworks/base/core/jni/android_hardware_Camera.cpp
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz) { ALOGV("startPreview"); sp<Camera> camera = get_native_camera(env, thiz, NULL); //調用 函數獲取一個 實例。 if (camera == 0) return; if (camera->startPreview() != NO_ERROR) { jniThrowRuntimeException(env, "startPreview failed"); return; } }get_native_camera()Camera
sp<Camera> get_native_camera(JNIEnv *env, jobject thiz, JNICameraContext** pContext) { sp<Camera> camera; Mutex::Autolock _l(sLock); JNICameraContext* context = reinterpret_cast<JNICameraContext*>(env->GetLongField(thiz, fields.context)); //從 中獲取關於 的上下文。 if (context != NULL) { camera = context->getCamera(); //從上下文信息中獲取 實例。 } ALOGV("get_native_camera: context=%p, camera=%p", context, camera.get()); if (camera == 0) { jniThrowRuntimeException(env, "Camera is being used after Camera.release() was called"); } if (pContext != NULL) *pContext = context; return camera; }DVMCameraCamera
3. Libraries:
(1)frameworks/av/camera/Camera.cpp
// start preview mode status_t Camera::startPreview() { ALOGV("startPreview"); sp <::android::hardware::ICamera> c = mCamera; // 便是在 過程當中返回的 ,它具體實現了 接口。 if (c == 0) return NO_INIT; return c->startPreview(); }mCameraconnectCameraClientstartPreview()
(2)frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// start preview mode status_t CameraClient::startPreview() { LOG1("startPreview (pid %d)", getCallingPid()); return startCameraMode(CAMERA_PREVIEW_MODE); //經過 函數進入具體的實現邏輯。 }startCameraMode
startCameraMode()
:
// start preview or recording status_t CameraClient::startCameraMode(camera_mode mode) { //根據傳入的參數 肯定進入的分支。 LOG1("startCameraMode(%d)", mode); Mutex::Autolock lock(mLock); status_t result = checkPidAndHardware(); if (result != NO_ERROR) return result; switch(mode) { case CAMERA_PREVIEW_MODE: if (mSurface == 0 && mPreviewWindow == 0) { LOG1("mSurface is not set yet."); // still able to start preview in this case. } return startPreviewMode();//調用 。 case CAMERA_RECORDING_MODE: if (mSurface == 0 && mPreviewWindow == 0) { ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode."); return INVALID_OPERATION; } return startRecordingMode(); default: return UNKNOWN_ERROR; } }CAMERA_PREVIEW_MODEstartPreviewMode()
startPreviewMode()
:
status_t CameraClient::startPreviewMode() { LOG1("startPreviewMode"); status_t result = NO_ERROR; // if preview has been enabled, nothing needs to be done if (mHardware->previewEnabled()) { return NO_ERROR; //若是預覽已經存在,則直接返回成功信息。 } if (mPreviewWindow != 0) { mHardware->setPreviewScalingMode( // 是 的實例,在 過程的最後被初始化。 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); mHardware->setPreviewTransform(mOrientation); } mHardware->setPreviewWindow(mPreviewWindow); //經過 調用 和 接口。 result = mHardware->startPreview(); if (result == NO_ERROR) { mCameraService->updateProxyDeviceState( //進入 層。 ICameraServiceProxy::CAMERA_STATE_ACTIVE, String8::format("%d", mCameraId)); } return result; }mHardwareCameraHardwareInterfaceconnectmHardwaresetPreviewWindow()startPreview()HAL
4. HAL:
(1)frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h
/** * Returns true if preview is enabled. */ int previewEnabled() { ALOGV("%s(%s)", __FUNCTION__, mName.string()); if (mDevice->ops->preview_enabled) // 便是經過 相關流程進行初始化的設備實例,它的類型是 。 return mDevice->ops->preview_enabled(mDevice); //若是 存在,則返回 。 return false; }mDevicehw_get_module()camera_device_tpreviewtrue
setPreviewWindow()
:
/** Set the ANativeWindow to which preview frames are sent */ status_t setPreviewWindow(const sp<ANativeWindow>& buf) { ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get()); if (mDevice->ops->set_preview_window) { //經過 繼續向下調用 mPreviewWindow = buf; if (buf != nullptr) { if (mPreviewScalingMode != NOT_SET) { setPreviewScalingMode(mPreviewScalingMode); } if (mPreviewTransform != NOT_SET) { setPreviewTransform(mPreviewTransform); } } mHalPreviewWindow.user = this; ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p", __FUNCTION__, &mHalPreviewWindow, mHalPreviewWindow.user); return mDevice->ops->set_preview_window(mDevice, buf.get() ? &mHalPreviewWindow.nw : 0); } return INVALID_OPERATION; }mDevice->ops
startPreview()
:
/** 關於 mDevice,結合 Camera.open() 流程與 hw_get_module() 相關邏輯,能夠知道它的邏輯是這樣的: 在 CameraService 啓動時,會調用 onFirstRef() 對 module 進行初始化,獲取 module 實例。 在 open 過程當中,CameraClient 鏈接 CameraServer 成功時,會實例化 CameraHardwareInterface,並傳入 module 實例對其初始化。 在初始化過程當中,經過 module 實例對應的 open 方法,咱們得到一個 device 實例,即 mDevice,這對應了具體的攝像頭設備。 經過 mDevice 就能夠將對應的指令傳達到硬件設備。 */ status_t startPreview() { ALOGV("%s(%s)", __FUNCTION__, mName.string()); if (mDevice->ops->start_preview) return mDevice->ops->start_preview(mDevice); return INVALID_OPERATION; }
(2)hardware/libhardware/include/hardware/camera.h
typedef struct camera_device { //這裏就聲明瞭要追蹤的 。 /** * camera_device.common.version must be in the range * HARDWARE_DEVICE_API_VERSION(0,0)-(1,FF). CAMERA_DEVICE_API_VERSION_1_0 is * recommended. */ hw_device_t common; camera_device_ops_t *ops; void *priv; } camera_device_t;camera_device_t
其中struct camera_device_ops:全部關於
Camera
設備的操做對應的函數指針都在這裏聲明瞭。
typedef struct camera_device_ops { int (*set_preview_window)(struct camera_device *, struct preview_stream_ops *window); void (*set_callbacks)(struct camera_device *, camera_notify_callback notify_cb, camera_data_callback data_cb, camera_data_timestamp_callback data_cb_timestamp, camera_request_memory get_memory, void *user); void (*enable_msg_type)(struct camera_device *, int32_t msg_type); void (*disable_msg_type)(struct camera_device *, int32_t msg_type); int (*msg_type_enabled)(struct camera_device *, int32_t msg_type); /** * Start preview mode. */ int (*start_preview)(struct camera_device *); void (*stop_preview)(struct camera_device *); int (*preview_enabled)(struct camera_device *); int (*store_meta_data_in_buffers)(struct camera_device *, int enable); int (*start_recording)(struct camera_device *); void (*stop_recording)(struct camera_device *); int (*recording_enabled)(struct camera_device *); void (*release_recording_frame)(struct camera_device *, const void *opaque); int (*auto_focus)(struct camera_device *); int (*cancel_auto_focus)(struct camera_device *); int (*take_picture)(struct camera_device *); int (*cancel_picture)(struct camera_device *); int (*set_parameters)(struct camera_device *, const char *parms); char *(*get_parameters)(struct camera_device *); void (*put_parameters)(struct camera_device *, char *); int (*send_command)(struct camera_device *, int32_t cmd, int32_t arg1, int32_t arg2); void (*release)(struct camera_device *); int (*dump)(struct camera_device *, int fd); } camera_device_ops_t;
(3)hardware/ti/omap4-aah/camera/CameraHal_Module.cpp
:
在
open
流程中,就指定了 ops
中指針的對應關係。
memset(camera_device, 0, sizeof(*camera_device)); memset(camera_ops, 0, sizeof(*camera_ops)); camera_device->base.common.tag = HARDWARE_DEVICE_TAG; camera_device->base.common.version = 0; camera_device->base.common.module = (hw_module_t *)(module); camera_device->base.common.close = camera_device_close; camera_device->base.ops = camera_ops; camera_ops->set_preview_window = camera_set_preview_window; camera_ops->set_callbacks = camera_set_callbacks; camera_ops->enable_msg_type = camera_enable_msg_type; camera_ops->disable_msg_type = camera_disable_msg_type; camera_ops->msg_type_enabled = camera_msg_type_enabled; camera_ops->start_preview = camera_start_preview; camera_ops->stop_preview = camera_stop_preview; camera_ops->preview_enabled = camera_preview_enabled; camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers; camera_ops->start_recording = camera_start_recording; camera_ops->stop_recording = camera_stop_recording; camera_ops->recording_enabled = camera_recording_enabled; camera_ops->release_recording_frame = camera_release_recording_frame; camera_ops->auto_focus = camera_auto_focus; camera_ops->cancel_auto_focus = camera_cancel_auto_focus; camera_ops->take_picture = camera_take_picture; camera_ops->cancel_picture = camera_cancel_picture; camera_ops->set_parameters = camera_set_parameters; camera_ops->get_parameters = camera_get_parameters; camera_ops->put_parameters = camera_put_parameters; camera_ops->send_command = camera_send_command; camera_ops->release = camera_release; camera_ops->dump = camera_dump; *device = &camera_device->base.common; // -------- TI specific stuff -------- camera_device->cameraid = cameraid;
camera_start_preview()
:
// Module-level wrapper for ops->start_preview: validates the device pointer
// and forwards to the CameraHal object for this camera id.
int camera_start_preview(struct camera_device * device)
{
    CAMHAL_LOG_MODULE_FUNCTION_NAME;
    int rv = -EINVAL;
    ti_camera_device_t* ti_dev = NULL;

    if(!device)
        return rv;

    ti_dev = (ti_camera_device_t*) device;

    // gCameraHals is an array of CameraHal*; dispatch to the instance that
    // belongs to this camera id.
    rv = gCameraHals[ti_dev->cameraid]->startPreview();

    return rv;
}
(4)hardware/ti/omap4-aah/camera/CameraHal.cpp:將 Camera Hardware Interface 映射到 V4L2。
status_t CameraHal::startPreview()
{
    LOG_FUNCTION_NAME;

    // First run the one-time setup in cameraPreviewInitialization().
    status_t ret = cameraPreviewInitialization();
    if (!mPreviewInitializationDone) return ret;

    mPreviewInitializationDone = false;

    if(mDisplayAdapter.get() != NULL) {
        CAMHAL_LOGDA("Enabling display");
        int width, height;
        mParameters.getPreviewSize(&width, &height);

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
        ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview);
#else
        ret = mDisplayAdapter->enableDisplay(width, height, NULL);
#endif

        if ( ret != NO_ERROR ) {
            CAMHAL_LOGEA("Couldn't enable display");
            CAMHAL_ASSERT_X(false,
                            "At this stage mCameraAdapter->mStateSwitchLock is still locked, "
                            "deadlock is guaranteed");
            goto error;
        }
    }

    CAMHAL_LOGDA("Starting CameraAdapter preview mode");

    // Issue CAMERA_START_PREVIEW through the CameraAdapter; if it succeeds,
    // the start-preview flow is complete.
    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);

    if(ret!=NO_ERROR) {
        CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
        goto error;
    }
    CAMHAL_LOGDA("Started preview");

    mPreviewEnabled = true;
    mPreviewStartInProgress = false;
    return ret;

error:

    CAMHAL_LOGEA("Performing cleanup after error");

    //Do all the cleanup
    freePreviewBufs();
    mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
    if(mDisplayAdapter.get() != NULL) {
        mDisplayAdapter->disableDisplay(false);
    }
    mAppCallbackNotifier->stop();
    mPreviewStartInProgress = false;
    mPreviewEnabled = false;
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
cameraPreviewInitialization():主要經過 mCameraAdapter->sendCommand() 來發送指令,並獲取一些數據。命令到達具體的 Adapter(如 V4L Adapter)後,就會調用相應的函數進行處理。

////////////
/**
   @brief Set preview mode related initialization
          -> Camera Adapter set params
          -> Allocate buffers
          -> Set use buffers for preview
   @param none
   @return NO_ERROR
   @todo Update function header with the different errors that are possible
 */
status_t CameraHal::cameraPreviewInitialization()
{
    status_t ret = NO_ERROR;
    CameraAdapter::BuffersDescriptor desc;
    CameraFrame frame;
    unsigned int required_buffer_count;
    unsigned int max_queueble_buffers;

#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
    gettimeofday(&mStartPreview, NULL);
#endif

    LOG_FUNCTION_NAME;

    if (mPreviewInitializationDone) {
        return NO_ERROR;
    }

    if ( mPreviewEnabled ){
        CAMHAL_LOGDA("Preview already running");
        LOG_FUNCTION_NAME_EXIT;
        return ALREADY_EXISTS;
    }

    if ( NULL != mCameraAdapter ) {
        // Push the current parameters down through the Adapter.
        ret = mCameraAdapter->setParameters(mParameters);
    }

    if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){
        // Ask the adapter for the preview resolution it will produce.
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame);
        if ( NO_ERROR != ret ){
            CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret);
            return ret;
        }

        ///Update the current preview width and height
        mPreviewWidth = frame.mWidth;
        mPreviewHeight = frame.mHeight;
    }

    ///If we don't have the preview callback enabled and display adapter,
    if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
        CAMHAL_LOGD("Preview not started. Preview in progress flag set");
        mPreviewStartInProgress = true;
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
        if ( NO_ERROR != ret ){
            CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret);
            return ret;
        }
        return NO_ERROR;
    }

    if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )
    {
        CAMHAL_LOGDA("Preview is in paused state");

        mDisplayPaused = false;
        mPreviewEnabled = true;
        if ( NO_ERROR == ret )
        {
            ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
            if ( NO_ERROR != ret )
            {
                CAMHAL_LOGEB("Display adapter resume failed %x", ret);
            }
        }
        //restart preview callbacks
        if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
        {
            mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
        }
        signalEndImageCapture();
        return ret;
    }

    required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));

    ///Allocate the preview buffers
    ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(),
                           required_buffer_count, max_queueble_buffers);
    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEA("Couldn't allocate buffers for Preview");
        goto error;
    }

    if ( mMeasurementEnabled )
    {
        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,
                                          ( int ) &frame,
                                          required_buffer_count);
        if ( NO_ERROR != ret ) {
            return ret;
        }

        ///Allocate the preview data buffers
        ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEA("Couldn't allocate preview data buffers");
            goto error;
        }

        if ( NO_ERROR == ret )
        {
            // Once the data Buffers are allocated, fill in the descriptor
            // members before handing them to the adapter.
            desc.mBuffers = mPreviewDataBuffers;
            desc.mOffsets = mPreviewDataOffsets;
            desc.mFd = mPreviewDataFd;
            desc.mLength = mPreviewDataLength;
            desc.mCount = ( size_t ) required_buffer_count;
            desc.mMaxQueueable = (size_t) required_buffer_count;

            mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,
                                        ( int ) &desc);
        }
    }

    ///Pass the buffers to Camera Adapter
    desc.mBuffers = mPreviewBuffers;
    desc.mOffsets = mPreviewOffsets;
    desc.mFd = mPreviewFd;
    desc.mLength = mPreviewLength;
    desc.mCount = ( size_t ) required_buffer_count;
    desc.mMaxQueueable = (size_t) max_queueble_buffers;

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,
                                      ( int ) &desc);

    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret);
        freePreviewBufs();
        return ret;
    }

    ///Start the callback notifier
    ret = mAppCallbackNotifier->start();

    if( ALREADY_EXISTS == ret )
    {
        //Already running, do nothing
        CAMHAL_LOGDA("AppCallbackNotifier already running");
        ret = NO_ERROR;
    }
    else if ( NO_ERROR == ret ) {
        CAMHAL_LOGDA("Started AppCallbackNotifier..");
        mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
    }
    else
    {
        CAMHAL_LOGDA("Couldn't start AppCallbackNotifier");
        goto error;
    }

    if (ret == NO_ERROR) mPreviewInitializationDone = true;

    // Hook the Buffers up to the preview callbacks so the upper-layer APP
    // can fetch the data it needs for preview.
    mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBuffers, mPreviewOffsets,
                                                mPreviewFd, mPreviewLength, required_buffer_count);

    return ret;

error:

    CAMHAL_LOGEA("Performing cleanup after error");

    //Do all the cleanup
    freePreviewBufs();
    mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
    if(mDisplayAdapter.get() != NULL) {
        mDisplayAdapter->disableDisplay(false);
    }
    mAppCallbackNotifier->stop();
    mPreviewStartInProgress = false;
    mPreviewEnabled = false;
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
(5)hardware/ti/omap4-aah/camera/BaseCameraAdapter.cpp : 各常量分別對應不一樣的命令。
// Lookup table mapping command names to CameraAdapter command enums; each
// constant corresponds to a different command.
const LUT cameraCommandsUserToHAL[] = {
    { "CAMERA_START_PREVIEW",                   CameraAdapter::CAMERA_START_PREVIEW },
    { "CAMERA_STOP_PREVIEW",                    CameraAdapter::CAMERA_STOP_PREVIEW },
    { "CAMERA_START_VIDEO",                     CameraAdapter::CAMERA_START_VIDEO },
    { "CAMERA_STOP_VIDEO",                      CameraAdapter::CAMERA_STOP_VIDEO },
    { "CAMERA_START_IMAGE_CAPTURE",             CameraAdapter::CAMERA_START_IMAGE_CAPTURE },
    { "CAMERA_STOP_IMAGE_CAPTURE",              CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE },
    { "CAMERA_PERFORM_AUTOFOCUS",               CameraAdapter::CAMERA_PERFORM_AUTOFOCUS },
    { "CAMERA_CANCEL_AUTOFOCUS",                CameraAdapter::CAMERA_CANCEL_AUTOFOCUS },
    { "CAMERA_PREVIEW_FLUSH_BUFFERS",           CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS },
    { "CAMERA_START_SMOOTH_ZOOM",               CameraAdapter::CAMERA_START_SMOOTH_ZOOM },
    { "CAMERA_STOP_SMOOTH_ZOOM",                CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM },
    { "CAMERA_USE_BUFFERS_PREVIEW",             CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW },
    { "CAMERA_SET_TIMEOUT",                     CameraAdapter::CAMERA_SET_TIMEOUT },
    { "CAMERA_CANCEL_TIMEOUT",                  CameraAdapter::CAMERA_CANCEL_TIMEOUT },
    { "CAMERA_START_BRACKET_CAPTURE",           CameraAdapter::CAMERA_START_BRACKET_CAPTURE },
    { "CAMERA_STOP_BRACKET_CAPTURE",            CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE },
    { "CAMERA_QUERY_RESOLUTION_PREVIEW",        CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW },
    { "CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE", CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE },
    { "CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA",  CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA },
    { "CAMERA_USE_BUFFERS_IMAGE_CAPTURE",       CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE },
    { "CAMERA_USE_BUFFERS_PREVIEW_DATA",        CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA },
    { "CAMERA_TIMEOUT_EXPIRED",                 CameraAdapter::CAMERA_TIMEOUT_EXPIRED },
    { "CAMERA_START_FD",                        CameraAdapter::CAMERA_START_FD },
    { "CAMERA_STOP_FD",                         CameraAdapter::CAMERA_STOP_FD },
    { "CAMERA_SWITCH_TO_EXECUTING",             CameraAdapter::CAMERA_SWITCH_TO_EXECUTING },
    { "CAMERA_USE_BUFFERS_VIDEO_CAPTURE",       CameraAdapter::CAMERA_USE_BUFFERS_VIDEO_CAPTURE },
#ifdef OMAP_ENHANCEMENT_CPCAM
    { "CAMERA_USE_BUFFERS_REPROCESS",           CameraAdapter::CAMERA_USE_BUFFERS_REPROCESS },
    { "CAMERA_START_REPROCESS",                 CameraAdapter::CAMERA_START_REPROCESS },
#endif
};
BaseCameraAdapter::sendCommand()
:利用 switch
將不一樣的命令對應到各自的邏輯中。
// Excerpt from BaseCameraAdapter::sendCommand(). The concrete work is done
// in the subclass override of startPreview() (e.g. V4LCameraAdapter,
// analysed below); this base-class case only drives the state machine.
case CameraAdapter::CAMERA_START_PREVIEW:
{
    CAMHAL_LOGDA("Start Preview");

    if ( ret == NO_ERROR )
    {
        ret = setState(operation);
    }

    if ( ret == NO_ERROR )
    {
        ret = startPreview();
    }

    if ( ret == NO_ERROR )
    {
        ret = commitState();
    }
    else
    {
        ret |= rollbackState();
    }

    break;
}
(6)hardware/ti/omap4-aah/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
private:
    // V4LCameraAdapter inherits from BaseCameraAdapter. This thread keeps
    // invoking the adapter's previewThread() in its loop.
    class PreviewThread : public android::Thread {
        V4LCameraAdapter* mAdapter;
    public:
        PreviewThread(V4LCameraAdapter* hw) :
            Thread(false), mAdapter(hw) { }
        virtual void onFirstRef() {
            run("CameraPreviewThread", android::PRIORITY_URGENT_DISPLAY);
        }
        virtual bool threadLoop() {
            mAdapter->previewThread();
            // loop until we need to quit
            return true;
        }
    };

    //Used for calculation of the average frame rate during preview
    status_t recalculateFPS();

    char * GetFrame(int &index);

    int previewThread();
(7)hardware/ti/omap4-aah/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
startPreview()
:
status_t V4LCameraAdapter::startPreview()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;
    android::AutoMutex lock(mPreviewBufsLock);

    if(mPreviewing) {
        ret = BAD_VALUE;
        goto EXIT;
    }

    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
        mVideoInfo->buf.index = i;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        // Queue the buffer to the driver through the v4lIoctl() helper so
        // the hardware can fill these Buffers with captured data.
        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
        if (ret < 0) {
            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
            goto EXIT;
        }
        nQueued++;
    }

    ret = v4lStartStreaming();

    // Create and start preview thread for receiving buffers from V4L Camera
    if(!mCapturing) {
        // PreviewThread keeps receiving the frames coming back from the
        // V4L camera device.
        mPreviewThread = new PreviewThread(this);
        CAMHAL_LOGDA("Created preview thread");
    }

    //Update the flag to indicate we are previewing
    mPreviewing = true;
    mCapturing = false;

EXIT:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
previewThread()
:
int V4LCameraAdapter::previewThread()
{
    status_t ret = NO_ERROR;
    int width, height;
    CameraFrame frame;
    void *y_uv[2];
    int index = 0;
    int stride = 4096;
    char *fp = NULL;

    mParams.getPreviewSize(&width, &height);

    if (mPreviewing) {
        fp = this->GetFrame(index);
        if(!fp) {
            ret = BAD_VALUE;
            goto EXIT;
        }

        CameraBuffer *buffer = mPreviewBufs.keyAt(index);
        CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
        if (!lframe) {
            ret = BAD_VALUE;
            goto EXIT;
        }

        debugShowFPS();

        if ( mFrameSubscribers.size() == 0 ) {
            ret = BAD_VALUE;
            goto EXIT;
        }
        y_uv[0] = (void*) lframe->mYuv[0];
        //y_uv[1] = (void*) lframe->mYuv[1];
        //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);

        // Take the frame returned by the device and convert its pixel
        // format before delivering it.
        convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
        CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );

#ifdef SAVE_RAW_FRAMES
        unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
        //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
        convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
        saveFile( nv12_buff, ((width*height)*3/2) );
        free (nv12_buff);
#endif

        // Fill in the required frame attributes: size, timestamp, etc.
        frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
        frame.mBuffer = buffer;
        frame.mLength = width*height*3/2;
        frame.mAlignment = stride;
        frame.mOffset = 0;
        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
        frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;

        if (mRecording)
        {
            frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
            mFramesWithEncoder++;
        }

        ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
        if (ret != NO_ERROR) {
            CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
        } else {
            // Deliver the frame data to the subscribers (users).
            ret = sendFrameToSubscribers(&frame);
        }
    }

EXIT:

    return ret;
}
(8)hardware/ti/omap4-aah/camera/AppCallbackNotifier.cpp:預覽功能初始化的部分,調用到了
AppCallbackNotifier
類的函數。
status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params,
        CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
{
    unsigned int *bufArr;
    int size = 0;

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(mLock);

    if ( NULL == mFrameProvider ) {
        CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
        return -EINVAL;
    }

    if ( mPreviewing ) {
        CAMHAL_LOGDA("+Already previewing");
        return NO_INIT;
    }

    int w,h;
    ///Get preview size
    params.getPreviewSize(&w, &h);

    // save preview pixel format, size and stride
    mPreviewWidth = w;
    mPreviewHeight = h;
    mPreviewStride = 4096;
    mPreviewPixelFormat = CameraHal::getPixelFormatConstant(params.getPreviewFormat());
    size = CameraHal::calculateBufferSize(mPreviewPixelFormat, w, h);

    mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
    if (!mPreviewMemory) {
        return NO_MEMORY;
    }

    for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
        mPreviewBuffers[i].type = CAMERA_BUFFER_MEMORY;
        mPreviewBuffers[i].opaque = (unsigned char*) mPreviewMemory->data + (i*size);
        mPreviewBuffers[i].mapped = mPreviewBuffers[i].opaque;
    }

    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
        mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
    }

    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME) ) {
        // Also subscribe to snapshot frames (synchronized preview frames).
        mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
    }

    mPreviewBufCount = 0;

    mPreviewing = true;

    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
以上回調函數在這裏設置:
// Stores the callback pointers handed down from the layer above; these are
// the callbacks invoked later (e.g. mDataCb in notifyEvent()).
void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
                                        camera_notify_callback notify_cb,
                                        camera_data_callback data_cb,
                                        camera_data_timestamp_callback data_cb_timestamp,
                                        camera_request_memory get_memory,
                                        void *user)
{
    android::AutoMutex lock(mLock);

    LOG_FUNCTION_NAME;

    mCameraHal = cameraHal;
    mNotifyCb = notify_cb;
    mDataCb = data_cb;
    mDataCbTimestamp = data_cb_timestamp;
    mRequestMemory = get_memory;
    mCallbackCookie = user;

    LOG_FUNCTION_NAME_EXIT;
}
notifyEvent()
:
// Excerpt from AppCallbackNotifier::notifyEvent(): preview metadata event.
case CameraHalEvent::EVENT_METADATA:
    metaEvtData = evt->mEventData->metadataEvent;
    if ( ( NULL != mCameraHal ) &&
         ( NULL != mNotifyCb) &&
         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
    {
        // WA for an issue inside CameraService
        // Request a small camera_memory_t Buffer, then invoke the data
        // callback to push the metadata up to the layers above.
        camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);

        mDataCb(CAMERA_MSG_PREVIEW_METADATA, tmpBuffer, 0,
                metaEvtData->getMetadataResult(), mCallbackCookie);
        metaEvtData.clear();
        if ( NULL != tmpBuffer ) {
            tmpBuffer->release(tmpBuffer);
        }
    }
    break;
簡圖總結:在 HAL 層中,CameraHardwareInterface 是通用的入口,而真正實現與驅動層對接的部分是與平臺相關的,不一樣平臺有不一樣的實現方案。
Camera API 1
中,數據流主要是經過函數回調的方式,依照從下往上的方向,逐層 return
到 Applications 中。
1.Open 時設置回調:frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h
/** Set the notification and data callbacks */
void setCallbacks(notify_callback notify_cb,
                  data_callback data_cb,
                  data_callback_timestamp data_cb_timestamp,
                  void* user)
{
    // Store the notify callback (used to signal that data has been updated)
    // plus the data and data-timestamp callbacks, kept in the function
    // pointers mDataCb and mDataCbTimestamp.
    mNotifyCb = notify_cb;
    mDataCb = data_cb;
    mDataCbTimestamp = data_cb_timestamp;
    mCbUser = user;

    ALOGV("%s(%s)", __FUNCTION__, mName.string());

    // NOTE: what is registered with mDevice->ops is not the pointers stored
    // above but wrapper functions such as __data_cb, implemented in this
    // file, which add a thin layer around the real callbacks.
    if (mDevice->ops->set_callbacks) {
        mDevice->ops->set_callbacks(mDevice,
                               __notify_cb,
                               __data_cb,
                               __data_cb_timestamp,
                               __get_memory,
                               this);
    }
}
__data_cb():對原 callback 函數做了簡單封裝,附加了一個防止數組越界的判斷。
// Thin wrapper installed via mDevice->ops->set_callbacks: guards against an
// out-of-range buffer index, then forwards to the stored mDataCb.
static void __data_cb(int32_t msg_type,
                      const camera_memory_t *data, unsigned int index,
                      camera_frame_metadata_t *metadata,
                      void *user)
{
    ALOGV("%s", __FUNCTION__);
    CameraHardwareInterface *__this =
            static_cast<CameraHardwareInterface *>(user);
    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
    if (index >= mem->mNumBufs) {
        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
             index, mem->mNumBufs);
        return;
    }
    __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser);
}
2.控制流:
(1)frameworks/base/core/java/android/hardware/Camera.java
takePicture()
:
public final void takePicture(ShutterCallback shutter, PictureCallback raw,
        PictureCallback postview, PictureCallback jpeg) {
    mShutterCallback = shutter;       // shutter callback
    mRawImageCallback = raw;          // per-image-type picture data callbacks
    mPostviewCallback = postview;
    mJpegCallback = jpeg;

    // If callback is not set, do not send me callbacks.
    int msgType = 0;
    if (mShutterCallback != null) {
        msgType |= CAMERA_MSG_SHUTTER;
    }
    if (mRawImageCallback != null) {
        msgType |= CAMERA_MSG_RAW_IMAGE;
    }
    if (mPostviewCallback != null) {
        msgType |= CAMERA_MSG_POSTVIEW_FRAME;
    }
    if (mJpegCallback != null) {
        msgType |= CAMERA_MSG_COMPRESSED_IMAGE;
    }

    // Call the JNI takePicture; msgType is built from whichever Callbacks
    // were supplied — each Callback maps to its own bit in the mask
    // (1, 10, 100, ... in binary).
    native_takePicture(msgType);
    mFaceDetectionRunning = false;
}
3.Android Runtime:frameworks/base/core/jni/android_hardware_Camera.cpp
static void android_hardware_Camera_takePicture(JNIEnv *env, jobject thiz, jint msgType)
{
    ALOGV("takePicture");
    JNICameraContext* context;
    // Look up the already-opened native camera instance and invoke its
    // takePicture() interface.
    sp<Camera> camera = get_native_camera(env, thiz, &context);
    if (camera == 0) return;

    /*
     * When CAMERA_MSG_RAW_IMAGE is requested, if the raw image callback
     * buffer is available, CAMERA_MSG_RAW_IMAGE is enabled to get the
     * notification _and_ the data; otherwise, CAMERA_MSG_RAW_IMAGE_NOTIFY
     * is enabled to receive the callback notification but no data.
     *
     * Note that CAMERA_MSG_RAW_IMAGE_NOTIFY is not exposed to the
     * Java application.
     *
     * Extra handling for RAW_IMAGE: when a RAW callback was registered,
     * check whether a matching callback buffer exists in the context. If
     * not, drop CAMERA_MSG_RAW_IMAGE and substitute
     * CAMERA_MSG_RAW_IMAGE_NOTIFY, so only the notification is delivered,
     * without the image data.
     */
    if (msgType & CAMERA_MSG_RAW_IMAGE) {
        ALOGV("Enable raw image callback buffer");
        if (!context->isRawImageCallbackBufferAvailable()) {
            ALOGV("Enable raw image notification, since no callback buffer exists");
            msgType &= ~CAMERA_MSG_RAW_IMAGE;
            msgType |= CAMERA_MSG_RAW_IMAGE_NOTIFY;
        }
    }

    if (camera->takePicture(msgType) != NO_ERROR) {
        jniThrowRuntimeException(env, "takePicture failed");
        return;
    }
}
4.C/C++ Libraries
(1)frameworks/av/camera/Camera.cpp
// take a picture status_t Camera::takePicture(int msgType) { ALOGV("takePicture: 0x%x", msgType); sp <::android::hardware::ICamera> c = mCamera; if (c == 0) return NO_INIT; return c->takePicture(msgType); //獲取一個 ,調用其 接口。 }ICameratakePicture
(2)frameworks/av/camera/ICamera.cpp
// take a picture - returns an IMemory (ref-counted mmap) status_t takePicture(int msgType) { ALOGV("takePicture: 0x%x", msgType); Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); data.writeInt32(msgType); remote()->transact(TAKE_PICTURE, data, &reply); //利用 機制發送相應指令到服務端,實際調用到的是 函數。 status_t ret = reply.readInt32(); return ret; }BinderCameraClient::takePicture()
(3)frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// take a picture - image is returned in callback status_t CameraClient::takePicture(int msgType) { LOG1("takePicture (pid %d): 0x%x", getCallingPid(), msgType); Mutex::Autolock lock(mLock); status_t result = checkPidAndHardware(); if (result != NO_ERROR) return result; if ((msgType & CAMERA_MSG_RAW_IMAGE) && //注: 指令與 指令不能同時有效,須要進行對應的檢查。 (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) { ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY" " cannot be both enabled"); return BAD_VALUE; } // We only accept picture related message types // and ignore other types of messages for takePicture(). int picMsgType = msgType //對傳入的指令過濾,只留下與 操做相關的。 & (CAMERA_MSG_SHUTTER | CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_RAW_IMAGE | CAMERA_MSG_RAW_IMAGE_NOTIFY | CAMERA_MSG_COMPRESSED_IMAGE); enableMsgType(picMsgType); return mHardware->takePicture(); //調用 中的 接口。 }CAMERA_MSG_RAW_IMAGECAMERA_MSG_RAW_IMAGE_NOTIFYtakePicture()CameraHardwareInterfacetakePicture()
5.數據流:因爲數據流是經過 callback 函數實現的,因此探究其流程的時候,我是從底層向上層進行分析的。
(1)HAL:frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h
/**
 * Take a picture.
 */
status_t takePicture()
{
    ALOGV("%s(%s)", __FUNCTION__, mName.string());
    // Jump through the function pointer installed in mDevice->ops by the
    // HAL module, into the platform-specific take_picture implementation.
    if (mDevice->ops->take_picture)
        return mDevice->ops->take_picture(mDevice);
    return INVALID_OPERATION;
}
__data_cb():該回調函數是在同文件中實現的 setCallbacks() 函數中設置的。Camera 設備得到數據後,就會往上傳輸,在 HAL 層中會調用到這個回調函數。
// Data-path entry point in the HAL layer: once the camera device produces
// data, this wrapper is invoked and forwards it upward.
static void __data_cb(int32_t msg_type,
                      const camera_memory_t *data, unsigned int index,
                      camera_frame_metadata_t *metadata,
                      void *user)
{
    ALOGV("%s", __FUNCTION__);
    CameraHardwareInterface *__this =
            static_cast<CameraHardwareInterface *>(user);
    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
    if (index >= mem->mNumBufs) {
        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
             index, mem->mNumBufs);
        return;
    }
    // The mDataCb pointer corresponds to the dataCallback() implemented in
    // the CameraClient class.
    __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser);
}
(2)C/C++ Libraries:
frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// This callback is registered with CameraHardwareInterface during the
// initialize() flow.
void CameraClient::dataCallback(int32_t msgType,
        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
    LOG2("dataCallback(%d)", msgType);

    // Recover the connected client from the cookie.
    sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
    if (client.get() == nullptr) return;
    if (!client->lockIfMessageWanted(msgType)) return;

    if (dataPtr == 0 && metadata == NULL) {
        ALOGE("Null data returned in data callback");
        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
        return;
    }

    // Dispatch to the matching handle* routine according to msgType.
    switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
        case CAMERA_MSG_PREVIEW_FRAME:
            client->handlePreviewData(msgType, dataPtr, metadata);
            break;
        case CAMERA_MSG_POSTVIEW_FRAME:
            client->handlePostview(dataPtr);
            break;
        case CAMERA_MSG_RAW_IMAGE:
            client->handleRawPicture(dataPtr);
            break;
        case CAMERA_MSG_COMPRESSED_IMAGE:
            client->handleCompressedPicture(dataPtr);
            break;
        default:
            client->handleGenericData(msgType, dataPtr, metadata);
            break;
    }
}
handleRawPicture()
:
// picture callback - raw image ready void CameraClient::handleRawPicture(const sp<IMemory>& mem) { disableMsgType(CAMERA_MSG_RAW_IMAGE); ssize_t offset; size_t size; sp<IMemoryHeap> heap = mem->getMemory(&offset, &size); sp<hardware::ICameraClient> c = mRemoteCallback; //在 流程中, 函數調用時, 已經設置爲一個客戶端實例,其對應的是 的強指針。 mLock.unlock(); if (c != 0) { c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL); //基於 機制來啓動客戶端的 客戶端的, 是實如今 類中。 } }openconnect()mRemoteCallbackICameraClientBinderdataCallbackdataCallbackCamera
frameworks/av/camera/Camera.cpp
// callback from camera service when frame or image is ready void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) { sp<CameraListener> listener; { Mutex::Autolock _l(mLock); listener = mListener; } if (listener != NULL) { listener->postData(msgType, dataPtr, metadata); //調用 的 接口(中實現),將數據繼續向上傳輸。 } }CameraListenerpostDataandroid_hardware_Camera.cpp
(3)Android Runtime :frameworks/base/core/jni/android_hardware_Camera.cpp
// postData is a member of JNICameraContext, which implements the
// CameraListener interface.
void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
                                camera_frame_metadata_t *metadata)
{
    // VM pointer will be NULL if object is released
    Mutex::Autolock _l(mLock);
    JNIEnv *env = AndroidRuntime::getJNIEnv();   // first obtain the VM environment
    if (mCameraJObjectWeak == NULL) {
        ALOGW("callback on dead camera object");
        return;
    }

    // Then strip off the CAMERA_MSG_PREVIEW_METADATA bit.
    int32_t dataMsgType = msgType & ~CAMERA_MSG_PREVIEW_METADATA;

    // return data based on callback type
    switch (dataMsgType) {
        case CAMERA_MSG_VIDEO_FRAME:
            // should never happen
            break;

        // For backward-compatibility purpose, if there is no callback
        // buffer for raw image, the callback returns null.
        case CAMERA_MSG_RAW_IMAGE:
            ALOGV("rawCallback");
            if (mRawImageCallbackBuffers.isEmpty()) {
                env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
                        mCameraJObjectWeak, dataMsgType, 0, 0, NULL);
            } else {
                // The key step is the copyAndPost() call below.
                copyAndPost(env, dataPtr, dataMsgType);
            }
            break;

        // There is no data.
        case 0:
            break;

        default:
            ALOGV("dataCallback(%d, %p)", dataMsgType, dataPtr.get());
            copyAndPost(env, dataPtr, dataMsgType);
            break;
    }

    // post frame metadata to Java
    if (metadata && (msgType & CAMERA_MSG_PREVIEW_METADATA)) {
        postMetadata(env, CAMERA_MSG_PREVIEW_METADATA, metadata);
    }
}
copyAndPost()
:
void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType)
{
    jbyteArray obj = NULL;

    // allocate Java byte array and copy data
    if (dataPtr != NULL) {   // first check whether the Memory actually holds data
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size);
        ALOGV("copyAndPost: off=%zd, size=%zu", offset, size);
        uint8_t *heapBase = (uint8_t*)heap->base();

        if (heapBase != NULL) {
            const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset);

            if (msgType == CAMERA_MSG_RAW_IMAGE) {
                obj = getCallbackBuffer(env, &mRawImageCallbackBuffers, size);
            } else if (msgType == CAMERA_MSG_PREVIEW_FRAME && mManualBufferMode) {
                obj = getCallbackBuffer(env, &mCallbackBuffers, size);

                if (mCallbackBuffers.isEmpty()) {
                    ALOGV("Out of buffers, clearing callback!");
                    mCamera->setPreviewCallbackFlags(CAMERA_FRAME_CALLBACK_FLAG_NOOP);
                    mManualCameraCallbackSet = false;

                    if (obj == NULL) {
                        return;
                    }
                }
            } else {
                ALOGV("Allocating callback buffer");
                obj = env->NewByteArray(size);
            }

            if (obj == NULL) {
                ALOGE("Couldn't allocate byte array for JPEG data");
                env->ExceptionClear();
            } else {
                env->SetByteArrayRegion(obj, 0, size, data);
            }
        } else {
            ALOGE("image heap is NULL");
        }
    }

    // post image data to Java: hand the byte array over to the Java side
    env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
            mCameraJObjectWeak, msgType, 0, 0, obj);
    if (obj) {
        env->DeleteLocalRef(obj);
    }
}
(4)frameworks/base/core/java/android/hardware/Camera.java
// Invoked from native code; posts the event into the Camera's Handler-based
// event loop.
private static void postEventFromNative(Object camera_ref,
                                        int what, int arg1, int arg2, Object obj)
{
    // First make sure the Camera has actually been instantiated.
    Camera c = (Camera)((WeakReference)camera_ref).get();
    if (c == null)
        return;

    if (c.mEventHandler != null) {
        // Wrap the data received from the Native layer into a Message via
        // mEventHandler.obtainMessage(), then hand it off with sendMessage().
        Message m = c.mEventHandler.obtainMessage(what, arg1, arg2, obj);
        c.mEventHandler.sendMessage(m);
    }
}
// EventHandler extends Handler; each Message is dispatched to the matching
// application-registered callback.
@Override
public void handleMessage(Message msg) {
    switch(msg.what) {
    case CAMERA_MSG_SHUTTER:
        if (mShutterCallback != null) {
            mShutterCallback.onShutter();
        }
        return;

    case CAMERA_MSG_RAW_IMAGE:
        if (mRawImageCallback != null) {
            mRawImageCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_COMPRESSED_IMAGE:
        if (mJpegCallback != null) {
            mJpegCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_PREVIEW_FRAME:
        PreviewCallback pCb = mPreviewCallback;
        if (pCb != null) {
            if (mOneShot) {
                // Clear the callback variable before the callback
                // in case the app calls setPreviewCallback from
                // the callback function
                mPreviewCallback = null;
            } else if (!mWithBuffer) {
                // We're faking the camera preview mode to prevent
                // the app from being flooded with preview frames.
                // Set to oneshot mode again.
                setHasPreviewCallback(true, false);
            }
            pCb.onPreviewFrame((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_POSTVIEW_FRAME:
        if (mPostviewCallback != null) {
            // Through calls such as this one, the data that travelled up
            // from the bottom layers is finally delivered to the top-level
            // Java application, which obtains the image by parsing the
            // Message.
            mPostviewCallback.onPictureTaken((byte[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_FOCUS:
        AutoFocusCallback cb = null;
        synchronized (mAutoFocusCallbackLock) {
            cb = mAutoFocusCallback;
        }
        if (cb != null) {
            boolean success = msg.arg1 == 0 ? false : true;
            cb.onAutoFocus(success, mCamera);
        }
        return;

    case CAMERA_MSG_ZOOM:
        if (mZoomListener != null) {
            mZoomListener.onZoomChange(msg.arg1, msg.arg2 != 0, mCamera);
        }
        return;

    case CAMERA_MSG_PREVIEW_METADATA:
        if (mFaceListener != null) {
            mFaceListener.onFaceDetection((Face[])msg.obj, mCamera);
        }
        return;

    case CAMERA_MSG_ERROR :
        Log.e(TAG, "Error " + msg.arg1);
        if (mErrorCallback != null) {
            mErrorCallback.onError(msg.arg1, mCamera);
        }
        return;

    case CAMERA_MSG_FOCUS_MOVE:
        if (mAutoFocusMoveCallback != null) {
            mAutoFocusMoveCallback.onAutoFocusMoving(msg.arg1 == 0 ? false : true, mCamera);
        }
        return;

    default:
        Log.e(TAG, "Unknown message type " + msg.what);
        return;
    }
}
簡圖總結:
總結:
不論是控制流仍是數據流,都是要經過五大層次依次執行下一步的。控制流是將命令從頂層流向底層,而數據流則是將底層的數據流向頂層。若是要自定義一個對數據進行處理的 C++ 功能庫,並將其加入相機中,能夠經過對 HAL 層進行一些修改,將 RAW 圖像流向本身的處理庫,再將處理後的 RAW 圖像傳回 HAL 層(須要在 HAL 層對 RAW 格式進行一些處理才能把圖像上傳),最後經過正常的回調流程把圖像傳到頂層應用中。