Android Camera CameraHal.cpp 初始化分析
2015-11-29 18:33
323 查看
Android Camera CameraHal.cpp 分析
作为上层和底层的中转站,hal层在android中起到的作用不言而喻,针对camera的研究已经有一段时间了,这里自己还是决定静下心来好好的分析一下CameraHal.cpp这里的代码,
对自己更好的理解hal与上层和底层的交互作用不可小觑,特别对理解hal与kernel driver的交互过程作用很大
不多说废话了,开始今天的主题
我们首先从CameraHal的初始化说起,那么它是从哪里开始初始化的呢?这一点之前的文章中已经提到过,只是没有重点介绍,这里还是说一下吧
是在camera最初open的时候会调用到camerahal_module.cpp中的以下方法进行初始化的
/*******************************************************************
* implementation of camera_module functions
*******************************************************************/
/* open device handle to one of the cameras
*
* assume camera service will keep singleton of each camera
* so this function will always only be called once per camera instance
*/
int camera_device_open(const hw_module_t* module, const char* name,
hw_device_t** device)
{
int rv = 0;
int num_cameras = 0;
int cameraid;
ti_camera_device_t* camera_device = NULL;
camera_device_ops_t* camera_ops = NULL;
android::CameraHal* camera = NULL;
android::CameraProperties::Properties* properties = NULL;
android::Mutex::Autolock
lock(gCameraHalDeviceLock);
LOGI("camera_device open");
if (name != NULL) {
cameraid = atoi(name);
num_cameras = gCameraProperties.camerasSupported();
if(cameraid > num_cameras)
{
LOGE("camera service provided cameraid out of bounds, "
"cameraid = %d, num supported = %d",
cameraid, num_cameras);
rv = -EINVAL;
goto fail;
}
if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
{
LOGE("maximum number of cameras already open");
rv = -ENOMEM;
goto fail;
}
camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
if(!camera_device)
{
LOGE("camera_device allocation fail");
rv = -ENOMEM;
goto fail;
}
camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
if(!camera_ops)
{
LOGE("camera_ops allocation fail");
rv = -ENOMEM;
goto fail;
}
memset(camera_device, 0, sizeof(*camera_device));
memset(camera_ops, 0, sizeof(*camera_ops));
camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
camera_device->base.common.version = 0;
camera_device->base.common.module = (hw_module_t *)(module);
camera_device->base.common.close = camera_device_close;
camera_device->base.ops = camera_ops;
camera_ops->set_preview_window = camera_set_preview_window;
camera_ops->set_callbacks = camera_set_callbacks;
camera_ops->enable_msg_type = camera_enable_msg_type;
camera_ops->disable_msg_type = camera_disable_msg_type;
camera_ops->msg_type_enabled = camera_msg_type_enabled;
camera_ops->start_preview = camera_start_preview;
camera_ops->stop_preview = camera_stop_preview;
camera_ops->preview_enabled = camera_preview_enabled;
camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
camera_ops->start_recording = camera_start_recording;
camera_ops->stop_recording = camera_stop_recording;
camera_ops->recording_enabled = camera_recording_enabled;
camera_ops->release_recording_frame = camera_release_recording_frame;
camera_ops->auto_focus = camera_auto_focus;
camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
camera_ops->take_picture = camera_take_picture;
camera_ops->cancel_picture = camera_cancel_picture;
camera_ops->set_parameters = camera_set_parameters;
camera_ops->get_parameters = camera_get_parameters;
camera_ops->put_parameters = camera_put_parameters;
camera_ops->send_command = camera_send_command;
camera_ops->release = camera_release;
camera_ops->dump = camera_dump;
*device = &camera_device->base.common;
// -------- TI
specific stuff --------
camera_device->cameraid = cameraid;
if(gCameraProperties.getProperties(cameraid, &properties) < 0)
{
LOGE("Couldn't get camera properties");
rv = -ENOMEM;
goto fail;
}
****************重点就在这里了****************
camera = new android::CameraHal(cameraid);
if(!camera)
{
LOGE("Couldn't create instance of CameraHal class");
rv = -ENOMEM;
goto fail;
}
if(properties && (camera->initialize(properties) != android::NO_ERROR))
{
LOGE("Couldn't initialize camera instance");
rv = -ENODEV;
goto fail;
}
gCameraHals[cameraid] = camera;
gCamerasOpen++;
}
return rv;
fail:
if(camera_device) {
free(camera_device);
camera_device = NULL;
}
if(camera_ops) {
free(camera_ops);
camera_ops = NULL;
}
if(camera) {
delete camera;
camera = NULL;
}
*device = NULL;
return rv;
}
上面横线中间的部分就是camerahal的初始化了,首先new camerahal,并且调用initialize方法,最后将实例化好的camerahal保存到gCameraHals这个数组中,之后会通过这个数组找到我们实例化好的camerahal,实现我们对hal层接口的使用
现在我们就开始看看camerahal的initialize方法的实现
/**
@brief Initialize the Camera HAL
Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager
@param None
@return NO_ERROR - On success
NO_MEMORY - On failure to allocate memory for any
of the objects
@remarks Camera Hal internal function
*/
/**
   @brief Initialize the Camera HAL

   Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager

   @param properties camera properties of the sensor being opened
   @return NO_ERROR - On success
           NO_MEMORY - On failure to allocate memory for any of the objects
   @remarks Camera Hal internal function
 */
status_t CameraHal::initialize(CameraProperties::Properties* properties)
{
    LOG_FUNCTION_NAME;

    int sensor_index = 0;
    const char* sensor_name = NULL;

    ///Initialize the event mask used for registering an event provider for AppCallbackNotifier
    ///Currently, registering all events as to be coming from CameraAdapter
    int32_t eventMask = CameraHalEvent::ALL_EVENTS;

    // Get my camera properties
    mCameraProperties = properties;

    if(!mCameraProperties)
    {
        goto fail_loop;
    }

    // Dump the properties of this Camera
    // will only print if DEBUG macro is defined
    mCameraProperties->dump();

    if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
    {
        sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
    }

    if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_NAME)) != 0 ) {
        sensor_name = mCameraProperties->get(CameraProperties::CAMERA_NAME);
    }
    CAMHAL_LOGDB("Sensor index= %d; Sensor name= %s", sensor_index, sensor_name);

    // 1. Pick the camera adapter: a USB sensor is driven through V4L,
    //    anything else goes through the OMX camera adapter.
    if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
#ifdef V4L_CAMERA_ADAPTER
        mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
#endif
    }
    else {
#ifdef OMX_CAMERA_ADAPTER
        mCameraAdapter = OMXCameraAdapter_Factory(sensor_index);
#endif
    }

    if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
    {
        CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
        mCameraAdapter = NULL;
        goto fail_loop;
    }

    mCameraAdapter->incStrong(mCameraAdapter);
    mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
    mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);

    // 2. Create an AppCallbackNotifier and initialize it.
    if(!mAppCallbackNotifier.get())
    {
        /// Create the callback notifier
        mAppCallbackNotifier = new AppCallbackNotifier();
        if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR))
        {
            CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier");
            goto fail_loop;
        }
    }

    // 3. Create a MemoryManager and initialize it.
    if(!mMemoryManager.get())
    {
        /// Create Memory Manager
        mMemoryManager = new MemoryManager();
        if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
        {
            CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
            goto fail_loop;
        }
    }

    ///Setup the class dependencies...
    ///AppCallbackNotifier has to know where to get the Camera frames and the events
    ///like auto focus lock etc from. CameraAdapter is the one which provides those events.
    ///Set it as the frame and event providers for AppCallbackNotifier.
    ///@remarks setEventProvider API takes in a bit mask of events for registering a
    ///         provider for the different events. That way, if events can come from
    ///         DisplayAdapter in future, we will be able to add it as provider for any event.
    mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
    mAppCallbackNotifier->setFrameProvider(mCameraAdapter);

    ///Any dynamic errors that happen during the camera use case has to be propagated
    ///back to the application via CAMERA_MSG_ERROR. AppCallbackNotifier is the class
    ///that notifies such errors to the application.
    ///Set it as the error handler for CameraAdapter.
    mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get());

    ///Start the callback notifier
    if(mAppCallbackNotifier->start() != NO_ERROR)
    {
        CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
        goto fail_loop;
    }

    CAMHAL_LOGDA("Started AppCallbackNotifier..");
    mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);

    // 4. Set up the default camera parameters.
    ///Initialize default parameters
    initDefaultParameters();

    if ( setParameters(mParameters) != NO_ERROR )
    {
        CAMHAL_LOGEA("Failed to set default parameters?!");
    }

    // 5. Create and initialize a SensorListener; a failure here is not fatal.
    // register for sensor events
    mSensorListener = new SensorListener();
    if (mSensorListener.get()) {
        if (mSensorListener->initialize() == NO_ERROR) {
            mSensorListener->setCallbacks(orientation_cb, this);
            mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
        } else {
            CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
            mSensorListener.clear();
            mSensorListener = NULL;
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;

fail_loop:
    ///Free up the resources because we failed somewhere up
    deinitialize();
    LOG_FUNCTION_NAME_EXIT;

    return NO_MEMORY;
}
上面一共五个步骤,接下来将一一分析,自我感觉很有这样的必要
一.OMXCameraAdapter的实例化和初始化
首先看一下OMXCameraAdapter的默认构造函数
// Constructor: records the sensor index and puts every member the
// adapter relies on into a known default state. No OMX calls are made
// here — all heavy setup happens later in initialize().
OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
{
    LOG_FUNCTION_NAME;

    // OMX core not brought up yet; component state is unknown until
    // OMX_GetHandle succeeds in initialize().
    mOmxInitialized = false;
    mComponentState = OMX_StateInvalid;
    mSensorIndex = sensor_index;
    mPictureRotation = 0;
    // Initial values
    mTimeSourceDelta = 0;
    onlyOnce = true;
    mDccData.pData = NULL;

    // Create all command/state semaphores with a count of 0 so the first
    // Wait on each blocks until the matching OMX event signals it.
    mInitSem.Create(0);
    mFlushSem.Create(0);
    mUsePreviewDataSem.Create(0);
    mUsePreviewSem.Create(0);
    mUseCaptureSem.Create(0);
    mUseReprocessSem.Create(0);
    mStartPreviewSem.Create(0);
    mStopPreviewSem.Create(0);
    mStartCaptureSem.Create(0);
    mStopCaptureSem.Create(0);
    mStopReprocSem.Create(0);
    mSwitchToLoadedSem.Create(0);
    mCaptureSem.Create(0);
    mSwitchToExecSem.Create(0);

    // No OMX component handle yet.
    mCameraAdapterParameters.mHandleComp = 0;

    mUserSetExpLock = OMX_FALSE;
    mUserSetWbLock = OMX_FALSE;

    // Frame-ownership counters used for buffer bookkeeping/debugging.
    mFramesWithDucati = 0;
    mFramesWithDisplay = 0;
    mFramesWithEncoder = 0;

#ifdef CAMERAHAL_OMX_PROFILING
    mDebugProfile = 0;
#endif

    LOG_FUNCTION_NAME_EXIT;
}
这其中只是对很多参数的默认初始化,接下来看看initialize方法
/*--------------------Camera
Adapter Class STARTS here-----------------------------*/
/**
 * Bring up the OMX camera component for this adapter: init the OMX core,
 * obtain the component handle, enable the preview port, select the sensor,
 * reset all runtime/3A state to defaults and start the command and OMX
 * callback handling threads.
 *
 * @param caps capability/property table for the selected sensor
 * @return NO_ERROR on success; NO_INIT/NO_MEMORY/BAD_VALUE or a translated
 *         OMX error on failure
 */
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
{
    LOG_FUNCTION_NAME;

    char value[PROPERTY_VALUE_MAX];
    const char *mountOrientationString = NULL;

    // Debug knobs read from Android system properties.
    property_get("debug.camera.showfps", value, "0");
    mDebugFps = atoi(value);
    property_get("debug.camera.framecounts", value, "0");
    mDebugFcs = atoi(value);

#ifdef CAMERAHAL_OMX_PROFILING
    property_get("debug.camera.profile", value, "0");
    mDebugProfile = atoi(value);
#endif

    TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    status_t ret = NO_ERROR;

    mLocalVersionParam.s.nVersionMajor = 0x1;
    mLocalVersionParam.s.nVersionMinor = 0x1;
    mLocalVersionParam.s.nRevision = 0x0 ;
    mLocalVersionParam.s.nStep =  0x0;

    mPending3Asettings = 0;//E3AsettingsAll;
    mPendingCaptureSettings = 0;
    mPendingPreviewSettings = 0;

    // A non-zero count means a previous init left a stale signal behind.
    if ( 0 != mInitSem.Count() )
    {
        CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
    }

    ///Update the preview and image capture port indexes
    mCameraAdapterParameters.mPrevPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
    // temp changed in order to build OMX_CAMERA_PORT_VIDEO_OUT_IMAGE;
    mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
    mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
    //currently not supported use preview port instead
    mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
    mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;

    // 1. Initialize the OMX core.
    eError = OMX_Init();
    if (eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
        return ErrorUtils::omxToAndroidError(eError);
    }
    mOmxInitialized = true;

    // 2. Initialize the callback handles.
    OMX_CALLBACKTYPE callbacks;
    callbacks.EventHandler    = android::OMXCameraAdapterEventHandler;
    callbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
    callbacks.FillBufferDone  = android::OMXCameraAdapterFillBufferDone;

    // 3. Get the handle to the OMX Component.
    eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
    if(eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
    }
    GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);

    mComponentState = OMX_StateLoaded;

    CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
    initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);

    // Disable all ports first; only the preview port is re-enabled below.
    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
                             OMX_CommandPortDisable,
                             OMX_ALL,
                             NULL);
    if(eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortDisable) -0x%x", eError);
    }
    GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);

    // 4. Register for port enable event.
    ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
                           OMX_EventCmdComplete,
                           OMX_CommandPortEnable,
                           mCameraAdapterParameters.mPrevPortIndex,
                           mInitSem);
    if(ret != NO_ERROR) {
        CAMHAL_LOGEB("Error in registering for event %d", ret);
        goto EXIT;
    }

    // 5. Enable PREVIEW Port.
    eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
                             OMX_CommandPortEnable,
                             mCameraAdapterParameters.mPrevPortIndex,
                             NULL);
    if(eError != OMX_ErrorNone) {
        CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
    }
    GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);

    // 6. Wait for the port enable event to occur.
    ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
    if ( NO_ERROR == ret ) {
        CAMHAL_LOGDA("-Port enable event arrived");
    } else {
        // Timed out: drop the registration we made in step 4 before bailing.
        ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
                           OMX_EventCmdComplete,
                           OMX_CommandPortEnable,
                           mCameraAdapterParameters.mPrevPortIndex,
                           NULL);
        CAMHAL_LOGEA("Timeout for enabling preview port expired!");
        goto EXIT;
    }

    // 7. Select the sensor.
    OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
    OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
    sensorSelect.eSensor = (OMX_SENSORSELECT) mSensorIndex;
    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error while selecting the sensor index as %d - 0x%x", mSensorIndex, eError);
        // NOTE(review): this returns directly instead of goto EXIT, so
        // performCleanupAfterError() is skipped on this path — confirm
        // whether that is intentional.
        return BAD_VALUE;
    } else {
        CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
    }

#ifdef CAMERAHAL_DEBUG
    printComponentVersion(mCameraAdapterParameters.mHandleComp);
#endif

    // 8. Initialize default parameter values.
    mBracketingEnabled = false;
    mZoomBracketingEnabled = false;
    mBracketingBuffersQueuedCount = 0;
    mBracketingRange = 1;
    mLastBracetingBufferIdx = 0;
    mBracketingBuffersQueued = NULL;
    mOMXStateSwitch = false;
    mBracketingSet = false;
#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
    mRawCapture = false;
    mYuvCapture = false;
#endif

    mCaptureSignalled = false;
    mCaptureConfigured = false;
    mReprocConfigured = false;
    mRecording = false;
    mWaitingForSnapshot = false;
    mPictureFormatFromClient = NULL;

    mCapabilitiesOpMode = MODE_MAX;
    mCapMode = INITIAL_MODE;
    mIPP = IPP_NULL;
    mVstabEnabled = false;
    mVnfEnabled = false;
    mBurstFrames = 1;
    mBurstFramesAccum = 0;
    mCapturedFrames = 0;
    mFlushShotConfigQueue = false;
    mPictureQuality = 100;
    mCurrentZoomIdx = 0;
    mTargetZoomIdx = 0;
    mPreviousZoomIndx = 0;
    mReturnZoomStatus = false;
    mZoomInc = 1;
    mZoomParameterIdx = 0;
    mExposureBracketingValidEntries = 0;
    mZoomBracketingValidEntries = 0;
    mSensorOverclock = false;
    mAutoConv = OMX_TI_AutoConvergenceModeMax;
    mManualConv = 0;
    mDeviceOrientation = 0;
    mCapabilities = caps;
    mZoomUpdating = false;
    mZoomUpdate = false;
    mGBCE = BRIGHTNESS_OFF;
    mGLBCE = BRIGHTNESS_OFF;
    mParameters3A.ExposureLock = OMX_FALSE;
    mParameters3A.WhiteBalanceLock = OMX_FALSE;

    mEXIFData.mGPSData.mAltitudeValid = false;
    mEXIFData.mGPSData.mDatestampValid = false;
    mEXIFData.mGPSData.mLatValid = false;
    mEXIFData.mGPSData.mLongValid = false;
    mEXIFData.mGPSData.mMapDatumValid = false;
    mEXIFData.mGPSData.mProcMethodValid = false;
    mEXIFData.mGPSData.mVersionIdValid = false;
    mEXIFData.mGPSData.mTimeStampValid = false;
    mEXIFData.mModelValid = false;
    mEXIFData.mMakeValid = false;

    //update the mDeviceOrientation with the sensor mount orientation.
    //So that the face detect will work before onOrientationEvent()
    //get triggered.
    CAMHAL_ASSERT(mCapabilities);
    mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
    CAMHAL_ASSERT(mountOrientationString);
    mDeviceOrientation = atoi(mountOrientationString);

    // Sensor index 2 is excluded from high-speed mode — presumably a
    // special sensor (e.g. stereo/secondary); TODO confirm.
    if (mSensorIndex != 2) {
        mCapabilities->setMode(MODE_HIGH_SPEED);
    }

    if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
        mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
    } else {
        mMaxZoomSupported = 1;
    }

    // 9. Initialize command handling thread.
    if(mCommandHandler.get() == NULL)
        mCommandHandler = new CommandHandler(this);

    if ( NULL == mCommandHandler.get() )
    {
        CAMHAL_LOGEA("Couldn't create command handler");
        return NO_MEMORY;
    }

    ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
    if ( ret != NO_ERROR )
    {
        if( ret == INVALID_OPERATION){
            CAMHAL_LOGDA("command handler thread already runnning!!");
            ret = NO_ERROR;
        } else {
            CAMHAL_LOGEA("Couldn't run command handlerthread");
            return ret;
        }
    }

    // 10. Initialize OMX callback handling thread.
    if(mOMXCallbackHandler.get() == NULL)
        mOMXCallbackHandler = new OMXCallbackHandler(this);

    if ( NULL == mOMXCallbackHandler.get() )
    {
        CAMHAL_LOGEA("Couldn't create omx callback handler");
        return NO_MEMORY;
    }

    ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
    if ( ret != NO_ERROR )
    {
        if( ret == INVALID_OPERATION){
            CAMHAL_LOGDA("omx callback handler thread already runnning!!");
            ret = NO_ERROR;
        } else {
            CAMHAL_LOGEA("Couldn't run omx callback handler thread");
            return ret;
        }
    }

    OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
    OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
    mRegionPriority.nPortIndex = OMX_ALL;
    mFacePriority.nPortIndex = OMX_ALL;

    //Setting this flag ensures that the first setParameter call will apply all 3A settings
    //and will not conditionally apply based on current values.
    mFirstTimeInit = true;

    //Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
    //Ducati will return an error otherwise.
    mSetFormatDone = false;

    memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
    memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
    mMeasurementEnabled = false;
    mFaceDetectionRunning = false;
    mFaceDetectionPaused = false;
    mFDSwitchAlgoPriority = false;

    memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
    memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
    memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
    memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));

    // 11. Initialize 3A defaults (translated from HAL defaults to OMX
    //     values through the lookup tables).
    mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
    mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
    mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
    mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
    mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
    mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
    mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
    mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
    mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
    mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
    mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
    mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
    mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
    mParameters3A.ExposureLock = OMX_FALSE;
    mParameters3A.FocusLock = OMX_FALSE;
    mParameters3A.WhiteBalanceLock = OMX_FALSE;

    mParameters3A.ManualExposure = 0;
    mParameters3A.ManualExposureRight = 0;
    mParameters3A.ManualGain = 0;
    mParameters3A.ManualGainRight = 0;

    mParameters3A.AlgoFixedGamma = OMX_TRUE;
    mParameters3A.AlgoNSF1 = OMX_TRUE;
    mParameters3A.AlgoNSF2 = OMX_TRUE;
    mParameters3A.AlgoSharpening = OMX_TRUE;
    mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
    mParameters3A.AlgoGIC = OMX_TRUE;

    LOG_FUNCTION_NAME_EXIT;
    return ErrorUtils::omxToAndroidError(eError);

EXIT:
    CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
    performCleanupAfterError();
    LOG_FUNCTION_NAME_EXIT;
    return ErrorUtils::omxToAndroidError(eError);
}
这个initialize的过程做的事情还是比较多的,这里关系到OMX的很多知识点,是之后要研究的,现在只是先知道这里
在完成initialize方法之后,
mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
这两个方法同样非常重要,首先我们这里调用的是mCameraAdapter的方法,mCameraAdapter是OMXCameraAdapter这个类的实例,但是其实OMXCameraAdapter这个类中是没有以上这两个方法的,但是我们接着看,OMXCameraAdapter这个类的定义
class OMXCameraAdapter : public BaseCameraAdapter
OMXCameraAdapter继承于BaseCameraAdapter,不错,上面的两个方法是在BaseCameraAdapter这个类中实现的,我们接着看看
/**
 * Remember the callback (and its cookie) to invoke when captured image
 * buffers should be released back to their owner.
 */
status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data)
{
    LOG_FUNCTION_NAME;

    mReleaseImageBuffersCallback = callback;
    mReleaseData = user_data;

    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}
/**
 * Remember the callback (and its cookie) to invoke when an image capture
 * sequence completes.
 */
status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data)
{
    LOG_FUNCTION_NAME;

    mEndImageCaptureCallback = callback;
    mEndCaptureData = user_data;

    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}
这两个方法实现方式一模一样,只是将传入的函数指针和user_data保存到其中而已
mEndImageCaptureCallback这个方法会在OMXCameraAdapter的fillThisBuffer中被调用,他的实现在
void releaseImageBuffers(void *userData)
{
LOG_FUNCTION_NAME;
if (NULL != userData) {
CameraHal *c = reinterpret_cast<CameraHal *>(userData);//user_data就是指向我们实例化的OMXCameraAdapter变量mCameraAdapter
c->freeImageBufs();//接着调用mCameraAdapter的freeImageBufs方法
}
LOG_FUNCTION_NAME_EXIT;
}
/**
 * Release the image capture buffers through the MemoryManager.
 *
 * @return NO_ERROR on success, -EINVAL if no image buffers are allocated,
 *         or the MemoryManager's error code.
 */
status_t CameraHal::freeImageBufs()
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    // Original wrapped this in `if (NO_ERROR == ret)` right after
    // setting ret = NO_ERROR — a guard that was always true; removed.
    if( NULL != mImageBufs )
    {
        ///@todo Pluralise the name of this method to freeBuffers
        ret = mMemoryManager->freeBuffer(mImageBufs);
        mImageBufs = NULL;
    }
    else
    {
        ret = -EINVAL;
    }

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
mReleaseImageBuffersCallback 这个方法将在OMXCapture.cpp中被调用,他的实现和上面是相似的,就不再说了
二.AppCallbackNotifier的实例化和初始化
AppCallbackNotifier这个类是没有实现构造函数的,我们就先看看他的initialize方法吧
/**
* NotificationHandler class
*/
///Initialization function for AppCallbackNotifier
// Set up the notifier: reset state, create the notification thread and
// start it running. Returns NO_MEMORY if the thread cannot be created,
// otherwise the thread's run() status.
status_t AppCallbackNotifier::initialize()
{
    LOG_FUNCTION_NAME;

    mPreviewMemory = 0;

    mMeasurementEnabled = false;

    mNotifierState = NOTIFIER_STOPPED;

    ///Create the app notifier thread
    mNotificationThread = new NotificationThread(this);
    if(!mNotificationThread.get())
    {
        CAMHAL_LOGEA("Couldn't create Notification thread");
        return NO_MEMORY;
    }

    ///Start the notification thread
    status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
    if(ret!=NO_ERROR)
    {
        CAMHAL_LOGEA("Couldn't run NotificationThread");
        mNotificationThread.clear();
        return ret;
    }

    mUseMetaDataBufferMode = true;
    mRawAvailable = false;

    mRecording = false;
    mPreviewing = false;

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
这个initialize方法做的事情相对简单但是又极其重要,它创建了一个NotificationThread,然后运行这个thread
这里我们看看这个很忙碌的线程都干了些什么事情,之所以说他忙碌,是因为他一直不停的等待消息,有消息就处理,不能磨叽磨叽的
// One iteration of the notification thread loop: block until a control
// message, an event or a frame arrives on any of the three queues, then
// dispatch it. Returns false when the thread should exit.
bool AppCallbackNotifier::notificationThread()
{
    bool shouldLive = true;
    status_t ret;

    LOG_FUNCTION_NAME;

    //CAMHAL_LOGDA("Notification Thread waiting for message");
    ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
                                            &mEventQ,
                                            &mFrameQ,
                                            AppCallbackNotifier::NOTIFIER_TIMEOUT);

    //CAMHAL_LOGDA("Notification Thread received message");

    // A message arrived on at least one queue; check each queue in turn
    // and dispatch accordingly.
    if (mNotificationThread->msgQ().hasMsg()) {
        ///Received a message from CameraHal, process it
        CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
        // Control messages first; a NOTIFIER_EXIT message ends the thread.
        shouldLive = processMessage();
        if(!shouldLive) {
            CAMHAL_LOGDA("Notification Thread exiting.");
            return shouldLive;
        }
    }

    if(mEventQ.hasMsg()) {
        ///Received an event from one of the event providers
        CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
        notifyEvent(); // dispatch the pending event to the application
    }

    if(mFrameQ.hasMsg()) {
        ///Received a frame from one of the frame providers
        //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
        notifyFrame(); // dispatch the pending frame to the application
    }

    LOG_FUNCTION_NAME_EXIT;
    return shouldLive;
}
这里先看一下notifyEvent方法的处理过程:
// Pop one event from mEventQ and forward it to the application through the
// registered notify/data callbacks, honoring the msg-type-enabled mask.
// Drops the event silently if the notifier is not in NOTIFIER_STARTED state.
void AppCallbackNotifier::notifyEvent()
{
    ///Receive and send the event notifications to app
    TIUTILS::Message msg;
    LOG_FUNCTION_NAME;
    {
        // Hold the lock only while dequeuing; callbacks below run unlocked.
        Mutex::Autolock lock(mLock);
        if ( !mEventQ.hasMsg() ) {
            return;
        } else {
            mEventQ.get(&msg);
        }
    }
    bool ret = true;
    CameraHalEvent *evt = NULL;
    CameraHalEvent::FocusEventData *focusEvtData;
    CameraHalEvent::ZoomEventData *zoomEvtData;
    CameraHalEvent::MetaEventData metaEvtData;

    if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
    {
        return;
    }

    switch(msg.command)
    {
        case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT:

            evt = ( CameraHalEvent * ) msg.arg1;

            if ( NULL == evt )
            {
                CAMHAL_LOGEA("Invalid CameraHalEvent");
                return;
            }

            switch(evt->mEventType)
            {
                case CameraHalEvent::EVENT_SHUTTER:

                    if ( ( NULL != mCameraHal ) &&
                         ( NULL != mNotifyCb ) &&
                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
                    {
                        mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
                    }
                    mRawAvailable = false;

                    break;

                case CameraHalEvent::EVENT_FOCUS_LOCKED:
                case CameraHalEvent::EVENT_FOCUS_ERROR:

                    focusEvtData = &evt->mEventData->focusEvent;
                    // CAMERA_MSG_FOCUS is one-shot: disable it before
                    // notifying so only one focus callback is delivered.
                    if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_SUCCESS ) &&
                         ( NULL != mCameraHal ) &&
                         ( NULL != mNotifyCb ) &&
                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) {
                        mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
                        mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
                    } else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_FAIL ) &&
                                ( NULL != mCameraHal ) &&
                                ( NULL != mNotifyCb ) &&
                                ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) {
                        mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
                        mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
                    }

                    break;

                case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:

                    zoomEvtData = &evt->mEventData->zoomEvent;

                    if ( ( NULL != mCameraHal ) &&
                         ( NULL != mNotifyCb) &&
                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
                    {
                        mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
                    }

                    break;

                case CameraHalEvent::EVENT_METADATA:

                    metaEvtData = evt->mEventData->metadataEvent;

                    if ( ( NULL != mCameraHal ) &&
                         ( NULL != mNotifyCb) &&
                         ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
                    {
                        // WA for an issue inside CameraService
                        camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);

                        mDataCb(CAMERA_MSG_PREVIEW_METADATA,
                                tmpBuffer,
                                0,
                                metaEvtData->getMetadataResult(),
                                mCallbackCookie);

                        metaEvtData.clear();

                        if ( NULL != tmpBuffer ) {
                            tmpBuffer->release(tmpBuffer);
                        }
                    }

                    break;

                case CameraHalEvent::ALL_EVENTS:
                    break;
                default:
                    break;
            }

            break;
    }

    // The notifier owns the event object once dequeued.
    if ( NULL != evt )
    {
        delete evt;
    }

    LOG_FUNCTION_NAME_EXIT;
}
针对每个不同消息处理方法基本是相同的,主要通过上次register好的callback方法,这个之前已经在文章中说过,不在多做说明
这里还是比较重要的,但不是这篇文章的重点,之后有机会还会在说
再看一下notifyFrame的处理过程:
算了,我还是不把他的处理过程贴出来,怕吓到人,挺庞大的,处理了很多事件,当然重要,先知道,再看吧
这里你处理了notifyEvent和notifyFrame这里消息,但是这些消息是从哪里来呢?知道了接收者,那么就必须找到发送者
mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
不错,就是在这里指定了event和frame消息的提供者(provider)
先看看setEventProvider的实现
// Register the object that will feed camera events (focus, zoom, shutter,
// metadata...) into this notifier, and enable the requested event bits.
void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
{
    LOG_FUNCTION_NAME;
    ///@remarks There is no NULL check here. We will check for NULL when
    ///         we get start command from CameraHal
    ///@Remarks Currently only one event provider (CameraAdapter) is supported
    ///@todo    Have an array of event providers for each event bitmask
    mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
    // NOTE(review): operator new throws rather than returning NULL in
    // standard C++, so this branch is effectively dead — confirm whether
    // the build uses a non-throwing new.
    if ( NULL == mEventProvider )
    {
        CAMHAL_LOGEA("Error in creating EventProvider");
    }
    else
    {
        mEventProvider->enableEventNotification(eventMask);
    }

    LOG_FUNCTION_NAME_EXIT;
}
实例化了一个EventProvider的对象,并且enable EventNotification
再看看setFrameProvider的实现吧
// Register the object that will feed camera frames into this notifier and
// subscribe to the frame types the notifier currently handles.
void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
{
    LOG_FUNCTION_NAME;
    ///@remarks There is no NULL check here. We will check for NULL when
    ///         we get the start command from CameraAdapter
    mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
    if ( NULL == mFrameProvider )
    {
        CAMHAL_LOGEA("Error in creating FrameProvider");
    }
    else
    {
        //Register only for captured images and RAW for now
        //TODO: Register for and handle all types of frames
        mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
        mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
    }

    LOG_FUNCTION_NAME_EXIT;
}
方法基本一致,实例化一个FrameProvider的对象,并且enable FrameNotification
但是这里必须分析一下FrameProvider的构造方法
// Stores the frame source, the opaque client cookie and the relay callback.
// No work happens here; frames flow only after enableFrameNotification().
FrameProvider(FrameNotifier *fn, void* cookie, frame_callback frameCallback)
    :mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { }
这里通过传入的参数初始化了mFrameNotifier、mCookie和mFrameCallback,看似没什么却至关重要,后面你会知道的,EventProvider也是同样的道理
FrameProvider和EventProvider接口的实现在CameraHalUtilClasses.cpp文件
接着往下走:Start the callback notifier
mAppCallbackNotifier->start()
// Transition the notifier to NOTIFIER_STARTED after verifying that both a
// frame provider and an event provider have been registered. Also clears
// the pending encoder queue.
status_t AppCallbackNotifier::start()
{
    LOG_FUNCTION_NAME;
    if(mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED)
    {
        CAMHAL_LOGDA("AppCallbackNotifier already running");
        LOG_FUNCTION_NAME_EXIT;
        return ALREADY_EXISTS;
    }

    ///Check whether initial conditions are met for us to start
    ///A frame provider should be available, if not return error
    if(!mFrameProvider)
    {
        ///AppCallbackNotifier not properly initialized
        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL");
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
    }

    ///At least one event notifier should be available, if not return error
    ///@todo Modify here when there is an array of event providers
    if(!mEventProvider)
    {
        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL");
        LOG_FUNCTION_NAME_EXIT;
        ///AppCallbackNotifier not properly initialized
        return NO_INIT;
    }

    mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED;
    CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n");

    gEncoderQueue.clear();

    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}
只是做了一下检查,最后clear了编码队列
接着走:mAppCallbackNotifier->setMeasurements
/**
 * Toggle measurement-data handling. When enabling, additionally subscribe
 * to FRAME_DATA_SYNC notifications from the frame provider.
 */
void AppCallbackNotifier::setMeasurements(bool enable)
{
    Mutex::Autolock lock(mLock);

    LOG_FUNCTION_NAME;

    mMeasurementEnabled = enable;

    if (enable) {
        mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
    }

    LOG_FUNCTION_NAME_EXIT;
}
AppCallbackNotifier对象实例化以及初始化到这里完成了
三.MemoryManager的实例化和初始化
无疑这个类跟内存有着很密切的关系,这个类定义了自己的构造函数,但是这里不展开说了,实在是大材小用了,只有一条语句,直接看看他的initialize方法吧
/**
 * Lazily open the ion device; idempotent — a second call with a valid
 * fd is a no-op. Returns NO_INIT if /dev/ion cannot be opened.
 */
status_t MemoryManager::initialize() {
    // Already initialized — keep the existing ion fd.
    if ( mIonFd != -1 ) {
        return OK;
    }

    mIonFd = ion_open();
    if ( mIonFd < 0 ) {
        // Log the failing return value before resetting to the sentinel.
        CAMHAL_LOGE("ion_open() failed, error: %d", mIonFd);
        mIonFd = -1;
        return NO_INIT;
    }

    return OK;
}
只是调用了ion_open这个方法获得了一个fd
// Open the ION memory-allocator device node and hand back its file
// descriptor. On failure the (negative) result of open() is returned
// unchanged after logging, so callers can test `fd < 0`.
int ion_open()
{
    const int fd = open("/dev/ion", O_RDWR);
    if (fd < 0) {
        LOGE("open /dev/ion failed!\n");
    }
    return fd;
}
开始时我晕了,ion到底是什么device啊?赶紧查一查
这里我不做过多说明,可以看看这个分享,同时感谢大牛的分享:/article/9235714.html
ION与PMEM类似,管理一或多个内存池,其中有一些会在boot time的时候预先分配,以备给特殊的硬件使用(GPU,显示控制器等)。它通过ION heaps来管理这些pool。
它可以被userspace的process之间或者内核中的模块之间进行内存共享
四.SensorListener的实例化和初始化
在SensorListener的构造函数中对一些参数进行了默认初始化,这里不再说明,直接看看它的initialize方法实现
/// Set up the sensor event pipeline: create a SensorEventQueue from the
/// SensorManager singleton, attach its fd to a Looper, then start a
/// dedicated SensorLooperThread that polls that looper for sensor events.
/// @return NO_INIT if the event queue cannot be created, NO_MEMORY if the
///         looper thread cannot be allocated, otherwise the thread's
///         run() status.
status_t SensorListener::initialize() {
status_t ret = NO_ERROR;
SensorManager& mgr(SensorManager::getInstance());
LOG_FUNCTION_NAME;
sp<Looper> mLooper;
mSensorEventQueue = mgr.createEventQueue();
if (mSensorEventQueue == NULL) {
CAMHAL_LOGEA("createEventQueue returned NULL");
ret = NO_INIT;
goto out;
}
mLooper = new Looper(false);
// sensor_events_listener fires on the looper thread whenever the event
// queue fd becomes readable; `this` is passed through as its cookie.
mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
if (mSensorLooperThread.get() == NULL)
mSensorLooperThread = new SensorLooperThread(mLooper.get());
if (mSensorLooperThread.get() == NULL) {
CAMHAL_LOGEA("Couldn't create sensor looper thread");
ret = NO_MEMORY;
goto out;
}
ret = mSensorLooperThread->run("sensor
looper thread", PRIORITY_URGENT_DISPLAY);
if (ret == INVALID_OPERATION){
CAMHAL_LOGDA("thread already running ?!?");
} else if (ret != NO_ERROR) {
CAMHAL_LOGEA("couldn't run thread");
goto out;
}
out:
LOG_FUNCTION_NAME_EXIT;
return ret;
}
看到这里我个人是感觉这里挺抽象的,这里先简单说明一下,这里首先获取到SensorManager,通过这个SensorManager创建一个EventQueue,然后实例化一个Looper对象,将这个EventQueue的fd添加到mLooper中,最后创建一个SensorLooperThread,并启动这个线程
下一步:mSensorListener->setCallbacks(orientation_cb, this);
/// Register the orientation callback and its cookie. A NULL callback
/// leaves any previously registered callback in place; the cookie is
/// always updated.
void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) {
LOG_FUNCTION_NAME;
if (orientation_cb) {
mOrientationCb = orientation_cb;
}
mCbCookie = cookie;
LOG_FUNCTION_NAME_EXIT;
}
这里的方法跟上面讲过的setcallback方法其实基本是相通的,注册的这个回调函数是在以下方法中被调用到的
/// Forward an orientation event to the registered callback, but only when
/// a callback exists and orientation sensing is currently enabled
/// (SENSOR_ORIENTATION bit set in sensorsEnabled).
void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
LOG_FUNCTION_NAME;
Mutex::Autolock lock(&mLock);
if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
mOrientationCb(orientation, tilt, mCbCookie);
}
LOG_FUNCTION_NAME_EXIT;
}
也就是调用以下方法:
/// Trampoline registered via SensorListener::setCallbacks(): routes an
/// orientation event back into the owning CameraHal instance.
static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
CameraHal *camera = NULL;
if (cookie) {
camera = (CameraHal*) cookie;// cookie is the CameraHal (`this`) passed at registration time
camera->onOrientationEvent(orientation, tilt);// delegate to CameraHal::onOrientationEvent
}
}
我们接着跟踪下去:
/**
Callback function to receive orientation events from SensorListener.
Simply forwards the event to the camera adapter, if one exists.
*/
void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
LOG_FUNCTION_NAME;
if ( NULL != mCameraAdapter ) {
mCameraAdapter->onOrientationEvent(orientation, tilt);
}
LOG_FUNCTION_NAME_EXIT;
}
继续,实现在OMXCameraAdapter中
/// Convert a device-orientation event into a sensor-relative rotation
/// (mount orientation plus/minus the reported orientation, normalized to
/// [0..360)) and, when it changes, update face detection accordingly.
void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
{
LOG_FUNCTION_NAME;
static const unsigned int DEGREES_TILT_IGNORE = 45;
// if tilt angle is greater than DEGREES_TILT_IGNORE
// we are going to ignore the orientation returned from
// sensor. the orientation returned from sensor is not
// reliable. Value of DEGREES_TILT_IGNORE may need adjusting
if (tilt > DEGREES_TILT_IGNORE) {
return;
}
int mountOrientation = 0;
bool isFront = false;
if (mCapabilities) {
const char * const mountOrientationString =
mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
if (mountOrientationString) {
mountOrientation = atoi(mountOrientationString);
}
const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
if (facingString) {
isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
}
}
// direction is a constant sign for facing, meaning the rotation direction relative to device
// +1 (clockwise) for back sensor and -1 (counter-clockwise) for front sensor
const int direction = isFront ? -1 : 1;
int rotation = mountOrientation + direction*orientation;
// crop the calculated value to [0..360) range
while ( rotation < 0 ) rotation += 360;
rotation %= 360;
if (rotation != mDeviceOrientation) {
mDeviceOrientation = rotation;
// restart face detection with new rotation
setFaceDetectionOrientation(mDeviceOrientation);
}
CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
LOG_FUNCTION_NAME_EXIT;
}
这里暂时不做分析
最后一步:mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION)
/// Enable the requested sensor type. For SENSOR_ORIENTATION this enables
/// the default accelerometer at a 100 ms event rate; the sensorsEnabled
/// bitmask makes the call idempotent.
void SensorListener::enableSensor(sensor_type_t type) {
Sensor const* sensor;
SensorManager& mgr(SensorManager::getInstance());
LOG_FUNCTION_NAME;
Mutex::Autolock lock(&mLock);
if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->enableSensor(sensor);
mSensorEventQueue->setEventRate(sensor, ms2ns(100));
sensorsEnabled |= SENSOR_ORIENTATION;
}
LOG_FUNCTION_NAME_EXIT;
}
使能sensor
到这里为止CameraHal的这个初始化过程完成了,这里个人感觉十分重要,其他的接口实现固然重要,但是清清楚楚的知道这个初始化过程会让你hal与上层和底层的交互先有一个大体的认知,这比你一头扎进去到处乱撞要高效很多
Android Camere Study 待续。。。。
作为上层和底层的中转站,hal层在android中起到的作用不言而喻,针对camera的研究已经有一段时间了,这里自己还是决定静下心来好好的分析一下CameraHal.cpp这里的代码,
对自己更好的理解hal与上层和底层的交互作用不可小觑,特别对理解hal与kernel driver的交互过程作用很大
不多说废话了,开始今天的主题
我们首先从CameraHal的初始化,那么他是从哪里开始化的呢?这里之前的文章中已经有过,只是没有重点介绍,这里还是说一下吧
是在camera最初open的时候会调用到camerahal_module.cpp中的以下方法进行初始化的
/*******************************************************************
* implementation of camera_module functions
*******************************************************************/
/* open device handle to one of the cameras
*
* assume camera service will keep singleton of each camera
* so this function will always only be called once per camera instance
*/
int camera_device_open(const hw_module_t* module, const char* name,
hw_device_t** device)
{
int rv = 0;
int num_cameras = 0;
int cameraid;
ti_camera_device_t* camera_device = NULL;
camera_device_ops_t* camera_ops = NULL;
android::CameraHal* camera = NULL;
android::CameraProperties::Properties* properties = NULL;
android::Mutex::Autolock lock(gCameraHalDeviceLock);
LOGI("camera_device open");
if (name != NULL) {
// `name` carries the camera id as a decimal string.
cameraid = atoi(name);
num_cameras = gCameraProperties.camerasSupported();
// NOTE(review): this uses `>` rather than `>=`; if camera ids are
// 0-based, cameraid == num_cameras slips through — verify upstream.
if(cameraid > num_cameras)
{
LOGE("camera service provided cameraid out of bounds, "
"cameraid = %d, num supported = %d",
cameraid, num_cameras);
rv = -EINVAL;
goto fail;
}
if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
{
LOGE("maximum number of cameras already open");
rv = -ENOMEM;
goto fail;
}
camera_device = (ti_camera_device_t*)malloc(sizeof(*camera_device));
if(!camera_device)
{
LOGE("camera_device allocation fail");
rv = -ENOMEM;
goto fail;
}
camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
if(!camera_ops)
{
LOGE("camera_ops allocation fail");
rv = -ENOMEM;
goto fail;
}
memset(camera_device, 0, sizeof(*camera_device));
memset(camera_ops, 0, sizeof(*camera_ops));
camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
camera_device->base.common.version = 0;
camera_device->base.common.module = (hw_module_t *)(module);
camera_device->base.common.close = camera_device_close;
camera_device->base.ops = camera_ops;
// Populate the camera HAL v1 function table with this module's wrappers.
camera_ops->set_preview_window = camera_set_preview_window;
camera_ops->set_callbacks = camera_set_callbacks;
camera_ops->enable_msg_type = camera_enable_msg_type;
camera_ops->disable_msg_type = camera_disable_msg_type;
camera_ops->msg_type_enabled = camera_msg_type_enabled;
camera_ops->start_preview = camera_start_preview;
camera_ops->stop_preview = camera_stop_preview;
camera_ops->preview_enabled = camera_preview_enabled;
camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
camera_ops->start_recording = camera_start_recording;
camera_ops->stop_recording = camera_stop_recording;
camera_ops->recording_enabled = camera_recording_enabled;
camera_ops->release_recording_frame = camera_release_recording_frame;
camera_ops->auto_focus = camera_auto_focus;
camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
camera_ops->take_picture = camera_take_picture;
camera_ops->cancel_picture = camera_cancel_picture;
camera_ops->set_parameters = camera_set_parameters;
camera_ops->get_parameters = camera_get_parameters;
camera_ops->put_parameters = camera_put_parameters;
camera_ops->send_command = camera_send_command;
camera_ops->release = camera_release;
camera_ops->dump = camera_dump;
*device = &camera_device->base.common;
// -------- TI specific stuff --------
camera_device->cameraid = cameraid;
if(gCameraProperties.getProperties(cameraid, &properties) < 0)
{
LOGE("Couldn't get camera properties");
rv = -ENOMEM;
goto fail;
}
// **************** The key part is here: create and initialize CameraHal ****************
camera = new android::CameraHal(cameraid);
if(!camera)
{
LOGE("Couldn't create instance of CameraHal class");
rv = -ENOMEM;
goto fail;
}
if(properties && (camera->initialize(properties) != android::NO_ERROR))
{
LOGE("Couldn't initialize camera instance");
rv = -ENODEV;
goto fail;
}
// Cache the instance so later HAL calls can look it up by camera id.
gCameraHals[cameraid] = camera;
gCamerasOpen++;
}
return rv;
fail:
// Unified cleanup: free whatever was allocated before the failure.
if(camera_device) {
free(camera_device);
camera_device = NULL;
}
if(camera_ops) {
free(camera_ops);
camera_ops = NULL;
}
if(camera) {
delete camera;
camera = NULL;
}
*device = NULL;
return rv;
}
上面横线中间的部分就是camerahal的初始化了,首先new camerahal,并且调用initialize方法,最后将实例化好的camerahal保存到gCameraHals这个数组中,之后会通过这个数组找到我们实例化好的camerahal,实现我们对hal层接口的使用
现在我们就开始看看camerahal的initialize方法的实现
/**
@brief Initialize the Camera HAL
Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager
@param None
@return NO_ERROR - On success
NO_MEMORY - On failure to allocate memory for any of the objects
@remarks Camera Hal internal function
*/
status_t CameraHal::initialize(CameraProperties::Properties* properties)
{
LOG_FUNCTION_NAME;
int sensor_index = 0;
const char* sensor_name = NULL;
///Initialize the event mask used for registering an event provider for AppCallbackNotifier
///Currently, registering all events as to be coming from CameraAdapter
int32_t eventMask = CameraHalEvent::ALL_EVENTS;
// Get my camera properties
mCameraProperties = properties;
if(!mCameraProperties)
{
goto fail_loop;
}
// Dump the properties of this Camera
// will only print if DEBUG macro is defined
mCameraProperties->dump();
if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
{
sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
}
if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_NAME)) != 0 ) {
sensor_name = mCameraProperties->get(CameraProperties::CAMERA_NAME);
}
CAMHAL_LOGDB("Sensor index= %d; Sensor name= %s", sensor_index, sensor_name);
// 1. Key step: pick the adapter implementation — V4LCameraAdapter for
//    USB cameras, OMXCameraAdapter otherwise (this walkthrough follows
//    the OMX branch).
if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
#ifdef V4L_CAMERA_ADAPTER
mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
#endif
}
else {
#ifdef OMX_CAMERA_ADAPTER
mCameraAdapter = OMXCameraAdapter_Factory(sensor_index);
#endif
}
if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
{
CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
mCameraAdapter = NULL;
goto fail_loop;
}
mCameraAdapter->incStrong(mCameraAdapter);
mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
// 2. Key step: create an AppCallbackNotifier and initialize it.
if(!mAppCallbackNotifier.get())
{
/// Create the callback notifier
mAppCallbackNotifier = new AppCallbackNotifier();
if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR))
{
CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier");
goto fail_loop;
}
}
// 3. Key step: create a MemoryManager and initialize it.
if(!mMemoryManager.get())
{
/// Create Memory Manager
mMemoryManager = new MemoryManager();
if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
{
CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
goto fail_loop;
}
}
///Setup the class dependencies...
///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from.
///CameraAdapter is the one which provides those events
///Set it as the frame and event providers for AppCallbackNotifier
///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events
/// That way, if events can come from DisplayAdapter in future, we will be able to add it as provider
/// for any event
mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
///Any dynamic errors that happen during the camera use case has to be propagated back to the application
///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
///Set it as the error handler for CameraAdapter
mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get());
///Start the callback notifier
if(mAppCallbackNotifier->start() != NO_ERROR)
{
CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
goto fail_loop;
}
CAMHAL_LOGDA("Started AppCallbackNotifier..");
mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
// 4. Key step: set up the default camera parameters.
///Initialize default parameters
initDefaultParameters();
if ( setParameters(mParameters) != NO_ERROR )
{
CAMHAL_LOGEA("Failed to set default parameters?!");
}
// 5. Key step: create a SensorListener, initialize it, hook up the
//    orientation callback and enable orientation events.
// register for sensor events
mSensorListener = new SensorListener();
if (mSensorListener.get()) {
if (mSensorListener->initialize() == NO_ERROR) {
mSensorListener->setCallbacks(orientation_cb, this);
mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
} else {
CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
mSensorListener.clear();
mSensorListener = NULL;
}
}
LOG_FUNCTION_NAME_EXIT;
return NO_ERROR;
fail_loop:
///Free up the resources because we failed somewhere up
deinitialize();
LOG_FUNCTION_NAME_EXIT;
return NO_MEMORY;
}
上面一共五个步骤,接下来将一一分析,自我感觉很有这样的必要
一.OMXCameraAdapter的实例化和初始化
首先看一下OMXCameraAdapter的默认构造函数
/// Constructor: records the sensor index and resets all bookkeeping
/// state. The heavy lifting (OMX core setup) happens in initialize().
OMXCameraAdapter::OMXCameraAdapter(size_t sensor_index)
{
LOG_FUNCTION_NAME;
mOmxInitialized = false;
mComponentState = OMX_StateInvalid;
mSensorIndex = sensor_index;
mPictureRotation = 0;
// Initial values
mTimeSourceDelta = 0;
onlyOnce = true;
mDccData.pData = NULL;
// Semaphores used to synchronize with asynchronous OMX callbacks;
// all start with a count of zero.
mInitSem.Create(0);
mFlushSem.Create(0);
mUsePreviewDataSem.Create(0);
mUsePreviewSem.Create(0);
mUseCaptureSem.Create(0);
mUseReprocessSem.Create(0);
mStartPreviewSem.Create(0);
mStopPreviewSem.Create(0);
mStartCaptureSem.Create(0);
mStopCaptureSem.Create(0);
mStopReprocSem.Create(0);
mSwitchToLoadedSem.Create(0);
mCaptureSem.Create(0);
mSwitchToExecSem.Create(0);
mCameraAdapterParameters.mHandleComp = 0;
mUserSetExpLock = OMX_FALSE;
mUserSetWbLock = OMX_FALSE;
// Outstanding-frame counters for the downstream consumers.
mFramesWithDucati = 0;
mFramesWithDisplay = 0;
mFramesWithEncoder = 0;
#ifdef CAMERAHAL_OMX_PROFILING
mDebugProfile = 0;
#endif
LOG_FUNCTION_NAME_EXIT;
}
这其中只是对很多参数的默认初始化,接下来看看initialize方法
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
/// Bring up the OMX camera component: initialize the OMX core, obtain a
/// component handle, enable the preview port, select the sensor, reset
/// all adapter state, and start the command/OMX-callback handler threads.
/// @return NO_ERROR on success, NO_INIT/NO_MEMORY/BAD_VALUE or a
///         translated OMX error on failure.
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
{
LOG_FUNCTION_NAME;
char value[PROPERTY_VALUE_MAX];
const char *mountOrientationString = NULL;
// Debug switches read from system properties.
property_get("debug.camera.showfps", value, "0");
mDebugFps = atoi(value);
property_get("debug.camera.framecounts", value, "0");
mDebugFcs = atoi(value);
#ifdef CAMERAHAL_OMX_PROFILING
property_get("debug.camera.profile", value, "0");
mDebugProfile = atoi(value);
#endif
TIMM_OSAL_ERRORTYPE osalError = OMX_ErrorNone;
OMX_ERRORTYPE eError = OMX_ErrorNone;
status_t ret = NO_ERROR;
mLocalVersionParam.s.nVersionMajor = 0x1;
mLocalVersionParam.s.nVersionMinor = 0x1;
mLocalVersionParam.s.nRevision = 0x0 ;
mLocalVersionParam.s.nStep = 0x0;
mPending3Asettings = 0;//E3AsettingsAll;
mPendingCaptureSettings = 0;
mPendingPreviewSettings = 0;
// A non-zero count means a previous init left the semaphore unbalanced.
if ( 0 != mInitSem.Count() )
{
CAMHAL_LOGEB("Error mInitSem semaphore count %d", mInitSem.Count());
LOG_FUNCTION_NAME_EXIT;
return NO_INIT;
}
///Update the preview and image capture port indexes
mCameraAdapterParameters.mPrevPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_PREVIEW;
// temp changed in order to build OMX_CAMERA_PORT_VIDEO_OUT_IMAGE;
mCameraAdapterParameters.mImagePortIndex = OMX_CAMERA_PORT_IMAGE_OUT_IMAGE;
mCameraAdapterParameters.mMeasurementPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_MEASUREMENT;
//currently not supported use preview port instead
mCameraAdapterParameters.mVideoPortIndex = OMX_CAMERA_PORT_VIDEO_OUT_VIDEO;
mCameraAdapterParameters.mVideoInPortIndex = OMX_CAMERA_PORT_VIDEO_IN_VIDEO;
// 1.OMX_Init
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
return ErrorUtils::omxToAndroidError(eError);
}
mOmxInitialized = true;
// 2.Initialize the callback handles
OMX_CALLBACKTYPE callbacks;
callbacks.EventHandler = android::OMXCameraAdapterEventHandler;
callbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
callbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
// 3.Get the handle to the OMX Component
eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_GetHandle -0x%x", eError);
}
GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);
mComponentState = OMX_StateLoaded;
CAMHAL_LOGVB("OMX_GetHandle -0x%x sensor_index = %lu", eError, mSensorIndex);
initDccFileDataSave(&mCameraAdapterParameters.mHandleComp, mCameraAdapterParameters.mPrevPortIndex);
// Disable all ports first; only the preview port is re-enabled below.
eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
OMX_CommandPortDisable,
OMX_ALL,
NULL);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortDisable) -0x%x", eError);
}
GOTO_EXIT_IF((eError != OMX_ErrorNone), eError);
// 4.Register for port enable event
ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandPortEnable,
mCameraAdapterParameters.mPrevPortIndex,
mInitSem);
if(ret != NO_ERROR) {
CAMHAL_LOGEB("Error in registering for event %d", ret);
goto EXIT;
}
// 5.Enable PREVIEW Port
eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,
OMX_CommandPortEnable,
mCameraAdapterParameters.mPrevPortIndex,
NULL);
if(eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_SendCommand(OMX_CommandPortEnable) -0x%x", eError);
}
GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);
// 6.Wait for the port enable event to occur
ret = mInitSem.WaitTimeout(OMX_CMD_TIMEOUT);
if ( NO_ERROR == ret ) {
CAMHAL_LOGDA("-Port enable event arrived");
} else {
ret |= RemoveEvent(mCameraAdapterParameters.mHandleComp,
OMX_EventCmdComplete,
OMX_CommandPortEnable,
mCameraAdapterParameters.mPrevPortIndex,
NULL);
CAMHAL_LOGEA("Timeout for enabling preview port expired!");
goto EXIT;
}
// 7.Select the sensor
OMX_CONFIG_SENSORSELECTTYPE sensorSelect;
OMX_INIT_STRUCT_PTR (&sensorSelect, OMX_CONFIG_SENSORSELECTTYPE);
sensorSelect.eSensor = (OMX_SENSORSELECT) mSensorIndex;
eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ( OMX_INDEXTYPE ) OMX_TI_IndexConfigSensorSelect, &sensorSelect);
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while selecting the sensor index as %d - 0x%x", mSensorIndex, eError);
return BAD_VALUE;
} else {
CAMHAL_LOGDB("Sensor %d selected successfully", mSensorIndex);
}
#ifdef CAMERAHAL_DEBUG
printComponentVersion(mCameraAdapterParameters.mHandleComp);
#endif
// 8. Reset adapter state to defaults.
mBracketingEnabled = false;
mZoomBracketingEnabled = false;
mBracketingBuffersQueuedCount = 0;
mBracketingRange = 1;
mLastBracetingBufferIdx = 0;
mBracketingBuffersQueued = NULL;
mOMXStateSwitch = false;
mBracketingSet = false;
#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
mRawCapture = false;
mYuvCapture = false;
#endif
mCaptureSignalled = false;
mCaptureConfigured = false;
mReprocConfigured = false;
mRecording = false;
mWaitingForSnapshot = false;
mPictureFormatFromClient = NULL;
mCapabilitiesOpMode = MODE_MAX;
mCapMode = INITIAL_MODE;
mIPP = IPP_NULL;
mVstabEnabled = false;
mVnfEnabled = false;
mBurstFrames = 1;
mBurstFramesAccum = 0;
mCapturedFrames = 0;
mFlushShotConfigQueue = false;
mPictureQuality = 100;
mCurrentZoomIdx = 0;
mTargetZoomIdx = 0;
mPreviousZoomIndx = 0;
mReturnZoomStatus = false;
mZoomInc = 1;
mZoomParameterIdx = 0;
mExposureBracketingValidEntries = 0;
mZoomBracketingValidEntries = 0;
mSensorOverclock = false;
mAutoConv = OMX_TI_AutoConvergenceModeMax;
mManualConv = 0;
mDeviceOrientation = 0;
mCapabilities = caps;
mZoomUpdating = false;
mZoomUpdate = false;
mGBCE = BRIGHTNESS_OFF;
mGLBCE = BRIGHTNESS_OFF;
mParameters3A.ExposureLock = OMX_FALSE;
mParameters3A.WhiteBalanceLock = OMX_FALSE;
mEXIFData.mGPSData.mAltitudeValid = false;
mEXIFData.mGPSData.mDatestampValid = false;
mEXIFData.mGPSData.mLatValid = false;
mEXIFData.mGPSData.mLongValid = false;
mEXIFData.mGPSData.mMapDatumValid = false;
mEXIFData.mGPSData.mProcMethodValid = false;
mEXIFData.mGPSData.mVersionIdValid = false;
mEXIFData.mGPSData.mTimeStampValid = false;
mEXIFData.mModelValid = false;
mEXIFData.mMakeValid = false;
//update the mDeviceOrientation with the sensor mount orientation.
//So that the face detect will work before onOrientationEvent()
//get triggered.
CAMHAL_ASSERT(mCapabilities);
mountOrientationString = mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
CAMHAL_ASSERT(mountOrientationString);
mDeviceOrientation = atoi(mountOrientationString);
if (mSensorIndex != 2) {
mCapabilities->setMode(MODE_HIGH_SPEED);
}
if (mCapabilities->get(CameraProperties::SUPPORTED_ZOOM_STAGES) != NULL) {
mMaxZoomSupported = mCapabilities->getInt(CameraProperties::SUPPORTED_ZOOM_STAGES) + 1;
} else {
mMaxZoomSupported = 1;
}
// 9.initialize command handling thread
if(mCommandHandler.get() == NULL)
mCommandHandler = new CommandHandler(this);
if ( NULL == mCommandHandler.get() )
{
CAMHAL_LOGEA("Couldn't create command handler");
return NO_MEMORY;
}
ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("command handler thread already runnning!!");
ret = NO_ERROR;
} else {
CAMHAL_LOGEA("Couldn't run command handlerthread");
return ret;
}
}
// 10.initialize omx callback handling thread
if(mOMXCallbackHandler.get() == NULL)
mOMXCallbackHandler = new OMXCallbackHandler(this);
if ( NULL == mOMXCallbackHandler.get() )
{
CAMHAL_LOGEA("Couldn't create omx callback handler");
return NO_MEMORY;
}
ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
CAMHAL_LOGDA("omx callback handler thread already runnning!!");
ret = NO_ERROR;
} else {
CAMHAL_LOGEA("Couldn't run omx callback handler thread");
return ret;
}
}
OMX_INIT_STRUCT_PTR (&mRegionPriority, OMX_TI_CONFIG_3A_REGION_PRIORITY);
OMX_INIT_STRUCT_PTR (&mFacePriority, OMX_TI_CONFIG_3A_FACE_PRIORITY);
mRegionPriority.nPortIndex = OMX_ALL;
mFacePriority.nPortIndex = OMX_ALL;
//Setting this flag will that the first setParameter call will apply all 3A settings
//and will not conditionally apply based on current values.
mFirstTimeInit = true;
//Flag to avoid calling setVFramerate() before OMX_SetParameter(OMX_IndexParamPortDefinition)
//Ducati will return an error otherwise.
mSetFormatDone = false;
memset(mExposureBracketingValues, 0, EXP_BRACKET_RANGE*sizeof(int));
memset(mZoomBracketingValues, 0, ZOOM_BRACKET_RANGE*sizeof(int));
mMeasurementEnabled = false;
mFaceDetectionRunning = false;
mFaceDetectionPaused = false;
mFDSwitchAlgoPriority = false;
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex], 0, sizeof(OMXCameraPortParameters));
memset(&mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex], 0, sizeof(OMXCameraPortParameters));
// 11.initialize 3A defaults
mParameters3A.Effect = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EFFECT, EffLUT);
mParameters3A.FlashMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FLASH_MODE, FlashLUT);
mParameters3A.SceneMode = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_SCENE_MODE, SceneLUT);
mParameters3A.EVCompensation = atoi(OMXCameraAdapter::DEFAULT_EV_COMPENSATION);
mParameters3A.Focus = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_FOCUS_MODE, FocusLUT);
mParameters3A.ISO = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ISO_MODE, IsoLUT);
mParameters3A.Flicker = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_ANTIBANDING, FlickerLUT);
mParameters3A.Brightness = atoi(OMXCameraAdapter::DEFAULT_BRIGHTNESS);
mParameters3A.Saturation = atoi(OMXCameraAdapter::DEFAULT_SATURATION) - SATURATION_OFFSET;
mParameters3A.Sharpness = atoi(OMXCameraAdapter::DEFAULT_SHARPNESS) - SHARPNESS_OFFSET;
mParameters3A.Contrast = atoi(OMXCameraAdapter::DEFAULT_CONTRAST) - CONTRAST_OFFSET;
mParameters3A.WhiteBallance = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_WB, WBalLUT);
mParameters3A.Exposure = getLUTvalue_HALtoOMX(OMXCameraAdapter::DEFAULT_EXPOSURE_MODE, ExpLUT);
mParameters3A.ExposureLock = OMX_FALSE;
mParameters3A.FocusLock = OMX_FALSE;
mParameters3A.WhiteBalanceLock = OMX_FALSE;
mParameters3A.ManualExposure = 0;
mParameters3A.ManualExposureRight = 0;
mParameters3A.ManualGain = 0;
mParameters3A.ManualGainRight = 0;
mParameters3A.AlgoFixedGamma = OMX_TRUE;
mParameters3A.AlgoNSF1 = OMX_TRUE;
mParameters3A.AlgoNSF2 = OMX_TRUE;
mParameters3A.AlgoSharpening = OMX_TRUE;
mParameters3A.AlgoThreeLinColorMap = OMX_TRUE;
mParameters3A.AlgoGIC = OMX_TRUE;
LOG_FUNCTION_NAME_EXIT;
return ErrorUtils::omxToAndroidError(eError);
EXIT:
CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
return ErrorUtils::omxToAndroidError(eError);
}
这个initialize的过程做的事情还是比较多的,这里关系到OMX的很多知识点,是之后要研究的,现在只是先知道这里
在完成initialize方法之后,
mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
这两个方法同样非常重要,首先我们这里调用的是mCameraAdapter的方法,mCameraAdapter是OMXCameraAdapter这个类的实例,但是其实OMXCameraAdapter这个类中是没有以上这两个方法的,但是我们接着看,OMXCameraAdapter这个类的定义
class OMXCameraAdapter : public BaseCameraAdapter
OMXCameraAdapter继承于BaseCameraAdapter,不错,上面的两个方法是在BaseCameraAdapter这个类中实现的,我们接着看看
/// Store the callback used to release image buffers, plus its cookie.
/// Always returns NO_ERROR.
status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
mReleaseImageBuffersCallback = callback;
mReleaseData = user_data;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
/// Store the callback invoked at the end of an image capture, plus its
/// cookie. Always returns NO_ERROR.
status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
mEndImageCaptureCallback= callback;
mEndCaptureData = user_data;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
这两个方法实现方式一模一样,只是将传入的函数指针和user_data保存到其中而已
mEndImageCaptureCallback这个方法会在OMXCameraAdapter的fillThisBuffer中被调用,他的实现在
/// Release-callback registered with the camera adapter: routes the request
/// back to the owning CameraHal so it can free its image buffers.
void releaseImageBuffers(void *userData)
{
LOG_FUNCTION_NAME;
if (NULL != userData) {
CameraHal *c = reinterpret_cast<CameraHal *>(userData);// userData is the CameraHal instance passed at registration
c->freeImageBufs();// ask CameraHal to free the image buffers
}
LOG_FUNCTION_NAME_EXIT;
}
/// Free the image capture buffers through the MemoryManager.
/// @return NO_ERROR on success, -EINVAL if no buffers are allocated.
status_t CameraHal::freeImageBufs()
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
if ( NO_ERROR == ret )
{
if( NULL != mImageBufs )
{
///@todo Pluralise the name of this method to freeBuffers
ret = mMemoryManager->freeBuffer(mImageBufs);// release the memory via the MemoryManager
mImageBufs = NULL;
}
else
{
ret = -EINVAL;
}
}
LOG_FUNCTION_NAME_EXIT;
return ret;
}
mReleaseImageBuffersCallback 这个方法将在OMXCapture.cpp中被调用,他的实现和上面是相似的,就不再说了
二.AppCallbackNotifier的实例化和初始化
AppCallbackNotifier这个类是没有实现构造函数的,我们就先看看他的initialize方法吧
/**
* NotificationHandler class
*/
///Initialization function for AppCallbackNotifier: resets state, then
///creates and starts the notification thread that dispatches frames and
///events to the application.
status_t AppCallbackNotifier::initialize()
{
LOG_FUNCTION_NAME;
mPreviewMemory = 0;
mMeasurementEnabled = false;
mNotifierState = NOTIFIER_STOPPED;
///Create the app notifier thread
mNotificationThread = new NotificationThread(this);
if(!mNotificationThread.get())
{
CAMHAL_LOGEA("Couldn't create Notification thread");
return NO_MEMORY;
}
///Start the display thread
// NOTE(review): the comment above says "display thread" but this runs
// the notification thread created just above.
status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
if(ret!=NO_ERROR)
{
CAMHAL_LOGEA("Couldn't run NotificationThread");
mNotificationThread.clear();
return ret;
}
mUseMetaDataBufferMode = true;
mRawAvailable = false;
mRecording = false;
mPreviewing = false;
LOG_FUNCTION_NAME_EXIT;
return ret;
}
这个initialize方法做的事情相对简单但是有及其重要,他创建了一个notificationthread,然后运行这个thread
这里我们看看这个很忙碌的线程都干了些什么事情,之所以说他忙碌,是因为他一直不停的等待消息,有消息就处理,不能磨叽磨叽的
/// Main loop body of the notification thread: block until a message
/// arrives on the command queue, the event queue or the frame queue, then
/// dispatch it to the matching handler.
/// @return false when the thread should exit, true to keep looping.
bool AppCallbackNotifier::notificationThread()
{
bool shouldLive = true;
status_t ret;
LOG_FUNCTION_NAME;
//CAMHAL_LOGDA("Notification Thread waiting for message");
ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
&mEventQ,
&mFrameQ,
AppCallbackNotifier::NOTIFIER_TIMEOUT);
//CAMHAL_LOGDA("Notification Thread received message");
// Waits above for a message; once one arrives, work out which queue it
// came from and dispatch it, like a post office sorting incoming mail.
if (mNotificationThread->msgQ().hasMsg()) {
///Received a message from CameraHal, process it
CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
shouldLive = processMessage();// screen control messages first; NOTIFIER_EXIT stops the thread
if(!shouldLive) {
CAMHAL_LOGDA("Notification Thread exiting.");
return shouldLive;
}
}
if(mEventQ.hasMsg()) {
///Received an event from one of the event providers
CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
notifyEvent();// handle event messages
}
if(mFrameQ.hasMsg()) {
///Received a frame from one of the frame providers
//CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
notifyFrame();// handle frame messages
}
LOG_FUNCTION_NAME_EXIT;
return shouldLive;
}
这里先看一下notifyEvent方法的处理过程:
/// Pop one event from mEventQ and translate it into the corresponding
/// application callback (CAMERA_MSG_SHUTTER / FOCUS / ZOOM / metadata)
/// via the notify/data callbacks registered earlier. Events are dropped
/// silently when the notifier is not in NOTIFIER_STARTED state.
void AppCallbackNotifier::notifyEvent()
{
///Receive and send the event notifications to app
TIUTILS::Message msg;
LOG_FUNCTION_NAME;
{
Mutex::Autolock lock(mLock);
if ( !mEventQ.hasMsg() ) {
return;
} else {
mEventQ.get(&msg);
}
}
bool ret = true;
CameraHalEvent *evt = NULL;
CameraHalEvent::FocusEventData *focusEvtData;
CameraHalEvent::ZoomEventData *zoomEvtData;
CameraHalEvent::MetaEventData metaEvtData;
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
{
return;
}
switch(msg.command)
{
case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT:
evt = ( CameraHalEvent * ) msg.arg1;
if ( NULL == evt )
{
CAMHAL_LOGEA("Invalid CameraHalEvent");
return;
}
switch(evt->mEventType)
{
case CameraHalEvent::EVENT_SHUTTER:
// Only notify when the app has CAMERA_MSG_SHUTTER enabled.
if ( ( NULL != mCameraHal ) &&
( NULL != mNotifyCb ) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
{
mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
}
mRawAvailable = false;
break;
case CameraHalEvent::EVENT_FOCUS_LOCKED:
case CameraHalEvent::EVENT_FOCUS_ERROR:
// Report focus success/failure once, then disable further
// CAMERA_MSG_FOCUS notifications until re-enabled.
focusEvtData = &evt->mEventData->focusEvent;
if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_SUCCESS ) &&
( NULL != mCameraHal ) &&
( NULL != mNotifyCb ) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) {
mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
} else if ( ( focusEvtData->focusStatus == CameraHalEvent::FOCUS_STATUS_FAIL ) &&
( NULL != mCameraHal ) &&
( NULL != mNotifyCb ) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) ) {
mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
}
break;
case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:
zoomEvtData = &evt->mEventData->zoomEvent;
if ( ( NULL != mCameraHal ) &&
( NULL != mNotifyCb) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
{
mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
}
break;
case CameraHalEvent::EVENT_METADATA:
metaEvtData = evt->mEventData->metadataEvent;
if ( ( NULL != mCameraHal ) &&
( NULL != mNotifyCb) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
{
// WA for an issue inside CameraService
camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);
mDataCb(CAMERA_MSG_PREVIEW_METADATA,
tmpBuffer,
0,
metaEvtData->getMetadataResult(),
mCallbackCookie);
metaEvtData.clear();
if ( NULL != tmpBuffer ) {
tmpBuffer->release(tmpBuffer);
}
}
break;
case CameraHalEvent::ALL_EVENTS:
break;
default:
break;
}
break;
}
// The event object was heap-allocated by the producer; free it here.
if ( NULL != evt )
{
delete evt;
}
LOG_FUNCTION_NAME_EXIT;
}
针对每个不同消息的处理方法基本是相同的,主要通过之前register好的callback方法回调到上层,这个之前已经在文章中说过,不再多做说明
这里还是比较重要的,但不是这篇文章的重点,之后有机会还会在说
再看一下notifyFrame的处理过程:
算了,我还是不把他的处理过程贴出来,怕吓到人,挺庞大的,处理了很多事件,当然重要,先知道,再看吧
这里你处理了notifyEvent和notifyFrame这里消息,但是这些消息是从哪里来呢?知道了接收者,那么就必须找到发送者
mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
不错,就是在这里指定了event和frame消息的提供者(provider)
先看看setEventProvider的实现
/**
 * Registers the event provider (currently the CameraAdapter) whose
 * notifications will be relayed to the application callbacks.
 *
 * @param eventMask     bitmask of CameraHalEvent types to subscribe to
 * @param eventNotifier object that will deliver the events (the CameraAdapter)
 */
void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
{
    LOG_FUNCTION_NAME;
    ///@remarks There is no NULL check here. We will check
    ///         for NULL when we get start command from CameraHal
    ///@Remarks Currently only one event provider (CameraAdapter) is supported
    ///@todo    Have an array of event providers for each event bitmask
    mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
    // NOTE(review): on toolchains where plain `new` throws, this NULL check is
    // dead code — kept for parity with builds using -fno-exceptions.
    if ( NULL == mEventProvider )
        {
        CAMHAL_LOGEA("Error in creating EventProvider");
        }
    else
        {
        mEventProvider->enableEventNotification(eventMask);
        }
    LOG_FUNCTION_NAME_EXIT;
}
实例化了一个EventProvider的对象,并且enable EventNotification
再看看setFrameProvider的实现吧
/**
 * Registers the frame provider (the CameraAdapter) whose captured frames
 * will be relayed to the application, and subscribes to image and RAW frames.
 *
 * @param frameNotifier object that will deliver the frames (the CameraAdapter)
 */
void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
{
    LOG_FUNCTION_NAME;
    ///@remarks There is no NULL check here. We will check
    ///         for NULL when we get the start command from CameraAdapter
    mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
    // NOTE(review): on toolchains where plain `new` throws, this NULL check is
    // dead code — kept for parity with builds using -fno-exceptions.
    if ( NULL == mFrameProvider )
        {
        CAMHAL_LOGEA("Error in creating FrameProvider");
        }
    else
        {
        //Register only for captured images and RAW for now
        //TODO: Register for and handle all types of frames
        mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
        mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
        }
    LOG_FUNCTION_NAME_EXIT;
}
方法基本一致,实例化一个FrameProvider的对象,并且enable FrameNotification
但是这里必须分析一下FrameProvider的构造方法
// Stores the frame source (the CameraAdapter), an opaque cookie handed back
// to the callback, and the relay callback invoked for every delivered frame.
FrameProvider(FrameNotifier *fn, void* cookie, frame_callback
frameCallback)
:mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { }
这里通过传入的参数初始化了mFrameNotifier、mCookie和mFrameCallback,看似简单却至关重要,后面你会知道的,EventProvider也是同样的道理
FrameProvider和EventProvider接口的实现在CameraHalUtilClasses.cpp文件中
接着往下走:Start the callback notifier
mAppCallbackNotifier->start()
/**
 * Moves the notifier into the NOTIFIER_STARTED state so that queued
 * frame/event messages start being forwarded to the application.
 *
 * @return NO_ERROR on success, ALREADY_EXISTS if already started,
 *         NO_INIT if no frame or event provider has been registered.
 */
status_t AppCallbackNotifier::start()
{
    LOG_FUNCTION_NAME;
    if ( mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED )
        {
        CAMHAL_LOGDA("AppCallbackNotifier already running");
        LOG_FUNCTION_NAME_EXIT;
        return ALREADY_EXISTS;
        }
    ///Check whether initial conditions are met for us to start
    ///A frame provider should be available, if not return error
    if ( !mFrameProvider )
        {
        ///AppCallbackNotifier not properly initialized
        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL");
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
        }
    ///At least one event notifier should be available, if not return error
    ///@todo Modify here when there is an array of event providers
    if ( !mEventProvider )
        {
        ///AppCallbackNotifier not properly initialized
        CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL");
        LOG_FUNCTION_NAME_EXIT;
        return NO_INIT;
        }
    mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED;
    CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n");
    // Drop any stale entries queued for the video encoder from a previous run.
    gEncoderQueue.clear();
    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}
只是做了一下检查,最后clear了编码队列
接着走:mAppCallbackNotifier->setMeasurements
/// Records whether measurement (frame-data) mode is active; when enabling,
/// also subscribes to FRAME_DATA_SYNC frames from the frame provider.
void AppCallbackNotifier::setMeasurements(bool enable)
{
    Mutex::Autolock lock(mLock);
    LOG_FUNCTION_NAME;

    mMeasurementEnabled = enable;
    if (enable) {
        mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
    }

    LOG_FUNCTION_NAME_EXIT;
}
AppCallbackNotifier对象实例化以及初始化到这里完成了
三.MemoryManager的实例化和初始化
无疑这个类跟内存管理有着很密切的关系,它定义了自己的构造函数,但是这里不展开说了,实在是大材小用,只有一条语句,直接看看它的initialize方法吧
/// Opens the ION allocator device on first use; idempotent on success.
/// Returns OK when a valid fd is held, NO_INIT if /dev/ion cannot be opened.
status_t MemoryManager::initialize() {
    if ( mIonFd != -1 ) {
        // Already initialized from a previous call.
        return OK;
    }

    mIonFd = ion_open();
    if ( mIonFd < 0 ) {
        CAMHAL_LOGE("ion_open() failed, error: %d", mIonFd);
        mIonFd = -1;
        return NO_INIT;
    }

    return OK;
}
只是调用了ion_open这个方法获得了一个fd
/// Opens the ION memory-allocator device node.
/// Returns the fd on success, or the negative open() result on failure.
int ion_open()
{
    const int fd = open("/dev/ion", O_RDWR);
    if (fd < 0) {
        LOGE("open /dev/ion failed!\n");
    }
    return fd;
}
开始时我晕了,ion到底是什么device啊?赶紧查一查
这里我不做过多说明,可以看看这个分享,同时感谢大牛的分享:/article/9235714.html
ION与PMEM类似,管理一或多个内存池,其中有一些会在boot time的时候预先分配,以备给特殊的硬件使用(GPU,显示控制器等)。它通过ION heaps来管理这些pool。
它可以被userspace的process之间或者内核中的模块之间进行内存共享
四.SensorListener的实例化和初始化
在SensorListener的构造函数中对一些参数进行了默认初始化,这里不做说明,直接看看它的initialize方法实现
/**
 * Sets up the sensor event path: creates a SensorManager event queue,
 * attaches its fd to a Looper, and starts a thread that polls the looper
 * so that events reach sensor_events_listener().
 *
 * @return NO_ERROR on success; NO_INIT if the event queue could not be
 *         created; NO_MEMORY if the looper thread could not be created;
 *         otherwise the thread-run error code.
 */
status_t SensorListener::initialize() {
    status_t ret = NO_ERROR;
    SensorManager& mgr(SensorManager::getInstance());
    LOG_FUNCTION_NAME;
    // NOTE(review): local variable named like a member (mLooper) — presumably
    // intentional since SensorLooperThread keeps its own reference; confirm.
    sp<Looper> mLooper;
    mSensorEventQueue = mgr.createEventQueue();
    if (mSensorEventQueue == NULL) {
        CAMHAL_LOGEA("createEventQueue returned NULL");
        ret = NO_INIT;
        goto out;
    }
    mLooper = new Looper(false);
    // Route sensor-queue fd events into sensor_events_listener() with this
    // SensorListener instance as the callback cookie.
    mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
    if (mSensorLooperThread.get() == NULL)
        mSensorLooperThread = new SensorLooperThread(mLooper.get());
    if (mSensorLooperThread.get() == NULL) {
        CAMHAL_LOGEA("Couldn't create sensor looper thread");
        ret = NO_MEMORY;
        goto out;
    }
    // Fixed: the thread-name string literal was broken across two lines in the
    // pasted source, which is invalid C++.
    ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
    if (ret == INVALID_OPERATION){
        CAMHAL_LOGDA("thread already running ?!?");
    } else if (ret != NO_ERROR) {
        CAMHAL_LOGEA("couldn't run thread");
        goto out;
    }
out:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
看到这里我个人是感觉这里挺抽象的,这里先简单说明一下,这里首先获取到SensorManager,通过这个SensorManager创建一个EventQueue,然后实例化一个Looper对象,将这个EventQueue的fd添加到mLooper中,最后创建一个SensorLooperThread,并启动这个线程
下一步:mSensorListener->setCallbacks(orientation_cb, this);
/// Registers the orientation callback and its cookie. A NULL callback leaves
/// any previously registered callback untouched; the cookie is always updated.
void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) {
    LOG_FUNCTION_NAME;

    if (orientation_cb != NULL) {
        mOrientationCb = orientation_cb;
    }
    mCbCookie = cookie;

    LOG_FUNCTION_NAME_EXIT;
}
这里的方法跟上面讲过的setcallback方法其实基本是相通的,注册的这个回调函数是在以下方法中被调用到的
/// Invokes the registered orientation callback, but only while the
/// orientation sensor is enabled and a callback has been set.
void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
    LOG_FUNCTION_NAME;
    Mutex::Autolock lock(&mLock);

    const bool orientationOn = (sensorsEnabled & SENSOR_ORIENTATION) != 0;
    if ((mOrientationCb != NULL) && orientationOn) {
        mOrientationCb(orientation, tilt, mCbCookie);
    }

    LOG_FUNCTION_NAME_EXIT;
}
也就是调用以下方法:
// Relay registered via SensorListener::setCallbacks(); forwards orientation
// events from the sensor path into the owning CameraHal instance.
static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
CameraHal *camera = NULL;
if (cookie) {
camera = (CameraHal*) cookie;// cookie is the CameraHal "this" passed at registration
camera->onOrientationEvent(orientation, tilt);// forward to CameraHal::onOrientationEvent
}
}
我们接着跟踪下去:
/**
 * Callback to receive orientation events from SensorListener; relays them
 * to the camera adapter when one is attached.
 */
void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
    LOG_FUNCTION_NAME;

    if ( mCameraAdapter != NULL ) {
        mCameraAdapter->onOrientationEvent(orientation, tilt);
    }

    LOG_FUNCTION_NAME_EXIT;
}
继续,实现在OMXCameraAdapter中
/**
 * Receives device orientation updates (relayed from the SensorListener via
 * CameraHal) and converts them into a device rotation in [0..360), taking
 * the sensor mount orientation and facing into account. Updates face
 * detection when the rotation changes.
 *
 * @param orientation device orientation in degrees reported by the sensor
 * @param tilt        device tilt in degrees; large tilts are ignored
 */
void OMXCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
{
    LOG_FUNCTION_NAME;
    static const unsigned int DEGREES_TILT_IGNORE = 45;
    // if tilt angle is greater than DEGREES_TILT_IGNORE we are going to
    // ignore the orientation returned from sensor. the orientation returned
    // from sensor is not reliable. Value of DEGREES_TILT_IGNORE may need
    // adjusting
    if (tilt > DEGREES_TILT_IGNORE) {
        return;
    }
    int mountOrientation = 0;
    bool isFront = false;
    if (mCapabilities) {
        const char * const mountOrientationString =
                mCapabilities->get(CameraProperties::ORIENTATION_INDEX);
        if (mountOrientationString) {
            mountOrientation = atoi(mountOrientationString);
        }
        const char * const facingString = mCapabilities->get(CameraProperties::FACING_INDEX);
        if (facingString) {
            isFront = strcmp(facingString, TICameraParameters::FACING_FRONT) == 0;
        }
    }
    // direction is a constant sign for facing, meaning the rotation direction
    // relative to device: +1 (clockwise) for back sensor and
    // -1 (counter-clockwise) for front sensor
    const int direction = isFront ? -1 : 1;
    // Cast orientation to int before multiplying: int * uint32_t promotes to
    // unsigned and relied on implementation-defined wrap-around when
    // direction == -1.
    int rotation = mountOrientation + direction * static_cast<int>(orientation);
    // crop the calculated value to [0..360) range
    while ( rotation < 0 ) rotation += 360;
    rotation %= 360;
    if (rotation != mDeviceOrientation) {
        mDeviceOrientation = rotation;
        // restart face detection with new rotation
        setFaceDetectionOrientation(mDeviceOrientation);
    }
    CAMHAL_LOGVB("orientation = %d tilt = %d device_orientation = %d", orientation, tilt, mDeviceOrientation);
    LOG_FUNCTION_NAME_EXIT;
}
这里暂时不做分析
最后一步:mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION)
/**
 * Enables delivery of the requested sensor type. Currently only the
 * orientation (accelerometer) sensor is handled; events arrive on the
 * queue configured in initialize() at a 100 ms rate.
 *
 * @param type bitmask of sensor_type_t values to enable
 */
void SensorListener::enableSensor(sensor_type_t type) {
    Sensor const* sensor;
    SensorManager& mgr(SensorManager::getInstance());
    LOG_FUNCTION_NAME;
    Mutex::Autolock lock(&mLock);
    if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
        sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
        // Guard against boards without an accelerometer: the original code
        // dereferenced sensor unconditionally and would crash here.
        if (sensor == NULL) {
            CAMHAL_LOGEA("getDefaultSensor(TYPE_ACCELEROMETER) returned NULL");
            LOG_FUNCTION_NAME_EXIT;
            return;
        }
        CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
        mSensorEventQueue->enableSensor(sensor);
        mSensorEventQueue->setEventRate(sensor, ms2ns(100));
        sensorsEnabled |= SENSOR_ORIENTATION;
    }
    LOG_FUNCTION_NAME_EXIT;
}
使能sensor
到这里为止CameraHal的这个初始化过程完成了,这里个人感觉十分重要,其他的接口实现固然重要,但是清清楚楚的知道这个初始化过程会让你hal与上层和底层的交互先有一个大体的认知,这比你一头扎进去到处乱撞要高效很多
Android Camere Study 待续。。。。
相关文章推荐
- Android之screenOrientation属性
- Android Camera数据流分析全程记录(overlay方式二)
- Android setColorFilter 滤镜效果
- 王学岗补间动画(一)——补间动画的初步应用
- android自定义View绘制天气温度曲线
- 控件与布局
- Android系统init.rc分析
- Android自助餐之Fragment与Activity通信
- 【Android】LocalBroadcastManager 注册的广播对 PendingIntent 无效
- Android中布局的正确姿势
- Android中的ExpandableListView的使用,以及点击时间禁用,并展开group,去掉小三角
- Android最火的快速开发框架AndroidAnnotations使用详解
- Android页面跳转动画效果
- Android自助餐之SharedPreferences
- Android中五种Toast显示效果
- Android框架------------------AndroidAnnotations
- Android源码国内镜像
- 010ScrollView 滚动条(基础)
- [Android--Tool]Gradle sync started: NullPointerException: null
- Android属性动画完全解析(上),初识属性动画的基本用法(转)