In day-to-day work the camera module is one of the modules that gets debugged and modified most often, so being familiar with the camera's work flow and working principles greatly improves efficiency. For the Android system as a whole, however, the camera is a very complex module; the analysis below focuses on how the camera driver is loaded.
As the schematic above shows, the pins connecting the Camera fall roughly into the following groups:

Control pins:
b) Standby/PowerDown signal, used to put the sensor into standby mode and reduce power consumption;
c) MCLK, the Master Clock signal, supplied by the BB (baseband) side.

Data and timing pins:
a) PCLK, the Pixel Clock signal, divided down from MCLK; it clocks the pixel data out and so paces the image transfer frame rate;
b) HSYNC, the line sync signal, whose rising edge marks the start of a new line of image data;
c) VSYNC, the frame sync signal, whose falling edge marks the start of a new frame;
d) D0-D9, the data lines (8 or 10 of them, depending on the sensor).
The architecture in the figure above should already be familiar: Android roughly divides the system into three layers, the application layer, the libraries plus hardware abstraction layer, and the Linux kernel. In kernel space the Camera driver brings the hardware up and hands the driver interface to the hardware abstraction layer; the Camera application on top runs in the Android runtime's virtual machine, loads the shared Camera libraries Android provides, and calls the HAL interfaces to control the Camera hardware. If the sensor is a RAW-type Camera, the HAL additionally has to apply tuning parameters to control the image quality.
The kernel part consists mainly of two pieces. One is the image sensor driver, which is responsible for detecting the ID of the specific sensor model, powering it on, and configuring its registers for preview, capture, initialization, 3A and other feature settings. The other is the ISP driver, which uploads the sensor data stream via DMA.
The HAL layer is split into three main parts: imageio, which is mainly the pipe through which data buffers are passed up; drv, which contains the HAL-level control of imgsensor and isp; and feature io, which contains the various 3A and other quality settings. On the kernel side, the image sensor is first registered as a platform device:
#if 1 ///defined(CONFIG_VIDEO_CAPTURE_DRIVERS)
    retval = platform_device_register(&sensor_dev);
    if (retval != 0) {
        return retval;
    }
#endif

static struct platform_device sensor_dev = {
    .name = "image_sensor",
    .id   = -1,
};
The platform device for image_sensor is registered with the name "image_sensor". In Linux, drivers and devices on any bus are matched by name, and the platform bus is no exception, so a grep for this name finds the corresponding camera driver registered at mediatek/custom/common/kernel/imgsensor/src/kd_sensorlist.c, with the following code:
static struct platform_driver g_stCAMERA_HW_Driver = {
    .probe   = CAMERA_HW_probe,
    .remove  = CAMERA_HW_remove,
    .suspend = CAMERA_HW_suspend,
    .resume  = CAMERA_HW_resume,
    .driver  = {
        .name  = "image_sensor",
        .owner = THIS_MODULE,
    }
};
Let us now look at how the registration flow of the image_sensor platform driver is implemented, starting with the init entry function of the kd_sensorlist.c driver file:
/*=======================================================================
 * CAMERA_HW_i2C_init()
 *=======================================================================*/
static int __init CAMERA_HW_i2C_init(void)
{
    struct proc_dir_entry *prEntry;

    //i2c_register_board_info(CAMERA_I2C_BUSNUM, &kd_camera_dev, 1);
    i2c_register_board_info(SUPPORT_I2C_BUS_NUM1, &i2c_devs1, 1);   // fill in the I2C board info
    //i2c_register_board_info(SUPPORT_I2C_BUS_NUM2, &i2c_devs2, 1);

    if (platform_driver_register(&g_stCAMERA_HW_Driver)) {          // register the platform-bus driver
        PK_ERR("failed to register CAMERA_HW driver\n");
        return -ENODEV;
    }
    //if(platform_driver_register(&g_stCAMERA_HW_Driver2)){
    //    PK_ERR("failed to register CAMERA_HW driver\n");
    //    return -ENODEV;
    //}

    //Register proc file for main sensor register debug
    prEntry = create_proc_entry("driver/camsensor", 0, NULL);       // create driver/camsensor under /proc for debugging the main sensor registers over adb
    if (prEntry) {
        prEntry->read_proc = CAMERA_HW_DumpReg_To_Proc;
        prEntry->write_proc = CAMERA_HW_Reg_Debug;
    }
    else {
        PK_ERR("add /proc/driver/camsensor entry fail \n");
    }
    //Register proc file for sub sensor register debug
    prEntry = create_proc_entry("driver/camsensor2", 0, NULL);      // create driver/camsensor2 under /proc for debugging the sub sensor registers over adb
    if (prEntry) {
        prEntry->read_proc = CAMERA_HW_DumpReg_To_Proc;
        prEntry->write_proc = CAMERA_HW_Reg_Debug2;
    }
    else {
        PK_ERR("add /proc/driver/camsensor2 entry fail \n");
    }

    atomic_set(&g_CamHWOpend, 0);
    //atomic_set(&g_CamHWOpend2, 0);
    atomic_set(&g_CamDrvOpenCnt, 0);
    //atomic_set(&g_CamDrvOpenCnt2, 0);
    atomic_set(&g_CamHWOpening, 0);

    return 0;
}
CAMERA_HW_i2C_init mainly fills in the I2C board-level information and then registers the platform-bus driver; the name field inside g_stCAMERA_HW_Driver is what gets matched against the device. The function also creates the two proc nodes driver/camsensor and driver/camsensor2, which exist mainly so that the sensor vendor's FAE can do image-quality debugging over adb. The platform-bus matching itself is essentially a name comparison, sketched below.
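For reference, this is roughly what the platform-bus match rule boils down to. It is a simplified sketch of the logic in drivers/base/platform.c, not MTK code; when no id_table or driver_override is involved, binding happens purely on the name string:

/* Simplified sketch: a platform device and a platform driver bind when
 * their name strings are equal - here "image_sensor" on both sides. */
static int sketch_platform_match(struct device *dev, struct device_driver *drv)
{
    struct platform_device *pdev = to_platform_device(dev);

    return (strcmp(pdev->name, drv->name) == 0);
}

For completeness, here is the registered driver structure again: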
static struct platform_driver g_stCAMERA_HW_Driver = {
    .probe   = CAMERA_HW_probe,
    .remove  = CAMERA_HW_remove,
    .suspend = CAMERA_HW_suspend,
    .resume  = CAMERA_HW_resume,
    .driver  = {
        .name  = "image_sensor",
        .owner = THIS_MODULE,
    }
};
The g_stCAMERA_HW_Driver structure mainly provides the probe, remove, suspend and resume hooks. probe is the callback invoked once the device and driver are matched, so after the driver is registered, .probe = CAMERA_HW_probe takes us into the CAMERA_HW_probe function:
static int CAMERA_HW_probe(struct platform_device *pdev)
{
    return i2c_add_driver(&CAMERA_HW_i2c_driver);
}

struct i2c_driver CAMERA_HW_i2c_driver = {
    .probe       = CAMERA_HW_i2c_probe,
    .remove      = CAMERA_HW_i2c_remove,
    .driver.name = CAMERA_HW_DRVNAME1,
    .id_table    = CAMERA_HW_i2c_id,
};
All CAMERA_HW_probe does is register an I2C driver. The camera sensor hangs off an I2C bus and its registers are read and written over I2C, so the sensor driver ultimately registers itself as an I2C device as well. Registration on the I2C bus works much like registration on the platform bus; once the I2C driver is registered, the system calls .probe = CAMERA_HW_i2c_probe from the CAMERA_HW_i2c_driver structure:
static int CAMERA_HW_i2c_probe(struct i2c_client *client, const struct i2c_device_id *id)
{
    int i4RetValue = 0;

    PK_DBG("[CAMERA_HW] Attach I2C \n");

    //get sensor i2c client
    spin_lock(&kdsensor_drv_lock);
    g_pstI2Cclient = client;              // save the i2c client device handed to us (registered earlier through the board info)
    //set I2C clock rate
    g_pstI2Cclient->timing = 300;         //200k
    spin_unlock(&kdsensor_drv_lock);

    //Register char driver
    i4RetValue = RegisterCAMERA_HWCharDrv();   // register the character driver
    if (i4RetValue) {
        PK_ERR("[CAMERA_HW] register char device failed!\n");
        return i4RetValue;
    }
    //spin_lock_init(&g_CamHWLock);

    PK_DBG("[CAMERA_HW] Attached!! \n");
    return 0;
}
CAMERA_HW_i2c_probe essentially just calls RegisterCAMERA_HWCharDrv to register a character driver:
inline static int RegisterCAMERA_HWCharDrv(void)
{
    struct device* sensor_device = NULL;

#if CAMERA_HW_DYNAMIC_ALLOCATE_DEVNO
    if (alloc_chrdev_region(&g_CAMERA_HWdevno, 0, 1, CAMERA_HW_DRVNAME1))   // dynamically allocate a char device number
    {
        PK_DBG("[CAMERA SENSOR] Allocate device no failed\n");
        return -EAGAIN;
    }
#else
    if (register_chrdev_region(g_CAMERA_HWdevno, 1, CAMERA_HW_DRVNAME1))    // statically register a char device number
    {
        PK_DBG("[CAMERA SENSOR] Register device no failed\n");
        return -EAGAIN;
    }
#endif

    //Allocate driver
    g_pCAMERA_HW_CharDrv = cdev_alloc();                     // allocate a cdev structure
    if (NULL == g_pCAMERA_HW_CharDrv)
    {
        unregister_chrdev_region(g_CAMERA_HWdevno, 1);
        PK_DBG("[CAMERA SENSOR] Allocate mem for kobject failed\n");
        return -ENOMEM;
    }

    //Attatch file operation.
    cdev_init(g_pCAMERA_HW_CharDrv, &g_stCAMERA_HW_fops);    // bind the file_operations to the char device
    g_pCAMERA_HW_CharDrv->owner = THIS_MODULE;

    //Add to system
    if (cdev_add(g_pCAMERA_HW_CharDrv, g_CAMERA_HWdevno, 1))  // add the cdev, with its file_operations attached, to the system
    {
        PK_DBG("[mt6516_IDP] Attatch file operation failed\n");
        unregister_chrdev_region(g_CAMERA_HWdevno, 1);
        return -EAGAIN;
    }

    sensor_class = class_create(THIS_MODULE, "sensordrv");    // create the sensordrv class
    if (IS_ERR(sensor_class)) {
        int ret = PTR_ERR(sensor_class);
        PK_DBG("Unable to create class, err = %d\n", ret);
        return ret;
    }
    sensor_device = device_create(sensor_class, NULL, g_CAMERA_HWdevno, NULL, CAMERA_HW_DRVNAME1);

    return 0;
}
Linux drivers fall into three broad categories: character, block and network drivers, and the camera module driver is a character driver. RegisterCAMERA_HWCharDrv therefore registers the image-sensor character device: depending on whether CAMERA_HW_DYNAMIC_ALLOCATE_DEVNO is defined, it allocates the device number dynamically or registers it statically, then allocates a cdev structure with cdev_alloc, binds g_stCAMERA_HW_fops to it with cdev_init (the key step for everything that follows), adds the cdev to the system with cdev_add, and finally creates a sensordrv class, which appears under /sys/class, along with the device node itself. The corresponding teardown path, used on module exit or error handling, is the mirror image, sketched below.
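As a rough sketch using the same global variables (this cleanup routine is an assumption about how the exit path would look, not code quoted from kd_sensorlist.c):

/* Sketch of the reverse path: undo the char-device registration in the
 * opposite order to RegisterCAMERA_HWCharDrv(). */
static void sketch_unregister_camera_hw_chardrv(void)
{
    device_destroy(sensor_class, g_CAMERA_HWdevno);   /* remove the /dev node      */
    class_destroy(sensor_class);                      /* drop the sensordrv class  */
    cdev_del(g_pCAMERA_HW_CharDrv);                   /* detach the cdev           */
    unregister_chrdev_region(g_CAMERA_HWdevno, 1);    /* release the device number */
}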
Now let us look at what is inside the g_stCAMERA_HW_fops structure:
static const struct file_operations g_stCAMERA_HW_fops =
{
    .owner          = THIS_MODULE,
    .open           = CAMERA_HW_Open,
    .release        = CAMERA_HW_Release,
    .unlocked_ioctl = CAMERA_HW_Ioctl
};
file_operations is the interface the driver exposes to the layers above, and using it always starts with open, so let us first look at the open function in the g_stCAMERA_HW_fops structure:
static int CAMERA_HW_Open(struct inode * a_pstInode, struct file * a_pstFile)
{
    //reset once in multi-open
    if ( atomic_read(&g_CamDrvOpenCnt) == 0) {
        //default OFF state
        //MUST have
        //kdCISModulePowerOn(DUAL_CAMERA_MAIN_SENSOR,"",true,CAMERA_HW_DRVNAME1);
        //kdCISModulePowerOn(DUAL_CAMERA_SUB_SENSOR,"",true,CAMERA_HW_DRVNAME1);
        //kdCISModulePowerOn(DUAL_CAMERA_MAIN_2_SENSOR,"",true,CAMERA_HW_DRVNAME1);
        //kdCISModulePowerOn(DUAL_CAMERA_MAIN_SENSOR,"",false,CAMERA_HW_DRVNAME1);
        //kdCISModulePowerOn(DUAL_CAMERA_SUB_SENSOR,"",false,CAMERA_HW_DRVNAME1);
        //kdCISModulePowerOn(DUAL_CAMERA_MAIN_2_SENSOR,"",false,CAMERA_HW_DRVNAME1);
    }
    //
    atomic_inc(&g_CamDrvOpenCnt);
    return 0;
}
The whole function does little more than read and increment the g_CamDrvOpenCnt counter atomically; nothing else happens here. The upper layer talks to the driver mainly by sending commands through ioctl and transferring data that way, so next look at .unlocked_ioctl = CAMERA_HW_Ioctl, i.e. the CAMERA_HW_Ioctl handler:
static long CAMERA_HW_Ioctl(
    struct file * a_pstFile,
    unsigned int a_u4Command,
    unsigned long a_u4Param
)
{
    int i4RetValue = 0;
    void * pBuff = NULL;
    u32 *pIdx = NULL;

    mutex_lock(&kdCam_Mutex);

    if(_IOC_NONE == _IOC_DIR(a_u4Command)) {
    }
    else {
        pBuff = kmalloc(_IOC_SIZE(a_u4Command), GFP_KERNEL);   // allocate a kernel buffer for the command argument
        if(NULL == pBuff) {
            PK_DBG("[CAMERA SENSOR] ioctl allocate mem failed\n");
            i4RetValue = -ENOMEM;
            goto CAMERA_HW_Ioctl_EXIT;
        }

        if(_IOC_WRITE & _IOC_DIR(a_u4Command)) {                // does the command carry data from user space?
            // copy the user-supplied argument into kernel space; the switch below acts on this data
            if(copy_from_user(pBuff , (void *) a_u4Param, _IOC_SIZE(a_u4Command))) {
                kfree(pBuff);
                PK_DBG("[CAMERA SENSOR] ioctl copy from user failed\n");
                i4RetValue = -EFAULT;
                goto CAMERA_HW_Ioctl_EXIT;
            }
        }
    }

    pIdx = (u32*)pBuff;
    switch(a_u4Command) {
#if 0
    case KDIMGSENSORIOC_X_POWER_ON:
        i4RetValue = kdModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM) *pIdx, true, CAMERA_HW_DRVNAME);
        break;
    case KDIMGSENSORIOC_X_POWER_OFF:
        i4RetValue = kdModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM) *pIdx, false, CAMERA_HW_DRVNAME);
        break;
#endif
    case KDIMGSENSORIOC_X_SET_DRIVER:
        i4RetValue = kdSetDriver((unsigned int*)pBuff);
        break;
    case KDIMGSENSORIOC_T_OPEN:
        i4RetValue = adopt_CAMERA_HW_Open();
        break;
    case KDIMGSENSORIOC_X_GETINFO:
        i4RetValue = adopt_CAMERA_HW_GetInfo(pBuff);
        break;
    case KDIMGSENSORIOC_X_GETRESOLUTION:
        i4RetValue = adopt_CAMERA_HW_GetResolution(pBuff);
        break;
    case KDIMGSENSORIOC_X_FEATURECONCTROL:
        i4RetValue = adopt_CAMERA_HW_FeatureControl(pBuff);
        break;
    case KDIMGSENSORIOC_X_CONTROL:
        i4RetValue = adopt_CAMERA_HW_Control(pBuff);
        break;
    case KDIMGSENSORIOC_T_CLOSE:
        i4RetValue = adopt_CAMERA_HW_Close();
        break;
    case KDIMGSENSORIOC_T_CHECK_IS_ALIVE:
        i4RetValue = adopt_CAMERA_HW_CheckIsAlive();
        break;
    case KDIMGSENSORIOC_X_GET_SOCKET_POS:
        i4RetValue = kdGetSocketPostion((unsigned int*)pBuff);
        break;
    case KDIMGSENSORIOC_X_SET_I2CBUS:
        //i4RetValue = kdSetI2CBusNum(*pIdx);
        break;
    case KDIMGSENSORIOC_X_RELEASE_I2C_TRIGGER_LOCK:
        //i4RetValue = kdReleaseI2CTriggerLock();
        break;
    default :
        PK_DBG("No such command \n");
        i4RetValue = -EPERM;
        break;
    }

    if(_IOC_READ & _IOC_DIR(a_u4Command)) {
        if(copy_to_user((void __user *) a_u4Param , pBuff , _IOC_SIZE(a_u4Command))) {
            kfree(pBuff);
            PK_DBG("[CAMERA SENSOR] ioctl copy to user failed\n");
            i4RetValue = -EFAULT;
            goto CAMERA_HW_Ioctl_EXIT;
        }
    }
    kfree(pBuff);

CAMERA_HW_Ioctl_EXIT:
    mutex_unlock(&kdCam_Mutex);
    return i4RetValue;
}
So ioctl is how the upper layer communicates with the driver through cmd codes. Let us look at the more important commands:
a) The KDIMGSENSORIOC_T_OPEN command:
    case KDIMGSENSORIOC_T_OPEN:
        i4RetValue = adopt_CAMERA_HW_Open();
        break;
inline static int adopt_CAMERA_HW_Open(void)
{
    UINT32 err = 0;

    KD_IMGSENSOR_PROFILE_INIT();

    if (atomic_read(&g_CamHWOpend) == 0 ) {
        if (g_pSensorFunc) {   // check that the image-sensor function table is not NULL; this table is what we use to drive the image sensor
            err = g_pSensorFunc->SensorOpen();   // ends up in kd_MultiSensorOpen from kd_MultiSensorFunc
            if(ERROR_NONE != err) {
                PK_DBG(" ERROR:SensorOpen(), turn off power \n");
                kdModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM*) g_invokeSocketIdx, g_invokeSensorNameStr, false, CAMERA_HW_DRVNAME1);
            }
        }
        else {
            PK_DBG(" ERROR:NULL g_pSensorFunc\n");
        }
        KD_IMGSENSOR_PROFILE("SensorOpen");
    }
    else {
        //PK_ERR("adopt_CAMERA_HW_Open Fail, g_CamHWOpend = %d,g_CamHWOpend2 = %d\n ",atomic_read(&g_CamHWOpend),atomic_read(&g_CamHWOpend2) );
        PK_ERR("adopt_CAMERA_HW_Open Fail, g_CamHWOpend = %d\n ",atomic_read(&g_CamHWOpend) );
    }

    if (err == 0 ) {
        atomic_set(&g_CamHWOpend, 1);
        //atomic_set(&g_CamHWOpend2, 1);
    }
    return err?-EIO:err;
} /* adopt_CAMERA_HW_Open() */
g_pSensorFunc is defined as static MULTI_SENSOR_FUNCTION_STRUCT *g_pSensorFunc = &kd_MultiSensorFunc, i.e. it points at kd_MultiSensorFunc. From the if (g_pSensorFunc) check it also follows that the upper layer cannot usefully start with the KDIMGSENSORIOC_T_OPEN command, because at that point the underlying per-sensor function pointers have not been assigned yet; as the later code shows, the first cmd the upper layer issues is KDIMGSENSORIOC_X_SET_DRIVER. Once everything is in place, err = g_pSensorFunc->SensorOpen() is executed, and SensorOpen here points to the kd_MultiSensorOpen function.
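To make the ordering concrete, here is a hedged user-space sketch of how a caller (in practice the MTK HAL, not an application) would drive these two commands. The device node name and the header providing the KDIMGSENSORIOC_* numbers are assumptions; the real names come from CAMERA_HW_DRVNAME1 and the MTK imgsensor headers. kd_MultiSensorOpen itself is examined next.

/* Hypothetical user-space sketch: select the sensor driver first, then open it. */
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include "kd_imgsensor.h"        /* assumed header exposing the KDIMGSENSORIOC_* numbers */

int sketch_open_main_sensor(void)
{
    /* assumed node name; device_create() used CAMERA_HW_DRVNAME1 */
    int fd = open("/dev/kd_camera_hw", O_RDWR);
    if (fd < 0) {
        perror("open camera hw node");
        return -1;
    }

    /* pDrvIndex[i]: socket index in the high bits, sensor-list index in the
     * low bits (see kdSetDriver below); the values here are placeholders. */
    unsigned int drvIndex[2] = { 0, 0 };
    if (ioctl(fd, KDIMGSENSORIOC_X_SET_DRIVER, drvIndex) < 0)
        perror("SET_DRIVER");

    /* only now does T_OPEN have a sensor function table to call */
    if (ioctl(fd, KDIMGSENSORIOC_T_OPEN) < 0)
        perror("T_OPEN");

    return fd;
}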
MUINT32 kd_MultiSensorOpen ( void )
{
    MUINT32 ret = ERROR_NONE;
    MINT32 i = 0;

    KD_MULTI_FUNCTION_ENTRY();   // record the current time for profiling

    for ( i = (KDIMGSENSOR_MAX_INVOKE_DRIVERS-1) ; i >= KDIMGSENSOR_INVOKE_DRIVER_0 ; i-- ) {
        if ( g_bEnableDriver[i] && g_pInvokeSensorFunc[i] ) {
            // turn on power
            ret = kdCISModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM)g_invokeSocketIdx[i],(char*)g_invokeSensorNameStr[i],true,CAMERA_HW_DRVNAME1);
            if ( ERROR_NONE != ret ) {   // power-up failed
                PK_ERR("[%s]",__FUNCTION__);
                return ret;
            }
            //wait for power stable
            mDELAY(10);
            KD_IMGSENSOR_PROFILE("kdModulePowerOn");

            ret = g_pInvokeSensorFunc[i]->SensorOpen();   // call into the sensor module driver's open; g_pInvokeSensorFunc holds the module's SENSOR_FUNCTION_STRUCT (SensorFuncGC2035 here)
            if ( ERROR_NONE != ret ) {
                kdCISModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM)g_invokeSocketIdx[i],(char*)g_invokeSensorNameStr[i],false,CAMERA_HW_DRVNAME1);   // power off again
                PK_ERR("SensorOpen");
                return ret;
            }
        }
    }
    KD_MULTI_FUNCTION_EXIT();
    return ERROR_NONE;
}
g_bEnableDriver is a boolean array, defined as static BOOL g_bEnableDriver[KDIMGSENSOR_MAX_INVOKE_DRIVERS] = {FALSE,FALSE}, and g_pInvokeSensorFunc is defined as static SENSOR_FUNCTION_STRUCT *g_pInvokeSensorFunc[KDIMGSENSOR_MAX_INVOKE_DRIVERS] = {NULL,NULL}. g_pInvokeSensorFunc ends up pointing at the module driver's SENSOR_FUNCTION_STRUCT SensorFuncGC2035, because that address is passed in as a parameter; how both variables get filled in is covered below under the KDIMGSENSORIOC_X_SET_DRIVER command. When the enable flag and the function pointer are both valid, the sensor is powered up and ret = g_pInvokeSensorFunc[i]->SensorOpen() enters the sensor module driver's open function. The power-on/power-off function kdCISModulePowerOn lives in mediatek/custom/ztenj72_we_72_m536_kk/kernel/camera/camera/kd_camera_hw.c; the power sequence is really just a matter of configuring a few GPIOs and switching the camera's three supply rails on and off with the timing given in the sensor datasheet.
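kd_camera_hw.c itself is not reproduced here; purely as an illustration of what such a sequence tends to look like, here is a hypothetical sketch. The GPIO numbers, rail order and delays are invented placeholders; the real implementation drives the MTK PMIC rails and must follow the sensor datasheet.

/* Hypothetical power-up sketch (NOT the real kdCISModulePowerOn). */
#include <linux/gpio.h>
#include <linux/delay.h>

#define SKETCH_GPIO_CAM_PDN   100   /* placeholder PowerDown pin */
#define SKETCH_GPIO_CAM_RST   101   /* placeholder Reset pin     */

static void sketch_sensor_power_up(void)
{
    /* gpio_request() of both pins omitted for brevity */
    gpio_direction_output(SKETCH_GPIO_CAM_PDN, 1);   /* hold the sensor in power-down */
    gpio_direction_output(SKETCH_GPIO_CAM_RST, 0);   /* hold reset low                */

    /* enable the three rails (e.g. DOVDD -> AVDD -> DVDD) in datasheet order,
     * typically via regulator_enable()/PMIC calls, with the required delays */
    mdelay(5);

    gpio_set_value(SKETCH_GPIO_CAM_PDN, 0);          /* leave power-down              */
    mdelay(1);
    gpio_set_value(SKETCH_GPIO_CAM_RST, 1);          /* release reset                 */
    mdelay(10);                                      /* settle before MCLK/I2C access */
}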
b) The KDIMGSENSORIOC_X_SET_DRIVER command:
    case KDIMGSENSORIOC_X_SET_DRIVER:
        i4RetValue = kdSetDriver((unsigned int*)pBuff);
        break;
int kdSetDriver(unsigned int* pDrvIndex)
{
    ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT *pSensorList = NULL;   // structure holding each sensor's ID, name and init entry point
    u32 drvIdx[KDIMGSENSOR_MAX_INVOKE_DRIVERS] = {0,0};
    u32 i;

    PK_XLOG_INFO("pDrvIndex:0x%08x/0x%08x \n",pDrvIndex[KDIMGSENSOR_INVOKE_DRIVER_0],pDrvIndex[KDIMGSENSOR_INVOKE_DRIVER_1]);

    //set driver for MAIN or SUB sensor
    if (0 != kdGetSensorInitFuncList(&pSensorList))   // get the address of the table that lists all registered sensors
    {
        PK_ERR("ERROR:kdGetSensorInitFuncList()\n");
        return -EIO;
    }

    for ( i = KDIMGSENSOR_INVOKE_DRIVER_0; i < KDIMGSENSOR_MAX_INVOKE_DRIVERS ; i++ ) {
        //
        spin_lock(&kdsensor_drv_lock);
        g_bEnableDriver[i] = FALSE;
        g_invokeSocketIdx[i] = (CAMERA_DUAL_CAMERA_SENSOR_ENUM)((pDrvIndex[i] & KDIMGSENSOR_DUAL_MASK_MSB)>>KDIMGSENSOR_DUAL_SHIFT);   // remember which camera socket (main/sub) is being set up
        spin_unlock(&kdsensor_drv_lock);
        drvIdx[i] = (pDrvIndex[i] & KDIMGSENSOR_DUAL_MASK_LSB);
        //
        if ( DUAL_CAMERA_NONE_SENSOR == g_invokeSocketIdx[i] ) { continue; }

        //ToDo: remove print information
        PK_XLOG_INFO("[kdSetDriver] i,g_invokeSocketIdx[%d] = %d :\n",i,i,drvIdx[i]);
        PK_XLOG_INFO("[kdSetDriver] i,drvIdx[%d] = %d :\n",i,i,drvIdx[i]);
        //
        if ( MAX_NUM_OF_SUPPORT_SENSOR > drvIdx[i] ) {
            if (NULL == pSensorList[drvIdx[i]].SensorInit) {
                PK_ERR("ERROR:kdSetDriver()\n");
                return -EIO;
            }

            pSensorList[drvIdx[i]].SensorInit(&g_pInvokeSensorFunc[i]);   // call the module driver's init function, which stores the module's SENSOR_FUNCTION_STRUCT (e.g. SensorFuncOV2685) into g_pInvokeSensorFunc
            if (NULL == g_pInvokeSensorFunc[i]) {
                PK_ERR("ERROR:NULL g_pSensorFunc[%d]\n",i);
                return -EIO;
            }
            //
            spin_lock(&kdsensor_drv_lock);
            g_bEnableDriver[i] = TRUE;
            g_CurrentInvokeCam = g_invokeSocketIdx[i];
            spin_unlock(&kdsensor_drv_lock);

            //get sensor name
            memcpy((char*)g_invokeSensorNameStr[i],(char*)pSensorList[drvIdx[i]].drvname,sizeof(pSensorList[drvIdx[i]].drvname));
            //return sensor ID
            //pDrvIndex[0] = (unsigned int)pSensorList[drvIdx].SensorId;
            PK_XLOG_INFO("[kdSetDriver] :[%d][%d][%d][%s][%d]\n",i,g_bEnableDriver[i],g_invokeSocketIdx[i],g_invokeSensorNameStr[i],sizeof(pSensorList[drvIdx[i]].drvname));
        }
    }
    return 0;
}
kdSetDriver is quite important: it should be the first command the upper layer sends down. At the top it declares ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT *pSensorList = NULL; ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT is the structure in src/kd_sensorlist.h that stores each sensor's ID, name and init function. The call if (0 != kdGetSensorInitFuncList(&pSensorList)) then makes pSensorList point at the start of the ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT array in src/kd_sensorlist.h:
UINT32 kdGetSensorInitFuncList(ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT **ppSensorList)
{
    if (NULL == ppSensorList)
    {
        PK_DBG("[kdGetSensorInitFuncList]ERROR: NULL ppSensorList\n");
        return 1;
    }
    *ppSensorList = &kdSensorList[0];   // kdSensorList lives in kd_sensorlist.h; it is the table of camera IDs, camera names and module-driver entry functions
    return 0;
}
After kdGetSensorInitFuncList returns, the code first sets g_bEnableDriver[i] to FALSE (the same flag we saw in the open function above). It then checks whether the module driver's init function exists with if (NULL == pSensorList[drvIdx[i]].SensorInit) and calls pSensorList[drvIdx[i]].SensorInit(&g_pInvokeSensorFunc[i]), passing the address of g_pInvokeSensorFunc[i]. That call, which for this sensor lands in UINT32 GC2035_YUV_SensorInit(PSENSOR_FUNCTION_STRUCT *pfFunc), makes g_pInvokeSensorFunc[i] point at the module driver's function table, which is exactly why kd_MultiSensorOpen above can call straight into the module driver's open function. Once that is done, the camera's ID and name are saved and g_bEnableDriver[i] is set to TRUE.
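For orientation, the kdSensorList table in kd_sensorlist.h that pSensorList points at is typically laid out as below. This is a sketch of the usual MTK layout, not code copied from this particular project; which entries exist depends on the sensors compiled in.

/* Sketch of kd_sensorlist.h: each entry binds a sensor ID, its driver
 * name string and the init entry point that hands back the module's
 * SENSOR_FUNCTION_STRUCT (e.g. GC2035_YUV_SensorInit below). */
ACDK_KD_SENSOR_INIT_FUNCTION_STRUCT kdSensorList[MAX_NUM_OF_SUPPORT_SENSOR+1] =
{
#if defined(GC2035_YUV)
    {GC2035_SENSOR_ID, SENSOR_DRVNAME_GC2035_YUV, GC2035_YUV_SensorInit},
#endif
#if defined(OV2685_YUV)
    {OV2685_SENSOR_ID, SENSOR_DRVNAME_OV2685_YUV, OV2685_YUV_SensorInit},
#endif
    /* ...one entry per sensor enabled for the project... */
    {0, {0}, NULL},   /* end-of-list terminator */
};

With the driver selected this way, the KDIMGSENSORIOC_T_OPEN command can now succeed: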
    case KDIMGSENSORIOC_T_OPEN:
        i4RetValue = adopt_CAMERA_HW_Open();
        break;
inline static int adopt_CAMERA_HW_Open(void)
{
    UINT32 err = 0;

    KD_IMGSENSOR_PROFILE_INIT();

    //power on sensor
    //if ((atomic_read(&g_CamHWOpend) == 0) && (atomic_read(&g_CamHWOpend2) == 0)) {
    if (atomic_read(&g_CamHWOpend) == 0 ) {
        //move into SensorOpen() for 2on1 driver
        // turn on power
        //kdModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM*) g_invokeSocketIdx, g_invokeSensorNameStr,true, CAMERA_HW_DRVNAME);
        //wait for power stable
        //mDELAY(10);
        //KD_IMGSENSOR_PROFILE("kdModulePowerOn");
        //
        if (g_pSensorFunc) {
            err = g_pSensorFunc->SensorOpen();
            if(ERROR_NONE != err) {
                PK_DBG(" ERROR:SensorOpen(), turn off power \n");
                kdModulePowerOn((CAMERA_DUAL_CAMERA_SENSOR_ENUM*) g_invokeSocketIdx, g_invokeSensorNameStr, false, CAMERA_HW_DRVNAME1);
            }
        }
        else {
            PK_DBG(" ERROR:NULL g_pSensorFunc\n");
        }
        KD_IMGSENSOR_PROFILE("SensorOpen");
    }
    else {
        //PK_ERR("adopt_CAMERA_HW_Open Fail, g_CamHWOpend = %d,g_CamHWOpend2 = %d\n ",atomic_read(&g_CamHWOpend),atomic_read(&g_CamHWOpend2) );
        PK_ERR("adopt_CAMERA_HW_Open Fail, g_CamHWOpend = %d\n ",atomic_read(&g_CamHWOpend) );
    }

    if (err == 0 ) {
        atomic_set(&g_CamHWOpend, 1);
        //atomic_set(&g_CamHWOpend2, 1);
    }
    return err?-EIO:err;
} /* adopt_CAMERA_HW_Open() */
So the KDIMGSENSORIOC_T_OPEN command simply calls adopt_CAMERA_HW_Open, and adopt_CAMERA_HW_Open reaches the module driver's open function through err = g_pSensorFunc->SensorOpen(). Rather than going through every command in the same detail, let us now look at what the individual interface functions of a sensor driver mean, using the gc2035 driver as the template.
UINT32 GC2035_YUV_SensorInit(PSENSOR_FUNCTION_STRUCT *pfFunc)
{
    /* To Do : Check Sensor status here */
    if (pfFunc!=NULL)
        *pfFunc=&SensorFuncGC2035;
    return ERROR_NONE;
}   /* GC2035_YUV_SensorInit() */

SENSOR_FUNCTION_STRUCT SensorFuncGC2035=
{
    GC2035Open,
    GC2035GetInfo,
    GC2035GetResolution,
    GC2035FeatureControl,
    GC2035Control,
    GC2035Close
};
GC2035Open:
UINT32 GC2035Open(void)
{
    volatile signed char i;
    kal_uint16 sensor_id=0;

    zoom_factor = 0;
    Sleep(10);
    SENSORDB("GC2035Open\r\n");

    // Read sensor ID to adjust I2C is OK?
    for(i=0;i<3;i++)
    {
        sensor_id = (GC2035_read_cmos_sensor(0xf0) << 8) | GC2035_read_cmos_sensor(0xf1);
        if(sensor_id != GC2035_SENSOR_ID)   // GC2035_SENSOR_ID = 0x2035
        {
            return ERROR_SENSOR_CONNECT_FAIL;
        }
    }
    SENSORDB("GC2035 Sensor Read ID OK \r\n");

    GC2035_Sensor_Init();
    GC2035_Write_More();
    Preview_Shutter = GC2035_read_shutter();

    return ERROR_NONE;
}
As the implementation shows, this function uses I2C to read the image sensor's ID registers and checks the device ID to confirm that the corresponding image sensor is actually connected, before loading the initial register settings.
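GC2035_read_cmos_sensor itself is not shown in the article; as a hedged illustration, a read helper for a sensor with 8-bit register addresses typically looks roughly like this, using the i2c_client saved in CAMERA_HW_i2c_probe (the real MTK driver goes through its own iReadRegI2C/iWriteRegI2C wrappers rather than calling the generic API directly):

#include <linux/i2c.h>

/* Hypothetical sketch: write the register address (e.g. 0xf0 / 0xf1),
 * then read one byte back from the sensor over I2C. */
static u8 sketch_read_cmos_sensor(struct i2c_client *client, u8 reg)
{
    u8 val = 0;

    i2c_master_send(client, (const char *)&reg, 1);   /* select the register       */
    i2c_master_recv(client, (char *)&val, 1);         /* read the register content */
    return val;
}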
GC2035GetInfo:
UINT32 GC2035GetInfo(MSDK_SCENARIO_ID_ENUM ScenarioId,
                     MSDK_SENSOR_INFO_STRUCT *pSensorInfo,
                     MSDK_SENSOR_CONFIG_STRUCT *pSensorConfigData)
{
    pSensorInfo->SensorPreviewResolutionX=GC2035_IMAGE_SENSOR_PV_WIDTH;
    pSensorInfo->SensorPreviewResolutionY=GC2035_IMAGE_SENSOR_PV_HEIGHT;
    pSensorInfo->SensorFullResolutionX=GC2035_IMAGE_SENSOR_FULL_WIDTH;
    pSensorInfo->SensorFullResolutionY=GC2035_IMAGE_SENSOR_FULL_HEIGHT;

    pSensorInfo->SensorCameraPreviewFrameRate=30;
    pSensorInfo->SensorVideoFrameRate=30;
    pSensorInfo->SensorStillCaptureFrameRate=10;
    pSensorInfo->SensorWebCamCaptureFrameRate=15;
    pSensorInfo->SensorResetActiveHigh=FALSE;
    pSensorInfo->SensorResetDelayCount=1;
    pSensorInfo->SensorOutputDataFormat=SENSOR_OUTPUT_FORMAT_YUYV;
    pSensorInfo->SensorClockPolarity=SENSOR_CLOCK_POLARITY_LOW; /*??? */
    pSensorInfo->SensorClockFallingPolarity=SENSOR_CLOCK_POLARITY_LOW;
    pSensorInfo->SensorHsyncPolarity = SENSOR_CLOCK_POLARITY_LOW;
    pSensorInfo->SensorVsyncPolarity = SENSOR_CLOCK_POLARITY_LOW;
    pSensorInfo->SensorInterruptDelayLines = 1;

    pSensorInfo->CaptureDelayFrame = 4;
    pSensorInfo->PreviewDelayFrame = 1; // 2 betty
    pSensorInfo->VideoDelayFrame = 0;
    pSensorInfo->SensorMasterClockSwitch = 0;
    pSensorInfo->SensorDrivingCurrent = ISP_DRIVING_6MA;
    pSensorInfo->SensroInterfaceType=SENSOR_INTERFACE_TYPE_PARALLEL;

    SENSORDB("GC2035GetInfo\r\n");

    switch (ScenarioId)
    {
    case MSDK_SCENARIO_ID_CAMERA_PREVIEW:
    case MSDK_SCENARIO_ID_VIDEO_PREVIEW:
        pSensorInfo->SensorClockFreq=22;
        pSensorInfo->SensorClockDividCount=3;
        pSensorInfo->SensorClockRisingCount= 0;
        pSensorInfo->SensorClockFallingCount= 2;
        pSensorInfo->SensorPixelClockCount= 3;
        pSensorInfo->SensorDataLatchCount= 2;
        pSensorInfo->SensorGrabStartX = 2;
        pSensorInfo->SensorGrabStartY = 2;
        break;
    case MSDK_SCENARIO_ID_CAMERA_CAPTURE_JPEG:
        pSensorInfo->SensorClockFreq=22;
        pSensorInfo->SensorClockDividCount=3;
        pSensorInfo->SensorClockRisingCount= 0;
        pSensorInfo->SensorClockFallingCount= 2;
        pSensorInfo->SensorPixelClockCount= 3;
        pSensorInfo->SensorDataLatchCount= 2;
        pSensorInfo->SensorGrabStartX = 2;
        pSensorInfo->SensorGrabStartY = 2;
        break;
    default:
        pSensorInfo->SensorClockFreq=22;
        pSensorInfo->SensorClockDividCount=3;
        pSensorInfo->SensorClockRisingCount=0;
        pSensorInfo->SensorClockFallingCount=2;
        pSensorInfo->SensorPixelClockCount=3;
        pSensorInfo->SensorDataLatchCount=2;
        pSensorInfo->SensorGrabStartX = 2;
        pSensorInfo->SensorGrabStartY = 2;
        break;
    }

    memcpy(pSensorConfigData, &GC2035SensorConfigData, sizeof(MSDK_SENSOR_CONFIG_STRUCT));
    return ERROR_NONE;
}
Three parameters are passed into this function. The first selects the camera working scenario (still capture, video recording and so on); the second is filled in with the image sensor's settings (clock settings, preview/capture frame rates, sync polarities and the like); the third is again sensor configuration data. To see exactly what each field does, it is enough to look at how the corresponding structure is defined.
GC2035GetResolution:
UINT32 GC2035GetResolution(MSDK_SENSOR_RESOLUTION_INFO_STRUCT *pSensorResolution)
{
    SENSORDB("GC2035GetResolution\r\n");

    pSensorResolution->SensorFullWidth=GC2035_IMAGE_SENSOR_FULL_WIDTH - 2 * IMAGE_SENSOR_START_GRAB_X;
    pSensorResolution->SensorFullHeight=GC2035_IMAGE_SENSOR_FULL_HEIGHT - 2 * IMAGE_SENSOR_START_GRAB_Y;
    pSensorResolution->SensorPreviewWidth=GC2035_IMAGE_SENSOR_PV_WIDTH - 2 * IMAGE_SENSOR_START_GRAB_X;
    pSensorResolution->SensorPreviewHeight=GC2035_IMAGE_SENSOR_PV_HEIGHT - 2 * IMAGE_SENSOR_START_GRAB_Y;
    pSensorResolution->SensorVideoWidth=GC2035_IMAGE_SENSOR_PV_WIDTH - 2 * IMAGE_SENSOR_START_GRAB_X;
    pSensorResolution->SensorVideoHeight=GC2035_IMAGE_SENSOR_PV_HEIGHT - 2 * IMAGE_SENSOR_START_GRAB_Y;

    return ERROR_NONE;
}
This reports the sensor's output width and height for full-size, preview and video modes (minus the grab margins).
GC2035FeatureControl:
UINT32 GC2035FeatureControl(MSDK_SENSOR_FEATURE_ENUM FeatureId,
                            UINT8 *pFeaturePara, UINT32 *pFeatureParaLen)
{
    UINT16 *pFeatureReturnPara16=(UINT16 *) pFeaturePara;
    UINT16 *pFeatureData16=(UINT16 *) pFeaturePara;
    UINT32 *pFeatureReturnPara32=(UINT32 *) pFeaturePara;
    UINT32 *pFeatureData32=(UINT32 *) pFeaturePara;
    MSDK_SENSOR_CONFIG_STRUCT *pSensorConfigData=(MSDK_SENSOR_CONFIG_STRUCT *) pFeaturePara;
    MSDK_SENSOR_REG_INFO_STRUCT *pSensorRegData=(MSDK_SENSOR_REG_INFO_STRUCT *) pFeaturePara;

    switch (FeatureId)
    {
    case SENSOR_FEATURE_GET_RESOLUTION:
        *pFeatureReturnPara16++=GC2035_IMAGE_SENSOR_FULL_WIDTH;
        *pFeatureReturnPara16=GC2035_IMAGE_SENSOR_FULL_HEIGHT;
        *pFeatureParaLen=4;
        break;
    case SENSOR_FEATURE_GET_PERIOD:
        *pFeatureReturnPara16++=GC2035_IMAGE_SENSOR_PV_WIDTH;
        *pFeatureReturnPara16=GC2035_IMAGE_SENSOR_PV_HEIGHT;
        *pFeatureParaLen=4;
        break;
    case SENSOR_FEATURE_GET_PIXEL_CLOCK_FREQ:
        //*pFeatureReturnPara32 = GC2035_sensor_pclk/10;
        *pFeatureParaLen=4;
        break;
    case SENSOR_FEATURE_SET_ESHUTTER:
        break;
    case SENSOR_FEATURE_SET_NIGHTMODE:
        GC2035_night_mode((BOOL) *pFeatureData16);
        break;
    case SENSOR_FEATURE_SET_GAIN:
    case SENSOR_FEATURE_SET_FLASHLIGHT:
        break;
    case SENSOR_FEATURE_SET_ISP_MASTER_CLOCK_FREQ:
        GC2035_isp_master_clock=*pFeatureData32;
        break;
    case SENSOR_FEATURE_SET_REGISTER:
        GC2035_write_cmos_sensor(pSensorRegData->RegAddr, pSensorRegData->RegData);
        break;
    case SENSOR_FEATURE_GET_REGISTER:
        pSensorRegData->RegData = GC2035_read_cmos_sensor(pSensorRegData->RegAddr);
        break;
    case SENSOR_FEATURE_GET_CONFIG_PARA:
        memcpy(pSensorConfigData, &GC2035SensorConfigData, sizeof(MSDK_SENSOR_CONFIG_STRUCT));
        *pFeatureParaLen=sizeof(MSDK_SENSOR_CONFIG_STRUCT);
        break;
    case SENSOR_FEATURE_SET_CCT_REGISTER:
    case SENSOR_FEATURE_GET_CCT_REGISTER:
    case SENSOR_FEATURE_SET_ENG_REGISTER:
    case SENSOR_FEATURE_GET_ENG_REGISTER:
    case SENSOR_FEATURE_GET_REGISTER_DEFAULT:
    case SENSOR_FEATURE_CAMERA_PARA_TO_SENSOR:
    case SENSOR_FEATURE_SENSOR_TO_CAMERA_PARA:
    case SENSOR_FEATURE_GET_GROUP_INFO:
    case SENSOR_FEATURE_GET_ITEM_INFO:
    case SENSOR_FEATURE_SET_ITEM_INFO:
    case SENSOR_FEATURE_GET_ENG_INFO:
        break;
    case SENSOR_FEATURE_GET_GROUP_COUNT:
        *pFeatureReturnPara32++=0;
        *pFeatureParaLen=4;
        break;
    case SENSOR_FEATURE_GET_LENS_DRIVER_ID:
        // get the lens driver ID from EEPROM or just return LENS_DRIVER_ID_DO_NOT_CARE
        // if EEPROM does not exist in camera module.
        *pFeatureReturnPara32=LENS_DRIVER_ID_DO_NOT_CARE;
        *pFeatureParaLen=4;
        break;
    case SENSOR_FEATURE_CHECK_SENSOR_ID:
        GC2035_GetSensorID(pFeatureData32);
        break;
    case SENSOR_FEATURE_SET_YUV_CMD:
        //printk("GC2035 YUV sensor Setting:%d, %d \n", *pFeatureData32, *(pFeatureData32+1));
        GC2035YUVSensorSetting((FEATURE_ID)*pFeatureData32, *(pFeatureData32+1));
        break;
    case SENSOR_FEATURE_SET_VIDEO_MODE:
        GC2035YUVSetVideoMode(*pFeatureData16);
        break;
    default:
        break;
    }
    return ERROR_NONE;
}
Here the upper layer supplies a feature ID, and the driver executes the corresponding case, either acting on the parameters it is given or filling in the parameter buffer and its length (pFeaturePara / pFeatureParaLen) as the return value; a small usage sketch follows.
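Based directly on the SENSOR_FEATURE_GET_RESOLUTION case above: the caller passes a buffer, the driver writes two UINT16 values into it and reports 4 bytes back.

/* Sketch: querying the full-size resolution through the feature interface. */
UINT16 res[2] = {0, 0};
UINT32 paraLen = sizeof(res);

GC2035FeatureControl(SENSOR_FEATURE_GET_RESOLUTION, (UINT8 *)res, &paraLen);
/* afterwards: res[0] == GC2035_IMAGE_SENSOR_FULL_WIDTH,
 *             res[1] == GC2035_IMAGE_SENSOR_FULL_HEIGHT, paraLen == 4 */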
GC2035Control:
UINT32 GC2035Control(MSDK_SCENARIO_ID_ENUM ScenarioId, MSDK_SENSOR_EXPOSURE_WINDOW_STRUCT *pImageWindow,
                     MSDK_SENSOR_CONFIG_STRUCT *pSensorConfigData)
{
    switch (ScenarioId)
    {
    case MSDK_SCENARIO_ID_CAMERA_PREVIEW:
    case MSDK_SCENARIO_ID_VIDEO_PREVIEW:
        GC2035_sensor_cap_zsd = KAL_FALSE;
        GC2035Preview(pImageWindow, pSensorConfigData);
        break;
    case MSDK_SCENARIO_ID_CAMERA_CAPTURE_JPEG:
        GC2035_sensor_cap_zsd = KAL_FALSE;
        GC2035Capture(pImageWindow, pSensorConfigData);
        break;
    case MSDK_SCENARIO_ID_CAMERA_ZSD:
        GC2035_sensor_cap_zsd = KAL_TRUE;
        GC2035Capture(pImageWindow, pSensorConfigData);
        break;
    default:
        break;
    }
    return TRUE;
}
Like the previous function, this one is a control interface: the upper layer selects a scenario and the driver switches the sensor into preview, capture or ZSD mode accordingly.
GC2035Close:
UINT32 GC2035Close(void)
{
    // CISModulePowerOn(FALSE);
    SENSORDB("GC2035Close\r\n");
    return ERROR_NONE;
}
Close does not do anything here.