android13/hardware/rockchip/camera/common/platformdata/PlatformData.cpp

2191 lines
72 KiB
C++

/*
* Copyright (C) 2013-2017 Intel Corporation
* Copyright (c) 2017, Fuzhou Rockchip Electronics Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "PlatformData"
#include "LogHelper.h"
#include "PlatformData.h"
#include "CameraProfiles.h"
#include "CameraMetadataHelper.h"
#include "v4l2dev/v4l2device.h"
#include "UtilityMacros.h"
#include "ChromeCameraProfiles.h"
#include <linux/media.h> // media controller
#include <linux/kdev_t.h> // MAJOR(), MINOR()
#include <string>
#include <sstream>
#include <sys/stat.h>
#include <fstream>
#include <CameraMetadata.h>
#include <rkisp_control_loop.h>
#ifdef CAMERA_RKISP2_SUPPORT
#include "RKISP2CameraCapInfo.h"
#else
#include "RKISP1CameraCapInfo.h"
#endif
// TODO this should come from the crl header file
// crl is a common code module in sensor driver, which contains
// basic functions for driver control
#define CRL_CID_SENSOR_MODE 0x00982954
#include "MediaController.h"
#include "MediaEntity.h"
USING_METADATA_NAMESPACE;
NAMESPACE_DECLARATION {
using std::string;
#define KERNEL_MODULE_LOAD_DELAY 200000
#ifdef MEDIA_CTRL_INIT_DELAYED
#define RETRY_COUNTER 20
#else
#define RETRY_COUNTER 0
#endif
#define FORCE_DISABLE_ISP_RAW_PATH 1
// Global singleton state owned by PlatformData: created in init(),
// destroyed in deinit(). mInitialized gates successful initialization.
bool PlatformData::mInitialized = false;
CameraProfiles* PlatformData::mInstance = nullptr;
CameraHWInfo* PlatformData::mCameraHWInfo = nullptr;
GcssKeyMap* PlatformData::mGcssKeyMap = nullptr;
// NOTE(review): file-scope mutable CameraMetadata reused by getCameraInfo();
// presumably getCameraInfo() is never called concurrently — verify callers.
CameraMetadata staticMeta;
/**
* Sensor drivers have been registered to media controller
* or to the main device (old style driver)
* The actual main device or media controller is detected at runtime
* (see CameraDetect)
* These are the default values in case is not detected correctly
*/
static const char *DEFAULT_MAIN_DEVICE = "/dev/video0";
/**
* The /var/cache/camera directory hosts the device property file required to
* fill in the tuning cache files generated by the camera HAL at runtime
*/
static const char *CAMERA_CACHE_DIR = "/var/cache/camera/";
/**
* Property file defines product name and manufactory info
* Used for EXIF header of JPEG
* Format: key=value in each line
*/
static const char *CAMERA_PROPERTY_FILE = "camera.prop";
GcssKeyMap::GcssKeyMap()
{
// Build the string -> ia_uid lookup table via the X-macro list in
// gcss_keys.h: each GCSS_KEY(key, str) entry expands to one map pair.
#define GCSS_KEY(key, str) std::make_pair(#str, GCSS_KEY_##key),
#define GCSS_KEY_SECTION_START(key, str, val) GCSS_KEY(key, str)
mMap = {
#include "gcss_keys.h"
};
// Undefine so later includes of gcss_keys.h are unaffected.
#undef GCSS_KEY
#undef GCSS_KEY_SECTION_START
}
// Trivial destructor: mMap cleans itself up.
GcssKeyMap::~GcssKeyMap() = default;
void GcssKeyMap::gcssKeyMapInsert(std::map<std::string, ia_uid> &customMap)
{
mMap.insert(customMap.begin(), customMap.end());
}
// Number of registered key/string pairs (narrowed to int, as before).
int GcssKeyMap::gcssKeyMapSize()
{
    return static_cast<int>(mMap.size());
}
// Reverse lookup: return the string name for a key. When the key is
// unknown, fall back to the first map entry's name (original behavior).
const char* GcssKeyMap::key2str(const ia_uid key)
{
    for (const auto &entry : mMap) {
        if (entry.second == key)
            return entry.first.c_str();
    }
    return mMap.begin()->first.c_str();
}
// Forward lookup: string name -> key, GCSS_KEY_NA when not registered.
ia_uid GcssKeyMap::str2key(const std::string &key_str)
{
    const auto it = mMap.find(key_str);
    return (it != mMap.end()) ? it->second : GCSS_KEY_NA;
}
// Candidate output resolutions ({width, height}), sorted largest-first.
// A resolution is advertised only when it fits inside the sensor size.
int resolution_array[][2] =
{
{4096, 3072},
{3264, 2448},
{2688, 1512},
{2592, 1944},
{2096, 1568},
{1920, 1080},
{1632, 1224},
{1600, 1200},
{1296, 972},
{1280, 960},
{1280, 720},
{800, 600},
{640, 480},
{640, 360},
{352, 288},
{320, 240},
{176, 144}
};
// Pixel formats advertised for every supported output resolution.
int format_array[] =
{
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED
};
/**
 * Publish ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: every entry of
 * resolution_array that fits inside the sensor full size, once per format
 * in format_array, tagged as an output stream.
 * \return 0 always.
 */
static int construct_stream_config_metadata(
rkisp_metadata_info_t *pMetaInfo,
camera_metadata *metadata)
{
    int32_t entries[256];
    int32_t count = 0;
    const int numFormats = sizeof(format_array) / sizeof(format_array[0]);
    const int numRes = sizeof(resolution_array) / sizeof(resolution_array[0]);

    for (int f = 0; f < numFormats; ++f) {
        for (int r = 0; r < numRes; ++r) {
            const int w = resolution_array[r][0];
            const int h = resolution_array[r][1];
            // Skip resolutions larger than the sensor's full size.
            if (pMetaInfo->full_size.width < w || pMetaInfo->full_size.height < h)
                continue;
            entries[count++] = format_array[f];
            entries[count++] = w;
            entries[count++] = h;
            entries[count++] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
        }
    }

    LOGD("*******STREAM CONFIG OUTPUT START*****");
    for (int k = 0; k < count; k += 4) {
        LOGD("%d %d %d %d", entries[k], entries[k+1], entries[k+2], entries[k+3]);
    }
    LOGD("*******STREAM CONFIG OUTPUT END*****");

    MetadataHelper::updateMetadata(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, entries, count);
    return 0;
}
/**
 * Publish minimum frame durations and JPEG stall durations (in ns) for
 * every advertised format/resolution pair. The binning-mode duration is
 * used for resolutions that fit the binning size; otherwise the full-size
 * duration applies.
 *
 * Fix: bin_fram_duration / full_fram_duration were read while
 * uninitialized when res_num did not select them (e.g. res_num == 1 but a
 * non-zero binning_size matched). Both now default to 0 and the binning
 * branch is only taken when a binning mode actually exists (res_num >= 2).
 * \return 0 always.
 */
static int construct_frame_duration_metadata(
rkisp_metadata_info_t *pMetaInfo,
camera_metadata *metadata)
{
    int32_t index = 0;
    int64_t config_buf[256];
    int64_t bin_fram_duration = 0, full_fram_duration = 0;
    int format_size = sizeof(format_array) / sizeof(int);
    int res_size = sizeof(resolution_array) / sizeof(int) / 2;

    // Duration in ns: (1000 ms / fps) * 1e6, as in the original code.
    if (pMetaInfo->res_num >= 1)
        full_fram_duration = (int64_t)((1000.0 / pMetaInfo->full_size.fps) * 1e6);
    if (pMetaInfo->res_num >= 2)
        bin_fram_duration = (int64_t)((1000.0 / pMetaInfo->binning_size.fps) * 1e6);

    for (int i = 0; i < format_size; i++) {
        for (int j = 0; j < res_size; j++) {
            if (pMetaInfo->res_num >= 2 &&
                pMetaInfo->binning_size.width >= resolution_array[j][0] &&
                pMetaInfo->binning_size.height >= resolution_array[j][1]) {
                config_buf[index++] = format_array[i];
                config_buf[index++] = resolution_array[j][0];
                config_buf[index++] = resolution_array[j][1];
                config_buf[index++] = bin_fram_duration;
            } else if (pMetaInfo->full_size.width >= resolution_array[j][0] &&
                       pMetaInfo->full_size.height >= resolution_array[j][1]) {
                config_buf[index++] = format_array[i];
                config_buf[index++] = resolution_array[j][0];
                config_buf[index++] = resolution_array[j][1];
                config_buf[index++] = full_fram_duration;
            }
        }
    }
    LOGD("*******FRAME DURATION CONFIG OUTPUT START*****");
    for (int k = 0; k < index; k += 4) {
        LOGD("%lld %lld %lld %lld", config_buf[k], config_buf[k+1], config_buf[k+2], config_buf[k+3]);
    }
    LOGD("*******FRAME DURATION CONFIG OUTPUT END*****");
    MetadataHelper::updateMetadata(metadata, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, config_buf, index);

    // Stall durations apply to JPEG (BLOB) only; same selection rule.
    index = 0;
    for (int j = 0; j < res_size; j++) {
        if (pMetaInfo->res_num >= 2 &&
            pMetaInfo->binning_size.width >= resolution_array[j][0] &&
            pMetaInfo->binning_size.height >= resolution_array[j][1]) {
            config_buf[index++] = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB;
            config_buf[index++] = resolution_array[j][0];
            config_buf[index++] = resolution_array[j][1];
            config_buf[index++] = bin_fram_duration;
        } else if (pMetaInfo->full_size.width >= resolution_array[j][0] &&
                   pMetaInfo->full_size.height >= resolution_array[j][1]) {
            config_buf[index++] = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB;
            config_buf[index++] = resolution_array[j][0];
            config_buf[index++] = resolution_array[j][1];
            config_buf[index++] = full_fram_duration;
        }
    }
    LOGD("*******STALL DURATION CONFIG OUTPUT START*****");
    for (int k = 0; k < index; k += 4) {
        LOGD("%lld %lld %lld %lld", config_buf[k], config_buf[k+1], config_buf[k+2], config_buf[k+3]);
    }
    LOGD("*******STALL DURATION CONFIG OUTPUT END*****");
    MetadataHelper::updateMetadata(metadata, ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, config_buf, index);
    return 0;
}
/**
 * Build ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES for RAW sensors.
 * The lowest advertised fps is derived from the IQ tuning's longest
 * exposure time, clamped by the sensor's maximum fps.
 *
 * Fixes: guard the 1/time division against a zero/negative max exposure
 * time; "lager" -> "larger" in the error messages.
 * \return 0 on success, -1 when the mode fps values are inconsistent.
 */
static int construct_raw_sensor_fps_range_metadata(
rkisp_metadata_info_t *pMetaInfo,
camera_metadata *metadata)
{
    int config_buf[256], index = 0;
    int iq_min_fps, dyn_min_fps, full_size_fps, binning_size_fps;

    full_size_fps = pMetaInfo->full_size.fps;
    binning_size_fps = pMetaInfo->binning_size.fps;
    // time_range[1] is the longest exposure time (seconds); its inverse is
    // the lowest fps the tuning allows. Avoid dividing by zero.
    iq_min_fps = (pMetaInfo->time_range[1] > 0) ?
                 (int)(1.0 / pMetaInfo->time_range[1]) : full_size_fps;
    dyn_min_fps = iq_min_fps < full_size_fps ? iq_min_fps : full_size_fps;
    if (pMetaInfo->res_num == 1) {
        if (full_size_fps < 24) {
            LOGE("Error: size (%dx%d) max fps should be larger than 24, now is %d.",
                 pMetaInfo->full_size.width, pMetaInfo->full_size.height, full_size_fps);
            return -1;
        }
        // One fixed range plus one variable range up to the max fps.
        config_buf[index++] = dyn_min_fps < 15 ? dyn_min_fps : 15;
        config_buf[index++] = full_size_fps;
        config_buf[index++] = full_size_fps;
        config_buf[index++] = full_size_fps;
    } else if (pMetaInfo->res_num == 2) {
        if (full_size_fps > binning_size_fps) {
            LOGE("Error: binning size(%dx%d) fps(%d) should be larger than full size(%dx%d) fps (%d)!",
                 pMetaInfo->binning_size.width, pMetaInfo->binning_size.height, binning_size_fps,
                 pMetaInfo->full_size.width, pMetaInfo->full_size.height, full_size_fps);
            return -1;
        }
        if (binning_size_fps < 24) {
            LOGE("Error: binning size (%dx%d) max fps should be larger than 24, now is %d.",
                 pMetaInfo->binning_size.width, pMetaInfo->binning_size.height, binning_size_fps);
            return -1;
        }
        if (full_size_fps == binning_size_fps) {
            config_buf[index++] = dyn_min_fps < 15 ? dyn_min_fps : 15;
            config_buf[index++] = full_size_fps;
            config_buf[index++] = full_size_fps;
            config_buf[index++] = full_size_fps;
        } else {
            if (full_size_fps < 15) {
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = dyn_min_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
            } else {
                config_buf[index++] = 15;
                config_buf[index++] = 15;
                config_buf[index++] = dyn_min_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = dyn_min_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
            }
        }
    }
    LOGD("*******FPS RANGE CONFIG OUTPUT START*****");
    for (int k = 0; k < index; k += 2) {
        LOGD("[%d %d]", config_buf[k], config_buf[k+1]);
    }
    LOGD("*******FPS RANGE CONFIG OUTPUT END*****");
    MetadataHelper::updateMetadata(metadata, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, config_buf, index);
    return 0;
}
/**
 * Build ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES for SoC (YUV)
 * sensors. Unlike the RAW variant, the lower bound is a fixed 15 fps
 * (there is no IQ tuning to derive a minimum from).
 *
 * Fixes: removed the unused iq_min_fps/dyn_min_fps locals; "lager" ->
 * "larger" in the error messages.
 * \return 0 on success, -1 when the mode fps values are inconsistent.
 */
static int construct_soc_sensor_fps_range_metadata(
rkisp_metadata_info_t *pMetaInfo,
camera_metadata *metadata)
{
    int config_buf[256], index = 0;
    int full_size_fps, binning_size_fps;

    full_size_fps = pMetaInfo->full_size.fps;
    binning_size_fps = pMetaInfo->binning_size.fps;
    if (pMetaInfo->res_num == 1) {
        if (full_size_fps < 24) {
            LOGE("Error: size (%dx%d) max fps should be larger than 24, now is %d.",
                 pMetaInfo->full_size.width, pMetaInfo->full_size.height, full_size_fps);
            return -1;
        }
        config_buf[index++] = 15;
        config_buf[index++] = full_size_fps;
        config_buf[index++] = full_size_fps;
        config_buf[index++] = full_size_fps;
    } else if (pMetaInfo->res_num == 2) {
        if (full_size_fps > binning_size_fps) {
            LOGE("Error: binning size(%dx%d) fps(%d) should be larger than full size(%dx%d) fps (%d)!",
                 pMetaInfo->binning_size.width, pMetaInfo->binning_size.height, binning_size_fps,
                 pMetaInfo->full_size.width, pMetaInfo->full_size.height, full_size_fps);
            return -1;
        }
        if (binning_size_fps < 24) {
            LOGE("Error: binning size (%dx%d) max fps should be larger than 24, now is %d.",
                 pMetaInfo->binning_size.width, pMetaInfo->binning_size.height, binning_size_fps);
            return -1;
        }
        if (full_size_fps == binning_size_fps) {
            config_buf[index++] = 15;
            config_buf[index++] = full_size_fps;
            config_buf[index++] = full_size_fps;
            config_buf[index++] = full_size_fps;
        } else {
            if (full_size_fps < 15) {
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
            } else {
                config_buf[index++] = 15;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = full_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
                config_buf[index++] = binning_size_fps;
            }
        }
    }
    LOGD("*******FPS RANGE CONFIG OUTPUT START*****");
    for (int k = 0; k < index; k += 2) {
        LOGD("[%d %d]", config_buf[k], config_buf[k+1]);
    }
    LOGD("*******FPS RANGE CONFIG OUTPUT END*****");
    MetadataHelper::updateMetadata(metadata, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, config_buf, index);
    return 0;
}
// Upper bound for JPEG thumbnail dimensions.
#define MAX_THUMBSIZE_W (320)
#define MAX_THUMBSIZE_H (240)
// Candidate thumbnail sizes, smallest-first. {0, 0} is mandatory: it
// means "no thumbnail" per the Android JPEG thumbnail contract.
int thumbnail_array[][2] =
{
{0, 0},
{128, 96},
{160, 120},
{240, 135},
{240, 180},
{320, 180},
{MAX_THUMBSIZE_W, MAX_THUMBSIZE_H}
};
/**
 * Build the ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES entry.
 * Picks the largest standard resolution that fits the sensor full size,
 * derives the maximum thumbnail size with the same aspect ratio, then
 * lists every thumbnail_array entry that does not exceed it.
 *
 * Fix: the search over resolution_array had an unconditional `break`, so
 * only the first (largest) entry was ever considered; the break now fires
 * only once a fitting resolution has been found.
 * \return 0 always.
 */
static int construct_thumbnail_sizes_metadata(
rkisp_metadata_info_t *pMetaInfo,
camera_metadata *metadata)
{
    int config_buf[64], index = 0;
    int pic_width, pic_height;
    int array_size, max_w, max_h;
    float ratio;

    array_size = sizeof(resolution_array) / sizeof(int) / 2;
    // Default to the raw sensor size if no standard resolution fits.
    pic_width = pMetaInfo->full_size.width;
    pic_height = pMetaInfo->full_size.height;
    for (int i = 0; i < array_size; i++) {
        if ((pMetaInfo->full_size.width >= resolution_array[i][0]) &&
            (pMetaInfo->full_size.height >= resolution_array[i][1])) {
            pic_width = resolution_array[i][0];
            pic_height = resolution_array[i][1];
            // resolution_array is sorted largest-first: first fit is best.
            break;
        }
    }
    // Largest thumbnail with the picture's aspect ratio within the cap.
    ratio = (float)pic_width / (float)pic_height;
    max_h = MAX_THUMBSIZE_W / ratio;
    if (max_h > MAX_THUMBSIZE_H)
        max_h = MAX_THUMBSIZE_H;
    max_w = max_h * ratio;
    array_size = sizeof(thumbnail_array) / sizeof(int) / 2;
    for (int i = 0; i < array_size; i++) {
        if ((thumbnail_array[i][0] * thumbnail_array[i][1]) < (max_w * max_h)) {
            config_buf[index++] = thumbnail_array[i][0];
            config_buf[index++] = thumbnail_array[i][1];
        } else if (((thumbnail_array[i][0] * thumbnail_array[i][1]) ==
                    (max_w * max_h)) && (thumbnail_array[i][0] <= max_w)) {
            config_buf[index++] = thumbnail_array[i][0];
            config_buf[index++] = thumbnail_array[i][1];
        }
    }
    LOGD("*******THUMBNAIL SIZE CONFIG OUTPUT START*****");
    for (int i = 0; i < index; i += 2) {
        LOGD("[%d %d]", config_buf[i], config_buf[i+1]);
    }
    LOGD("*******THUMBNAIL SIZE CONFIG OUTPUT END*****");
    MetadataHelper::updateMetadata(metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, config_buf, index);
    return 0;
}
/**
 * Override XML-derived static metadata for an auto-generated RAW sensor
 * entry with values produced from the IQ file (sensitivity, exposure,
 * array sizes, JPEG size and the derived stream/duration/fps tables).
 *
 * Fix: removed the duplicated ANDROID_SENSOR_INFO_SENSITIVITY_RANGE
 * update (it was written twice back-to-back).
 */
static void modify_raw_sensor_metadata
(
rkisp_metadata_info_t* metadata_info,
camera_metadata *staticMeta
)
{
    int gain_range[2];
    int64_t time_range[2];

    // Sensitivity in ISO units: IQ gain scaled by 100.
    gain_range[0] = metadata_info->gain_range[0] * 100;
    gain_range[1] = metadata_info->gain_range[1] * 100;
    LOGD("GAIN RANGE: [%d %d]", gain_range[0], gain_range[1]);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, gain_range, 2);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, &gain_range[1], 1);
    // Hard-coded exposure range in ns; the IQ-derived expressions are kept
    // commented out exactly as in the original code.
    time_range[0] = 100000;//metadata_info->time_range[0] * 1e9;
    time_range[1] = 200000000;//metadata_info->time_range[1] * 1e9;
    LOGD("TIME RANGE: [%lld %lld]", time_range[0], time_range[1]);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, time_range, 2);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &time_range[1], 1);
    // Active/pixel array: full sensor size at origin (0,0).
    int array_size[4];
    array_size[0] = 0;
    array_size[1] = 0;
    array_size[2] = metadata_info->full_size.width;
    array_size[3] = metadata_info->full_size.height;
    LOGD("ARRAY SIZE: [%d %d %d %d]", array_size[0], array_size[1], array_size[2], array_size[3]);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, array_size, 4);
    MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, &array_size[2], 2);
    // Worst-case JPEG size estimate: width * height * 1.5 bytes.
    int jpegmaxsize = metadata_info->full_size.width * metadata_info->full_size.height * 3 / 2;
    MetadataHelper::updateMetadata(staticMeta, ANDROID_JPEG_MAX_SIZE, &jpegmaxsize, 1);
    construct_thumbnail_sizes_metadata(metadata_info, staticMeta);
    construct_stream_config_metadata(metadata_info, staticMeta);
    construct_frame_duration_metadata(metadata_info, staticMeta);
    construct_raw_sensor_fps_range_metadata(metadata_info, staticMeta);
}
// Override XML-derived static metadata for an auto-generated SoC (YUV)
// sensor entry. SoC sensors have no IQ tuning, so fixed sensitivity and
// exposure ranges are published; size-derived tables come from the
// rkisp_metadata_info_t description.
static void modify_soc_sensor_metadata
(
rkisp_metadata_info_t* metadata_info,
camera_metadata *staticMeta)
{
// Fixed ISO 100-1600 and 100us-666.7ms exposure window for SoC sensors.
int gain_range[2] = {100, 1600};
int64_t time_range[2] = {100000,666666666};
LOGD("GAIN RANGE: [%d %d]", gain_range[0], gain_range[1]);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, gain_range, 2);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, &gain_range[1], 1);
LOGD("TIME RANGE: [%lld %lld]", time_range[0], time_range[1]);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, time_range, 2);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &time_range[1], 1);
// Active/pixel array: full sensor size at origin (0,0).
int array_size[4];
array_size[0] = 0;
array_size[1] = 0;
array_size[2] = metadata_info->full_size.width;
array_size[3] = metadata_info->full_size.height;
LOGD("ARRAY SIZE: [%d %d %d %d]", array_size[0], array_size[1], array_size[2], array_size[3]);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, array_size, 4);
MetadataHelper::updateMetadata(staticMeta, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, &array_size[2], 2);
// Worst-case JPEG size estimate: width * height * 1.5 bytes.
int jpegmaxsize = metadata_info->full_size.width * metadata_info->full_size.height * 3 / 2;
MetadataHelper::updateMetadata(staticMeta, ANDROID_JPEG_MAX_SIZE, &jpegmaxsize, 1);
construct_thumbnail_sizes_metadata(metadata_info, staticMeta);
construct_stream_config_metadata(metadata_info, staticMeta);
construct_frame_duration_metadata(metadata_info, staticMeta);
construct_soc_sensor_fps_range_metadata(metadata_info, staticMeta);
}
/**
* This method is only called once when the HAL library is loaded
*
* At this time we can load the XML config (camera3_prfiles.xml).
*/
void PlatformData::init()
{
LOGI("@%s", __FUNCTION__);
// Recreate all singleton state from scratch; init() may be re-entered.
if (mGcssKeyMap) {
delete mGcssKeyMap;
mGcssKeyMap = nullptr;
}
mGcssKeyMap = new GcssKeyMap;
if (mCameraHWInfo) {
delete mCameraHWInfo;
mCameraHWInfo = nullptr;
}
mCameraHWInfo = new CameraHWInfo();
if (mInstance) {
delete mInstance;
mInstance = nullptr;
}
// Parse camera3_profiles.xml; any failure tears everything down again.
mInstance = new ChromeCameraProfiles(mCameraHWInfo);
int ret = mInstance->init();
if (ret != OK) {
LOGE("Failed to initialize Camera profiles");
deinit();
return;
}
mCameraHWInfo->initAvailableSensorOutputFormats();
int numberOfCameras = PlatformData::numberOfCameras();
if (numberOfCameras == 0 || numberOfCameras > MAX_CPF_CACHED) {
LOGE("Camera HAL Basic Platform initialization failed !!number of camera: %d", numberOfCameras);
deinit();
return;
}
// Ask the rkisp control loop for metadata templates generated from the
// IQ files; used below to auto-fill per-sensor static metadata.
rkisp_metadata_info_t *metadata_info = NULL;
int num = 0;
ret = rkisp_construct_iq_default_metadatas(&metadata_info, &num);
if (ret < 0) {
LOGE("Fail to construct iq default metadatas!");
deinit();
return;
}
if (num <= 0) {
LOGE("construct default metadata count: %d", num);
deinit();
return;
}
/**
 * This number currently comes from the number if sections in the XML
 * in the future this is not reliable if we want to have multiple cameras
 * supported in the same XML.
 * TODO: add a common field in the XML that lists the camera's OR
 * query from driver at runtime
 */
for (int i = 0; i < numberOfCameras; i++) {
const CameraCapInfo *cci = PlatformData::getCameraCapInfo(i);
if (cci == nullptr)
continue;
mCameraHWInfo->setMultiCameraMode(i);
// Only cameras flagged in the XML get auto-generated Android metadata.
if(cci->getForceAutoGenAndroidMetas()) {
const struct SensorDriverDescriptor *pDesc;
pDesc = mCameraHWInfo->getSensorDrvDes(i);
if(NULL == pDesc)
continue;
// Match the IQ metadata entry by "<index>_<orient>_<sensor>" name.
std::string entity_name = pDesc->mModuleIndexStr + "_" + pDesc->mPhyModuleOrient + "_" + pDesc->mModuleRealSensorName;
for(int j = 0; j < num; j++)
{
if(strstr(metadata_info[j].entity_name, entity_name.c_str()))
{
if (cci->sensorType() == SENSOR_TYPE_RAW) {
// Record the tuning sizes (full + binning) on the cap info,
// then overwrite the static metadata from the IQ values.
std::vector<struct FrameSize_t> tuningSize;
struct FrameSize_t frameSize;
#ifdef CAMERA_RKISP2_SUPPORT
rkisp2::RKISP2CameraCapInfo *capInfo = const_cast<rkisp2::RKISP2CameraCapInfo *>(static_cast<const rkisp2::RKISP2CameraCapInfo*>(cci));
#else
RKISP1CameraCapInfo *capInfo = const_cast<RKISP1CameraCapInfo *>(static_cast<const RKISP1CameraCapInfo*>(cci));
#endif
frameSize.width = metadata_info[j].full_size.width;
frameSize.height = metadata_info[j].full_size.height;
tuningSize.push_back(frameSize);
frameSize.width = metadata_info[j].binning_size.width;
frameSize.height = metadata_info[j].binning_size.height;
tuningSize.push_back(frameSize);
capInfo->mSupportTuningSize = tuningSize;
modify_raw_sensor_metadata(&metadata_info[j], getStaticMetadata(i));
}else if(cci->sensorType() == SENSOR_TYPE_SOC){
modify_soc_sensor_metadata(&metadata_info[j], getStaticMetadata(i));
}
}
}
}
}
mInitialized = true;
LOGD("Camera HAL static init - Done!");
}
/**
* This method is only called once when the HAL library is unloaded
*/
/**
 * Tear down all singleton state created by init().
 * delete on a null pointer is a no-op, so the previous per-pointer null
 * checks were redundant; the teardown order is unchanged.
 */
void PlatformData::deinit() {
    LOGI("@%s", __FUNCTION__);
    delete mCameraHWInfo;
    mCameraHWInfo = nullptr;
    delete mGcssKeyMap;
    mGcssKeyMap = nullptr;
    delete mInstance;
    mInstance = nullptr;
    mInitialized = false;
}
/**
* static acces method to implement the singleton
* mInstance should have been instantiated when the library loaded.
* Having nullptr is a serious error
*/
// Singleton accessor. mInstance is created when the library loads;
// nullptr here means initialization failed and is a serious error.
CameraProfiles *PlatformData::getInstance(void)
{
    if (mInstance != nullptr)
        return mInstance;
    LOGE("@%s Failed to create CameraProfiles instance", __FUNCTION__);
    return nullptr;
}
// Accessor for the GCSS key map built in init(); nullptr before init().
GcssKeyMap* PlatformData::getGcssKeyMap()
{
return mGcssKeyMap;
}
int PlatformData::numberOfCameras(void)
{
CameraProfiles * i = getInstance();
if (!i)
return 0;
int num = (int)i->mStaticMeta.size();
return (num <= MAX_CAMERAS) ? num : MAX_CAMERAS;
}
int PlatformData::getXmlCameraId(int cameraId)
{
CameraProfiles * i = getInstance();
if (!i)
return -1;
return (int)i->getXmlCameraId(cameraId);
}
// Capability info looked up by XML camera id; nullptr when profiles are
// unavailable.
const CameraCapInfo *PlatformData::getCameraCapInfoForXmlCameraId(int xmlCameraId)
{
    CameraProfiles *profiles = getInstance();
    return profiles ? profiles->getCameraCapInfoForXmlCameraId(xmlCameraId) : nullptr;
}
// Re-write int32-list entry `tag` with its current contents plus one
// extra element `insert`.
void appendTags(CameraMetadata &meta, int32_t tag, int32_t insert) {
    auto entry = meta.find(tag);
    std::vector<int32_t> values(entry.data.i32, entry.data.i32 + entry.count);
    values.push_back(insert);
    meta.update(tag, values.data(), values.size());
}
// Fill the camera_common camera_info struct for a camera id: facing,
// orientation, device API version and the static characteristics.
// NOTE(review): `staticMeta` below is the file-scope global CameraMetadata;
// the RKISP2 path also dereferences `cap` without a null check — verify
// getCameraCapInfo() cannot return nullptr here.
void PlatformData::getCameraInfo(int cameraId, struct camera_info * info)
{
info->facing = facing(cameraId);
info->orientation = orientation(cameraId);
info->device_version = getCameraDeviceAPIVersion();
#ifdef CAMERA_RKISP2_SUPPORT
rkisp2::RKISP2CameraCapInfo* cap = (rkisp2::RKISP2CameraCapInfo *)getCameraCapInfo(cameraId);
staticMeta = getStaticMetadata(cameraId);
// RAW sensors additionally advertise the Rockchip vendor tags (3DNR,
// brightness/contrast/saturation) in the request/result/characteristics
// key lists.
if (cap->sensorType() == SENSOR_TYPE_RAW ){
uint8_t mode = 1;
staticMeta.update(RK_NR_FEATURE_3DNR_MODE,&mode,1);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,RK_NR_FEATURE_3DNR_MODE);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,RK_NR_FEATURE_3DNR_MODE);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,RK_NR_FEATURE_3DNR_MODE);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,RK_CONTROL_AIQ_BRIGHTNESS);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,RK_CONTROL_AIQ_BRIGHTNESS);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,RK_CONTROL_AIQ_BRIGHTNESS);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,RK_CONTROL_AIQ_CONTRAST);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,RK_CONTROL_AIQ_CONTRAST);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,RK_CONTROL_AIQ_CONTRAST);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,RK_CONTROL_AIQ_SATURATION);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,RK_CONTROL_AIQ_SATURATION);
appendTags(staticMeta,ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,RK_CONTROL_AIQ_SATURATION);
}
// getAndLock/unlock pair yields a stable pointer to the metadata buffer.
info->static_camera_characteristics = staticMeta.getAndLock();
staticMeta.unlock( info->static_camera_characteristics);
#else
info->static_camera_characteristics = getStaticMetadata(cameraId);
#endif
//For now assume both cameras can operate independently.
//simultaneously open multiple camera may be influenced, please check
//hardware/libhardware/include/hardware/camera_common.h for detail
info->conflicting_devices = NULL;
info->conflicting_devices_length = 0;
info->resource_cost = 50;
LOGI("camera %d resource cost is %d", cameraId,
info->resource_cost);
}
/**
* Function converts the "lens.facing" android static meta data value to
* value needed by camera service
* Camera service uses different values from the android metadata
* Refer system/core/include/system/camera.h
*/
/**
 * Translate the ANDROID_LENS_FACING static metadata value to the camera
 * service's CAMERA_FACING_* convention (see system/core camera.h).
 *
 * Fix: `facing` was read uninitialized when the metadata entry is absent;
 * it now defaults to 0.
 * NOTE(review): the mapping compares the lens-facing value against
 * FRONT_CAMERA_ID; presumably the two value spaces line up — verify.
 */
int PlatformData::facing(int cameraId)
{
    uint8_t facing = 0;
    CameraMetadata staticMeta;
    staticMeta = getStaticMetadata(cameraId);
    MetadataHelper::getMetadataValue(staticMeta, ANDROID_LENS_FACING, facing);
    facing = (facing == FRONT_CAMERA_ID) ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    return facing;
}
/**
 * Return the ANDROID_SENSOR_ORIENTATION static metadata value (degrees).
 *
 * Fix: `orientation` was returned uninitialized when the metadata entry
 * is absent; it now defaults to 0.
 */
int PlatformData::orientation(int cameraId)
{
    int orientation = 0;
    CameraMetadata staticMeta;
    staticMeta = getStaticMetadata(cameraId);
    MetadataHelper::getMetadataValue(staticMeta, ANDROID_SENSOR_ORIENTATION, orientation);
    return orientation;
}
/**
* Retrieves the partial result count from the static metadata
* This number is the pieces that we return the the result for a single
* capture request. This number is specific to PSL implementations
* It has to be at least 1.
* \param cameraId[IN]: Camera Id that we are querying the value for
* \return value
*/
/**
 * Retrieve ANDROID_REQUEST_PARTIAL_RESULT_COUNT from the static metadata:
 * the number of pieces a single capture result is delivered in (PSL
 * specific). Must be at least 1; invalid values are clamped to 1.
 *
 * Fix: the two concatenated log literals were missing a separator and
 * printed "...RESULT_COUNTFIX your config".
 */
int PlatformData::getPartialMetadataCount(int cameraId)
{
    int partialMetadataCount = 0;
    CameraMetadata staticMeta;
    staticMeta = getStaticMetadata(cameraId);
    MetadataHelper::getMetadataValue(staticMeta,
        ANDROID_REQUEST_PARTIAL_RESULT_COUNT, partialMetadataCount);
    if (partialMetadataCount <= 0) {
        LOGW("Invalid value (%d) for ANDROID_REQUEST_PARTIAL_RESULT_COUNT, "
             "FIX your config", partialMetadataCount);
        partialMetadataCount = 1;
    }
    return partialMetadataCount;
}
/**
 * Return the static metadata buffer for a camera id, or nullptr for an
 * invalid id or missing profiles.
 *
 * Fixes: negative ids are now rejected (they previously indexed the
 * vector out of bounds); removed the dead `cameraId = 0` store that
 * preceded an unconditional nullptr return.
 */
camera_metadata_t * PlatformData::getStaticMetadata(int cameraId)
{
    if (cameraId < 0 || cameraId >= numberOfCameras()) {
        LOGE("ERROR @%s: Invalid camera: %d", __FUNCTION__, cameraId);
        return nullptr;
    }
    CameraProfiles * i = getInstance();
    if (!i)
        return nullptr;
    return i->mStaticMeta[cameraId];
}
/**
 * Build/return the default request metadata for (cameraId, requestType).
 * Out-of-range ids fall back to camera 0, as before.
 *
 * Fix: negative ids are now clamped as well (previously only too-large
 * ids were).
 */
camera_metadata_t *PlatformData::getDefaultMetadata(int cameraId, int requestType)
{
    if (cameraId < 0 || cameraId >= numberOfCameras()) {
        LOGE("ERROR @%s: Invalid camera: %d", __FUNCTION__, cameraId);
        cameraId = 0;
    }
    CameraProfiles * i = getInstance();
    if (!i)
        return nullptr;
    return i->constructDefaultMetadata(cameraId, requestType);
}
/**
 * Capability info for a camera id; invalid ids fall back to camera 0.
 *
 * Fix: the bound check used `>` against MAX_CAMERAS, letting the
 * off-by-one id MAX_CAMERAS through (valid indices are
 * 0..MAX_CAMERAS-1, see numberOfCameras()); negative ids are now
 * clamped too.
 */
const CameraCapInfo * PlatformData::getCameraCapInfo(int cameraId)
{
    // Use MAX_CAMERAS instead of numberOfCameras() as it will cause a recursive loop
    if (cameraId < 0 || cameraId >= MAX_CAMERAS) {
        LOGE("ERROR @%s: Invalid camera: %d", __FUNCTION__, cameraId);
        cameraId = 0;
    }
    CameraProfiles * i = getInstance();
    if (!i)
        return nullptr;
    return i->getCameraCapInfo(cameraId);
}
/**
* getDeviceIds
* returns a vector of strings with a list of names to identify the device
* the HAL is running on.
* The list has the most specific name first and then more generic names as
* fallback
* The values for this strings are:
* - If the platform supports spid the first string is a concatenation of the
* vendor_id + platform_family_id + produc_line_id. This is always the first
* for backwards compatibility reasons.
*
* This list can be used to find the correct configuration file: camera XML
* (camera3_profiles)
*
* Please Note:
* If in non-spid platforms the identifiers in the system properties are not
* precise enough and a new property is used, it should be returned first.
*
*/
// Collect device-identifying strings (most specific first) from Android
// system properties; unset properties are skipped. Always returns OK.
status_t PlatformData::getDeviceIds(std::vector<string> &names)
{
    static const char *deviceIdKeys[] = {
        "ro.product.device",
        "ro.product.board",
        "ro.board.platform",
    };
    char value[PATH_MAX] = {0};

    for (const char *key : deviceIdKeys) {
        if (LogHelper::__getEnviromentValue(key, value, sizeof(value)) != 0)
            names.push_back(string(value));
    }
    return OK;
}
// Hardware type for a camera id; SUPPORTED_HW_UNKNOWN when profiles are
// unavailable.
CameraHwType PlatformData::getCameraHwType(int cameraId)
{
    CameraProfiles *profiles = getInstance();
    return profiles ? profiles->getCameraHwforId(cameraId) : SUPPORTED_HW_UNKNOWN;
}
// Thin accessors delegating to the CameraHWInfo singleton.
// NOTE(review): these dereference mCameraHWInfo without a null check —
// they assume init() has run successfully before any of them is called.
const char* PlatformData::boardName(void)
{
return mCameraHWInfo->boardName();
}
const char* PlatformData::productName(void)
{
return mCameraHWInfo->productName();
}
const char* PlatformData::manufacturerName(void)
{
return mCameraHWInfo->manufacturerName();
}
bool PlatformData::supportDualVideo(void)
{
LOGD("@%s mCameraHWInfo->mSupportDualVideo(%s)", __FUNCTION__, mCameraHWInfo->mSupportDualVideo?"true":"false");
return mCameraHWInfo->supportDualVideo();
}
int PlatformData::getCameraDeviceAPIVersion(void)
{
return mCameraHWInfo->getCameraDeviceAPIVersion();
}
bool PlatformData::supportExtendedMakernote(void)
{
return mCameraHWInfo->supportExtendedMakernote();
}
bool PlatformData::supportFullColorRange(void)
{
return mCameraHWInfo->supportFullColorRange();
}
bool PlatformData::supportIPUAcceleration(void)
{
return mCameraHWInfo->supportIPUAcceleration();
}
/**
 * Parse /sys/devices/system/cpu/online ("0-N" or "0") to count CPU cores.
 * Falls back to 1 when the file is unreadable or has no range.
 *
 * Fixes: the old code warned whenever fewer than sizeof(buf) bytes were
 * read — i.e. on every normal read, since the file is only a few bytes —
 * it now warns only when nothing could be read. Reading at most
 * sizeof(buf)-1 bytes guarantees room for the terminating NUL.
 */
unsigned int PlatformData::getNumOfCPUCores()
{
    unsigned int cpuCores = 1;
    char buf[20];
    FILE *cpuOnline = fopen("/sys/devices/system/cpu/online", "r");
    if (cpuOnline) {
        CLEAR(buf);
        size_t size = fread(buf, 1, sizeof(buf) - 1, cpuOnline);
        if (size == 0) {
            LOGW("Failed to read number of CPU's ");
        }
        buf[size] = '\0';
        // "0-7" means cores 0..7 inclusive; a lone "0" means one core.
        char *p = strchr(buf, '-');
        if (p)
            cpuCores = 1 + atoi(p + 1);
        fclose(cpuOnline);
    }
    LOGI("@%s, line:%d, cpu core number:%d", __FUNCTION__, __LINE__, cpuCores);
    return cpuCores;
}
/**
* getActivePixelArray
*
* retrieves the Active Pixel Array (APA) static metadata entry and initializes
* a CameraWindow structure correctly from it.
* APA is defined as a rectangle that stores the values as
* (xmin,ymin, width,height)
*
* \param cameraId [IN]: Camera id of the device to query the APA
* \returns CameraWindow initialized with the APA or empty if it was not found
* but this should not happen
*/
CameraWindow PlatformData::getActivePixelArray(int cameraId)
{
CameraWindow apa;
camera_metadata_ro_entry entry;
// Zero-initialized window is the error fallback.
CLEAR(apa);
const camera_metadata *staticMeta = getStaticMetadata(cameraId);
if (CC_UNLIKELY(staticMeta == nullptr)) {
LOGE("@%s: Invalid camera id (%d) could not get static metadata",
__FUNCTION__, cameraId);
return apa;
}
find_camera_metadata_ro_entry(staticMeta,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
&entry);
// Entry layout: xmin, ymin, width, height.
if (entry.count >= 4) {
ia_coordinate topLeft;
INIT_COORDINATE(topLeft,entry.data.i32[0],entry.data.i32[1]);
apa.init(topLeft,
entry.data.i32[2], //width
entry.data.i32[3], //height
0);
} else {
LOGE("could not find ACTIVE_ARRAY_SIZE- INVALID XML configuration!!");
}
return apa;
}
// AE exposure-compensation step for a camera, read from
// ANDROID_CONTROL_AE_COMPENSATION_STEP; defaults to 1/3 EV when the
// entry is missing or malformed.
float PlatformData::getStepEv(int cameraId)
{
// Get the ev step
CameraMetadata staticMeta;
float stepEV = 1 / 3.0f;
int count = 0;
staticMeta = getStaticMetadata(cameraId);
camera_metadata_rational_t* aeCompStep =
(camera_metadata_rational_t*)MetadataHelper::getMetadataValues(staticMeta,
ANDROID_CONTROL_AE_COMPENSATION_STEP,
TYPE_RATIONAL,
&count);
if (count == 1 && aeCompStep != nullptr) {
stepEV = (float)aeCompStep->numerator / aeCompStep->denominator;
}
return stepEV;
}
// Default-construct hardware info with conservative capability defaults;
// real values are filled in by init()/readProperty().
CameraHWInfo::CameraHWInfo() :
mMainDevicePathName(DEFAULT_MAIN_DEVICE),
mHasMediaController(false)
{
mBoardName = "<not set>";
mProductName = "<not_set>";
mManufacturerName = "<not set>";
mCameraDeviceAPIVersion = CAMERA_DEVICE_API_VERSION_3_3;
mSupportDualVideo = true;
mSupportExtendedMakernote = false;
mSupportFullColorRange = true;
mSupportIPUAcceleration = false;
// -1 means mPreviewHALFormat is not set
mPreviewHALFormat = -1;
CLEAR(mDeviceInfo);
}
// Initialize from the media controller device paths: cache the media-ctl
// element names, then enumerate the sensor drivers.
status_t CameraHWInfo::init(const std::vector<std::string> &mediaDevicePath)
{
mMediaControllerPathName = mediaDevicePath;
//readProperty();
mMediaCtlElementNames.clear();
getMediaCtlElementNames(mMediaCtlElementNames, true);
return initDriverList();
}
/**
 * Query and cache the supported output formats of every camera's sensor.
 *
 * Fills mSensorOutputFormats (camera id -> SensorFormat map) by forcing a
 * fresh query from each sensor driver.
 */
status_t CameraHWInfo::initAvailableSensorOutputFormats(void)
{
    status_t ret = OK;
    LOGI("@%s", __FUNCTION__);
    mSensorOutputFormats.clear();

    const int numberOfCameras = PlatformData::numberOfCameras();
    for (int i = 0; i < numberOfCameras; i++) {
        SensorFormat tmpOutputFormats;
        // 'true' forces a driver query instead of serving the cache.
        getAvailableSensorOutputFormats(i, tmpOutputFormats, true);
        mSensorOutputFormats[i] = tmpOutputFormats;
    }
    // Fix: size() is size_t; use %zu instead of %d to avoid undefined
    // behavior in the varargs log call on LP64.
    LOGI("@%s mSensorOutputFormats.size()(%zu)", __FUNCTION__, mSensorOutputFormats.size());
    return ret;
}
/**
 * Enumerate the sensor drivers registered to the media controller(s)
 * and populate mSensorInfo. Idempotent: returns immediately when the
 * list has already been built.
 */
status_t CameraHWInfo::initDriverList()
{
    LOGI("@%s", __FUNCTION__);
    status_t ret = OK;
    if (mSensorInfo.size() > 0) {
        // We only need to go through the drivers once
        return OK;
    }
    // check whether we are in a platform that supports media controller (mc)
    // or in one where a main device (md) can enumerate the sensors
    struct stat sb;
    // NOTE(review): this loop performs no per-iteration check, so it is an
    // unconditional delay of RETRY_COUNTER * KERNEL_MODULE_LOAD_DELAY us
    // before probing. Presumably it was meant to retry the stat() below
    // until the media node appears — confirm the intent before changing.
    for (int retryTimes = RETRY_COUNTER; retryTimes >= 0; retryTimes--) {
        if (retryTimes > 0) {
            // Because module loading is delayed also need to delay HAL initialization
            usleep(KERNEL_MODULE_LOAD_DELAY);
        }
    }
    for (auto mcPathName : mMediaControllerPathName) {
        int mcExist = stat(mcPathName.c_str(), &sb);
        LOGI("mMediaControllerPathName %s\n", mcPathName.c_str());
        if (mcExist == 0) {
            mHasMediaController = true;
            // Enumerate sensors first, then resolve the lens/flash subdevs
            // attached to each discovered sensor module.
            ret = findMediaControllerSensors(mcPathName);
            //ret |= findMediaDeviceInfo(mcPathName);
            for (auto &it : mSensorInfo)
                ret |= findAttachedSubdevs(mcPathName, it);
        } else {
            LOGE("Could not find sensor names");
            ret = NO_INIT;
        }
    }
    for (unsigned i = 0 ;i < mSensorInfo.size(); ++i)
        LOGI("@%s, mSensorName:%s, mDeviceName:%s, port:%d", __FUNCTION__,
            mSensorInfo[i].mSensorName.c_str(), mSensorInfo[i].mDeviceName.c_str(), mSensorInfo[i].mIspPort);
    return ret;
}
/**
 * Read cached Android system properties (manufacturer/model) from the
 * camera property file and store them in the member fields.
 *
 * \return OK on success, UNKNOWN_ERROR if the property file cannot be opened
 */
status_t CameraHWInfo::readProperty()
{
    std::string cameraPropertyPath = std::string(CAMERA_CACHE_DIR) + std::string(CAMERA_PROPERTY_FILE);
    std::fstream props(cameraPropertyPath.c_str(), std::ios::in);
    if (!props.is_open()) {
        LOGW("Failed to load camera property file.");
        return UNKNOWN_ERROR;
    }
    const std::string kManufacturer = "ro.product.manufacturer";
    const std::string kModel = "ro.product.model";
    const std::string kDelimiter = "=";
    std::map<std::string, std::string> properties;

    // Parse "key=value" lines.
    // Fix: loop on getline() itself instead of eof() — the eof() pattern
    // processes the last failed read once more and ignores stream errors.
    std::string line;
    while (std::getline(props, line)) {
        size_t pos = line.find(kDelimiter);
        if (pos != std::string::npos) {
            std::string key = line.substr(0, pos);
            std::string value = line.substr(pos + 1);
            properties[key] = value;
            LOGI("%s, new key,value: %s,%s", __FUNCTION__, key.c_str(), value.c_str());
        }
    }

    if (properties.find(kManufacturer) != properties.end()) {
        mManufacturerName = properties[kManufacturer];
    }
    if (properties.find(kModel) != properties.end()) {
        mProductName = properties[kModel];
    }
    return OK;
}
/**
 * Parse the module information encoded in a sensor entity name.
 *
 * The expected entity name format is "m00_b_ov13850 1-0010", i.e.
 * m<2-digit module index>_<orientation b|f>_<sensor name> <i2c bus-addr>.
 *
 * \param[in] entity_name sensor entity name from the media controller
 * \param[out] drv_info descriptor filled with index, orientation and name
 * \return OK on success, UNKNOWN_ERROR on any format mismatch
 */
status_t CameraHWInfo::parseModuleInfo(const std::string &entity_name,
                                       SensorDriverDescriptor &drv_info)
{
    status_t ret = OK;

    // sensor entity name format SHOULD be like this:
    // m00_b_ov13850 1-0010
    if (entity_name.empty())
        return UNKNOWN_ERROR;

    // Fix: the fixed "mXX_o_" prefix is 6 chars and at least one sensor-name
    // char must follow; guard the length so .at() below cannot throw
    // std::out_of_range on a malformed (too short) entity name.
    if (entity_name.length() < 7) {
        LOGE("%d:parse sensor entity name %s error, name too short, please check sensor driver !",
             __LINE__, entity_name.c_str());
        return UNKNOWN_ERROR;
    }

    int parse_index = 0;
    if (entity_name.at(parse_index) != 'm') {
        LOGE("%d:parse sensor entity name %s error at %d, please check sensor driver !",
             __LINE__, entity_name.c_str(), parse_index);
        return UNKNOWN_ERROR;
    }

    // "mXX": module index string, e.g. "m00".
    std::string index_str = entity_name.substr (parse_index, 3);
    drv_info.mModuleIndexStr = index_str;

    parse_index += 3;
    if (entity_name.at(parse_index) != '_') {
        LOGE("%d:parse sensor entity name %s error at %d, please check sensor driver !",
             __LINE__, entity_name.c_str(), parse_index);
        return UNKNOWN_ERROR;
    }
    parse_index++;

    // Orientation: 'b' (back) or 'f' (front).
    if (entity_name.at(parse_index) != 'b' &&
        entity_name.at(parse_index) != 'f') {
        LOGE("%d:parse sensor entity name %s error at %d, please check sensor driver !",
             __LINE__, entity_name.c_str(), parse_index);
        return UNKNOWN_ERROR;
    }
    drv_info.mPhyModuleOrient = entity_name.at(parse_index);
    parse_index++;
    if (entity_name.at(parse_index) != '_') {
        LOGE("%d:parse sensor entity name %s error at %d, please check sensor driver !",
             __LINE__, entity_name.c_str(), parse_index);
        return UNKNOWN_ERROR;
    }
    parse_index++;

    // The real sensor name runs up to the space before the i2c address.
    std::size_t real_name_end = string::npos;
    if ((real_name_end = entity_name.find(' ')) == string::npos) {
        LOGE("%d:parse sensor entity name %s error at %d, please check sensor driver !",
             __LINE__, entity_name.c_str(), parse_index);
        return UNKNOWN_ERROR;
    }
    std::string real_name_str = entity_name.substr(parse_index, real_name_end - parse_index);
    drv_info.mModuleRealSensorName = real_name_str;

    LOGD("%s:%d, real sensor name %s, module ori %c, module id %s",
         __FUNCTION__, __LINE__, drv_info.mModuleRealSensorName.c_str(),
         drv_info.mPhyModuleOrient, drv_info.mModuleIndexStr.c_str());
    return ret;
}
/**
 * Find the lens and flash sub-devices attached to one sensor module.
 *
 * Walks the media controller entities, matches lens/flash entities to the
 * sensor by the shared "mXX" module index prefix, then resolves their
 * major/minor device numbers to /dev/v4l-subdevN node names.
 *
 * \param[in] mcPath media controller device node path
 * \param[in,out] drv_info descriptor updated with lens/flash device names
 */
status_t CameraHWInfo::findAttachedSubdevs(const std::string &mcPath,
                                           struct SensorDriverDescriptor &drv_info)
{
    status_t ret = OK;
    bool find_lens = false;
    unsigned lens_major = 0;
    unsigned lens_minor = 0;
    int find_flashlight = 0;
    // Zero-initialize so no uninitialized bytes can ever be compared below.
    unsigned flashlight_major[SENSOR_ATTACHED_FLASH_MAX_NUM] = {0};
    unsigned flashlight_minor[SENSOR_ATTACHED_FLASH_MAX_NUM] = {0};
    struct media_entity_desc entity;
    std::string last_fl_entity_str;
    LOGI("@%s", __FUNCTION__);

    int fd = open(mcPath.c_str(), O_RDONLY);
    if (fd == -1) {
        // Fix: typo "openg" -> "open" in the log message.
        LOGW("Could not open media controller device: %s!", strerror(errno));
        return ENXIO;
    }

    CLEAR(entity);
    do {
        // Go through the list of media controller entities
        entity.id |= MEDIA_ENT_ID_FLAG_NEXT;
        if (ioctl(fd, MEDIA_IOC_ENUM_ENTITIES, &entity) < 0) {
            if (errno == EINVAL) {
                // Ending up here when no more entities left.
                // Will simply 'break' if everything was ok
                if (mSensorInfo.size() == 0) {
                    // No registered drivers found
                    LOGE("ERROR no sensor driver registered in media controller!");
                    ret = NO_INIT;
                }
            } else {
                LOGE("ERROR in browsing media controller entities: %s!", strerror(errno));
                ret = FAILED_TRANSACTION;
            }
            break;
        } else {
            // Lens subdev belonging to this module ("mXX" prefix match).
            if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV_LENS) {
                if ((entity.name[0] == 'm') &&
                    strncmp(entity.name, drv_info.mModuleIndexStr.c_str(), 3) == 0) {
                    if (find_lens == true)
                        LOGW("one module can attach only one lens now");
                    find_lens = true;
                    lens_major = entity.v4l.major;
                    lens_minor = entity.v4l.minor;
                    LOGD("%s:%d, found lens %s attatched to sensor %s",
                         __FUNCTION__, __LINE__, entity.name, drv_info.mSensorName.c_str());
                }
            }
            // Flash subdev(s) belonging to this module.
            if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV_FLASH) {
                if ((entity.name[0] == 'm') &&
                    strncmp(entity.name, drv_info.mModuleIndexStr.c_str(), 3) == 0) {
                    if (find_flashlight >= SENSOR_ATTACHED_FLASH_MAX_NUM) {
                        LOGW("%s:%d, one module can attach %d flashlight",
                             __FUNCTION__, __LINE__, SENSOR_ATTACHED_FLASH_MAX_NUM);
                        continue;
                    }
                    flashlight_major[find_flashlight] = entity.v4l.major;
                    flashlight_minor[find_flashlight] = entity.v4l.minor;
                    // sort the flash order, make sure led0 befor led1
                    if (find_flashlight > 0) {
                        char* cur_flash_index_str = strstr(entity.name, "_led");
                        const char* last_flash_index_str = strstr(last_fl_entity_str.c_str(),
                                                                  "_led");
                        if (cur_flash_index_str && last_flash_index_str) {
                            int cur_flash_index = atoi(cur_flash_index_str + 4);
                            int last_flash_index = atoi(last_flash_index_str + 4);
                            if (cur_flash_index < last_flash_index) {
                                // Swap with the previous entry so that the
                                // lower led index always comes first.
                                int tmp = flashlight_major[find_flashlight];
                                flashlight_major[find_flashlight] =
                                    flashlight_major[find_flashlight - 1];
                                flashlight_major[find_flashlight - 1] = tmp;
                                tmp = flashlight_minor[find_flashlight];
                                flashlight_minor[find_flashlight] =
                                    flashlight_minor[find_flashlight - 1];
                                flashlight_minor[find_flashlight - 1] = tmp;
                            }
                        } else
                            LOGW("%s:%d, wrong flashlight name format %s, %s",
                                 __FUNCTION__, __LINE__,
                                 entity.name,last_fl_entity_str.c_str());
                    }
                    last_fl_entity_str = entity.name;
                    find_flashlight++;
                    LOGD("%s:%d,found flashlight %s attatched to sensor %s",
                         __FUNCTION__, __LINE__, entity.name, drv_info.mSensorName.c_str());
                }
            }
        }
    } while (!ret);

    if (close(fd)) {
        LOGE("ERROR in closing media controller: %s!", strerror(errno));
        if (!ret) ret = EPERM;
    }
    if (ret)
        return ret;

    // Resolve the collected major/minor numbers to /dev/v4l-subdevN nodes.
    string subdevPathName = "/dev/v4l-subdev";
    string subdevPathNameN;
    for (int n = 0; n < MAX_SUBDEV_ENUMERATE; n++) {
        subdevPathNameN = subdevPathName + std::to_string(n);
        struct stat fileInfo;
        CLEAR(fileInfo);
        if (!find_lens && !find_flashlight)
            break;
        if (stat(subdevPathNameN.c_str(), &fileInfo) < 0) {
            if (errno == ENOENT) {
                // We end up here when there is no Nth subdevice
                // but there might be more subdevices, so continue.
                // For an example if there are v4l subdevices 0, 4, 5 and 6
                // we come here for subdevices 1, 2 and 3.
                LOGI("Subdev missing: \"%s\"!", subdevPathNameN.c_str());
                continue;
            } else {
                LOGE("ERROR querying sensor subdev filestat for \"%s\": %s!",
                     subdevPathNameN.c_str(), strerror(errno));
                return FAILED_TRANSACTION;
            }
        }
        if (find_lens && ((lens_major == MAJOR(fileInfo.st_rdev)) &&
            (lens_minor == MINOR(fileInfo.st_rdev)))) {
            drv_info.mModuleLensDevName = subdevPathNameN;
            find_lens = false;
        }
        if (find_flashlight > 0) {
            drv_info.mFlashNum = find_flashlight;
            // Fix: only scan the entries actually filled in above; the
            // original iterated the whole array and compared uninitialized
            // slots beyond find_flashlight.
            for (int i = 0; i < find_flashlight; i++) {
                if ((flashlight_major[i] == MAJOR(fileInfo.st_rdev)) &&
                    (flashlight_minor[i] == MINOR(fileInfo.st_rdev)))
                    drv_info.mModuleFlashDevName[i] = subdevPathNameN;
            }
        }
    }
    return ret;
}
const bool compareFuncForSensorInfo(struct SensorDriverDescriptor s1, struct SensorDriverDescriptor s2) {
return (s1.mModuleIndexStr < s2.mModuleIndexStr);
}
/**
 * Enumerate sensor entities registered to the media controller and fill
 * mSensorInfo with one descriptor per sensor, sorted by module index.
 *
 * \param[in] mcPath media controller device node path
 */
status_t CameraHWInfo::findMediaControllerSensors(const std::string &mcPath)
{
    status_t ret = OK;
    int fd = open(mcPath.c_str(), O_RDONLY);
    if (fd == -1) {
        // Fix: typo "openg" -> "open" in the log message.
        LOGW("Could not open media controller device: %s!", strerror(errno));
        return ENXIO;
    }

    struct media_entity_desc entity;
    CLEAR(entity);
    do {
        // Go through the list of media controller entities
        entity.id |= MEDIA_ENT_ID_FLAG_NEXT;
        if (ioctl(fd, MEDIA_IOC_ENUM_ENTITIES, &entity) < 0) {
            if (errno == EINVAL) {
                // Ending up here when no more entities left.
                // Will simply 'break' if everything was ok
                if (mSensorInfo.size() == 0) {
                    // No registered drivers found
                    LOGE("ERROR no sensor driver registered in media controller!");
                    ret = NO_INIT;
                }
            } else {
                LOGE("ERROR in browsing media controller entities: %s!", strerror(errno));
                ret = FAILED_TRANSACTION;
            }
            break;
        } else {
            if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV_SENSOR) {
                // A driver has been found!
                // The driver is using sensor name when registering
                // to media controller (we will truncate that to
                // first space, if any)
                SensorDriverDescriptor drvInfo;
                drvInfo.mSensorName = entity.name;
                drvInfo.mSensorDevType = SENSOR_DEVICE_MC;
                drvInfo.mFlashNum = 0; // fix: stray ";;" removed
                unsigned major = entity.v4l.major;
                unsigned minor = entity.v4l.minor;
                // Go through the subdevs one by one, see which one
                // corresponds to this driver (if there is an error,
                // the looping ends at 'while')
                if ((ret = parseModuleInfo(drvInfo.mSensorName, drvInfo)) == 0)
                    ret = initDriverListHelper(major, minor, mcPath, drvInfo);
            }
        }
    } while (!ret);

    // Fix: "std:sort" was a vestigial label plus an ADL-found unqualified
    // call; make the std::sort qualification explicit.
    std::sort(mSensorInfo.begin(), mSensorInfo.end(), compareFuncForSensorInfo);

    if (close(fd)) {
        LOGE("ERROR in closing media controller: %s!", strerror(errno));
        if (!ret) ret = EPERM;
    }
    return ret;
}
/**
 * Read and log the driver information of a media controller device.
 *
 * Fills mDeviceInfo via the MEDIA_IOC_DEVICE_INFO ioctl.
 *
 * \param[in] mcPath media controller device node path
 */
status_t CameraHWInfo::findMediaDeviceInfo(const std::string& mcPath)
{
    status_t ret = OK;
    int fd = open(mcPath.c_str(), O_RDONLY);
    if (fd == -1) {
        // Fix: typo "openg" -> "open" in the log message.
        LOGW("Could not open media controller device: %s!", strerror(errno));
        return UNKNOWN_ERROR;
    }

    CLEAR(mDeviceInfo);
    if (ioctl(fd, MEDIA_IOC_DEVICE_INFO, &mDeviceInfo) < 0) {
        LOGE("ERROR in browsing media device information: %s!", strerror(errno));
        ret = FAILED_TRANSACTION;
    } else {
        LOGI("Media device: %s", mDeviceInfo.driver);
    }

    if (close(fd)) {
        LOGE("ERROR in closing media controller: %s!", strerror(errno));
        if (!ret) {
            ret = PERMISSION_DENIED;
        }
    }
    return ret;
}
/**
 * Function to get the CSI port number a sensor is connected to.
 * Use mediacontroller to go through the links to find the CSI entity
 * and trim the port number from it.
 *
 * \param[IN] deviceName sensor full name
 * \param[IN] mcPath media controller device node path
 * \param[OUT] portId CSI port number
 *
 * \return NO_ERROR on success; an error code when the media graph cannot
 *         be traversed. Note: also returns 0 (== NO_ERROR) when the sensor
 *         entity has no sink links at all, leaving portId untouched.
 */
status_t CameraHWInfo::getCSIPortID(const std::string &deviceName, const std::string &mcPath, int &portId)
{
    LOGI("@%s", __FUNCTION__);
    status_t status = NO_ERROR;
    std::shared_ptr<MediaEntity> mediaEntity = nullptr;
    std::vector<string> names;
    string name;
    std::vector<string> nameTemplateVec;
    // Kernel driver should follow one of these 3 templates to report the CSI port, else this
    // parsing will fail. The format is <...CSI...> port-number
    const char *CSI_RX_PORT_NAME_TEMPLATE1 = "CSI-2";
    const char *CSI_RX_PORT_NAME_TEMPLATE2 = "CSI2-port";
    const char *CSI_RX_PORT_NAME_TEMPLATE3 = "TPG";
    const char *CSI_RX_PORT_NAME_TEMPLATE4 = "mipi-dphy-rx";
    const char *CSI_RX_PORT_NAME_TEMPLATE5 = "rockchip-csi2-dphy";
    nameTemplateVec.push_back(CSI_RX_PORT_NAME_TEMPLATE1);
    nameTemplateVec.push_back(CSI_RX_PORT_NAME_TEMPLATE2);
    nameTemplateVec.push_back(CSI_RX_PORT_NAME_TEMPLATE3);
    nameTemplateVec.push_back(CSI_RX_PORT_NAME_TEMPLATE4);
    nameTemplateVec.push_back(CSI_RX_PORT_NAME_TEMPLATE5);
    std::shared_ptr<MediaController> mediaCtl = std::make_shared<MediaController>(mcPath.c_str());
    // NOTE(review): make_shared throws on allocation failure rather than
    // returning null, so this check is effectively dead; kept for safety.
    if (!mediaCtl) {
        LOGE("Error creating MediaController");
        return UNKNOWN_ERROR;
    }
    status = mediaCtl->init();
    if (status != NO_ERROR) {
        LOGE("Error initializing Media Controller");
        return status;
    }
    status = mediaCtl->getMediaEntity(mediaEntity, deviceName.c_str());
    if (status != NO_ERROR) {
        LOGE("Failed to get media entity by sensor name %s", deviceName.c_str());
        return status;
    }
    if (mediaEntity->getType() != SUBDEV_SENSOR) {
        LOGE("Media entity not sensor type");
        return UNKNOWN_ERROR;
    }
    // Traverse the sinks till we get CSI port
    // (the walk terminates either at a CSI-named entity, a no-sink entity,
    // or a name that cannot be resolved to an entity).
    while (1) {
        names.clear();
        status = mediaCtl->getSinkNamesForEntity(mediaEntity, names);
        if (status != NO_ERROR) {
            LOGE("Error getting sink names for entity %s", mediaEntity->getName());
            return UNKNOWN_ERROR;
        }
        // For sensor type there will be only one sink
        if (names.size() != 1) {
            LOGW("Number of sinks for sensor not 1 it is %zu", names.size());
        }
        if (names.size() == 0) {
            LOGW("No sink names available for %s", deviceName.c_str());
            // NOTE(review): 0 here equals NO_ERROR, so a sensor without sink
            // links reports success with an unset portId — confirm intended.
            return 0;
        }
        name = names[0];
        size_t pos = 0;
        for (auto &nameTemplate : nameTemplateVec) {
            // check if name is CSI port
            if ((pos = name.find(nameTemplate)) != string::npos) {
                LOGI("found CSI port name = %s", name.c_str());
                // Trim the port id from CSI port name
                // (atoi returns 0 on parse failure, which passes the < 0
                // check below; malformed suffixes yield port 0).
                name = name.substr(pos + nameTemplate.length());
                portId = atoi(name.c_str());
                if (portId < 0) {
                    LOGE("Error getting port id %d", portId);
                    return UNKNOWN_ERROR;
                }
                return status;
            }
        }
        // Get media entity for new name
        mediaEntity = nullptr;
        status = mediaCtl->getMediaEntity(mediaEntity, name.c_str());
        if (status != NO_ERROR) {
            LOGE("Failed to get media entity by name %s", name.c_str());
            break;
        }
    }
    return status;
}
// Orders sensor frame sizes from smallest to biggest for std::sort.
// Fix: the original returned (w1 <= w2 && h1 <= h2), which is reflexive
// (true for equal elements) and therefore NOT a strict weak ordering —
// undefined behavior when handed to std::sort. Compare by width first,
// then height, with strict '<'.
bool compareFuncForSensorFormat(struct v4l2_subdev_frame_size_enum s1, struct v4l2_subdev_frame_size_enum s2) {
    if (s1.max_width != s2.max_width)
        return s1.max_width < s2.max_width;
    return s1.max_height < s2.max_height;
}
/**
 * Resolve the full media-ctl entity name of the sensor for a camera id.
 *
 * The matching entity must contain both the sensor name from the XML
 * profile and the module index string.
 *
 * \param[in] cameraId camera to resolve
 * \param[out] sensorEntityName resolved entity name, "none" if unresolved
 */
status_t CameraHWInfo::getSensorEntityName(int32_t cameraId,
                                           std::string &sensorEntityName) const
{
    sensorEntityName = "none";

#ifdef CAMERA_RKISP2_SUPPORT
    const rkisp2::RKISP2CameraCapInfo *cap = rkisp2::getRKISP2CameraCapInfo(cameraId);
#else
    const RKISP1CameraCapInfo *cap = getRKISP1CameraCapInfo(cameraId);
#endif
    if (!cap) {
        LOGE("Can't get Sensor name");
        return UNKNOWN_ERROR;
    }
    const string sensorName = cap->getSensorName();

    // Scan all media-ctl entities for one matching both sensor name and
    // module index; the last match wins (same as a plain overwrite).
    std::vector<std::string> elementNames;
    PlatformData::getCameraHWInfo()->getMediaCtlElementNames(elementNames);
    for (auto &candidate : elementNames) {
        const bool nameMatches = candidate.find(sensorName) != std::string::npos;
        const bool indexMatches = candidate.find(cap->mModuleIndexStr) != std::string::npos;
        if (nameMatches && indexMatches)
            sensorEntityName = candidate;
    }

    if (sensorEntityName == "none") {
        LOGE("@%s : Sensor name %s is case sensitive, Please check it in Camera3_profiles.xml with driver sensor name!!!",
             __FUNCTION__, sensorName.c_str());
        return UNKNOWN_ERROR;
    }
    LOGD("@%s : sensorName:%s, sensorEntityName:%s", __FUNCTION__, sensorName.c_str(), sensorEntityName.c_str());
    return OK;
}
/**
 * Enumerate all media bus codes and frame sizes a sensor can output.
 *
 * \param[in] cameraId camera to query
 * \param[out] OutputFormats map of media bus code -> supported frame sizes
 * \param[in] isFirst when false, answer from the cache built at init time
 *            instead of re-querying the driver
 */
status_t CameraHWInfo::getAvailableSensorOutputFormats(int32_t cameraId,
        SensorFormat &OutputFormats, bool isFirst) const
{
    status_t ret = NO_ERROR;
    const char *devname;
    std::string sDevName;
    OutputFormats.clear();
    struct v4l2_subdev_format aFormat;
    LOGI("@%s cameraId(%d) isFirst(%s)", __FUNCTION__, cameraId, isFirst?"true":"false");
    string sensorEntityName = "none";

    // Serve from the cache built by initAvailableSensorOutputFormats().
    if(!isFirst) {
        if (mSensorOutputFormats.find(cameraId) != mSensorOutputFormats.end()) {
            LOGI("@%s cameraId(%d)", __FUNCTION__, mSensorOutputFormats.find(cameraId)->first);
            OutputFormats = mSensorOutputFormats.find(cameraId)->second;
        }
        return ret;
    }

    ret = getSensorEntityName(cameraId, sensorEntityName);
    if (ret != NO_ERROR)
        return UNKNOWN_ERROR;

    // Resolve the sensor's /dev/v4l-subdevN node.
    for (size_t i = 0; i < mSensorInfo.size(); i++) {
        if (sensorEntityName.find(mSensorInfo[i].mSensorName) == std::string::npos)
            continue;
        std::ostringstream stringStream;
        stringStream << "/dev/" << mSensorInfo[i].mDeviceName.c_str();
        sDevName = stringStream.str();
    }
    devname = sDevName.c_str();
    LOGI("@%s, sensor name: %s, subdev path: %s.", __FUNCTION__, sensorEntityName.c_str(), devname);

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    /* get sensor original format */
    ret = device->getFormat(aFormat);
    if (ret != NO_ERROR) {
        LOGE("Error getFormat ret:%d (%s)", ret, devname);
    }

    std::vector<uint32_t> formats;
    device->queryFormats(0, formats);

    struct v4l2_subdev_selection aSelection;
    status_t status = NO_ERROR;
    std::vector<struct v4l2_subdev_frame_size_enum> fse;
    struct SensorFrameSize frameSize;
    for (auto it = formats.begin(); it != formats.end(); ++it) {
        device->getSensorFormats(0, *it, fse);
        //sort from smallest to biggest
        // Fix: "std:sort" was a vestigial label plus an ADL-found
        // unqualified call; make the std::sort qualification explicit.
        std::sort(fse.begin(), fse.end(), compareFuncForSensorFormat);
    }
    //enum all supported framesize
    for (auto iter = fse.begin(); iter != fse.end(); ++iter) {
        //set matched fmt first for getSelection correct
        status = device->setFormat(iter->pad, iter->min_width,
                                   iter->min_height, iter->code,
                                   0, 0);
        if (status < 0) {
            LOGW("setFormat failed, may be not realized, ignore selection!");
        }
        status |= device->getSelection(aSelection);
        if (status >= 0) {
            frameSize.left = aSelection.r.left;
            frameSize.top = aSelection.r.top;
            frameSize.min_width = aSelection.r.width;
            frameSize.min_height = aSelection.r.height;
            frameSize.max_width = aSelection.r.width;
            frameSize.max_height = aSelection.r.height;
        } else {
            LOGW("getSelection failed, may be not realized, use default selection!");
            frameSize.left = 0;
            frameSize.top = 0;
            frameSize.min_width = (*iter).min_width;
            frameSize.min_height = (*iter).min_height;
            frameSize.max_width = (*iter).max_width;
            frameSize.max_height = (*iter).max_height;
        }
        LOGD("@%s %d: code: 0x%x, frame size:"
             "Min(%dx%d) Max(%dx%d), left/top(%d,%d)",
             __FUNCTION__, __LINE__,
             iter->code, frameSize.min_width, frameSize.min_height,
             frameSize.max_width, frameSize.max_height, frameSize.left, frameSize.top);
        OutputFormats[iter->code].push_back(frameSize);
    }

    //set sensor to original fmt
    status = device->setFormat(aFormat.pad, aFormat.format.width,
                               aFormat.format.height, aFormat.format.code,
                               0, 0);
    if(!formats.size() || !fse.size()) {
        LOGE("@%s %s: Enum sensor frame size failed", __FUNCTION__, devname);
        ret = UNKNOWN_ERROR;
    }
    ret |= device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    return ret;
}
/**
 * Query the sensor's media bus (bayer) format code.
 *
 * The last enumerated media bus code is reported as the bayer pattern.
 *
 * \param[in] cameraId camera to query
 * \param[out] bayerPattern media bus code; untouched when enumeration fails
 */
status_t CameraHWInfo::getSensorBayerPattern(int32_t cameraId,
                                             int32_t &bayerPattern) const
{
    string sensorEntityName = "none";
    status_t ret = getSensorEntityName(cameraId, sensorEntityName);
    if (ret != NO_ERROR)
        return UNKNOWN_ERROR;

    // Resolve the matching /dev/v4l-subdevN node for this sensor.
    std::string sDevName;
    for (size_t idx = 0; idx < mSensorInfo.size(); idx++) {
        if (sensorEntityName.find(mSensorInfo[idx].mSensorName) == std::string::npos)
            continue;
        std::ostringstream stream;
        stream << "/dev/" << mSensorInfo[idx].mDeviceName.c_str();
        sDevName = stream.str();
    }
    const char *devname = sDevName.c_str();

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    std::vector<uint32_t> formats;
    device->queryFormats(0, formats);
    if (formats.empty()) {
        LOGE("@%s %s: Enum sensor format failed", __FUNCTION__, devname);
        ret = UNKNOWN_ERROR;
    } else {
        // Equivalent to iterating and keeping the last entry.
        bayerPattern = formats.back();
    }

    ret |= device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    return ret;
}
/**
 * Look up the sensor driver descriptor matching a camera id.
 *
 * Matching is done on both module index and sensor name.
 *
 * \param[in] cameraId camera to look up
 * \return pointer into mSensorInfo, or NULL when no descriptor matches
 */
const struct SensorDriverDescriptor* CameraHWInfo::getSensorDrvDes(int32_t cameraId) const
{
#ifdef CAMERA_RKISP2_SUPPORT
    const rkisp2::RKISP2CameraCapInfo *cap = rkisp2::getRKISP2CameraCapInfo(cameraId);
#else
    const RKISP1CameraCapInfo *cap = getRKISP1CameraCapInfo(cameraId);
#endif
    if (!cap) {
        LOGE("Can't get Sensor cap info !");
        return NULL;
    }

    for (const auto& candidate : mSensorInfo) {
        const bool sameModule = (candidate.mModuleIndexStr == cap->mModuleIndexStr);
        const bool sameSensor = (candidate.mSensorName == cap->mSensorName);
        if (sameModule && sameSensor)
            return &candidate;
    }
    return NULL;
}
/**
 * Enable dual-video (multi camera) mode when the camera's capability
 * info requests it. Only active in RKISP2 builds; a no-op otherwise.
 *
 * \param[in] cameraId camera whose capability info is consulted
 */
void CameraHWInfo::setMultiCameraMode(int32_t cameraId)
{
#ifdef CAMERA_RKISP2_SUPPORT
    const rkisp2::RKISP2CameraCapInfo *capInfo = rkisp2::getRKISP2CameraCapInfo(cameraId);
    if (capInfo == nullptr) {
        LOGE("Can't get Sensor cap info !");
        return;
    }
    if (capInfo->getMultiCameraMode()) {
        ALOGD("Set multi camera mode!");
        mSupportDualVideo = true;
    }
#endif
}
/**
 * Query the sensor's current frame duration from its subdev driver.
 *
 * \param[in] cameraId camera to query
 * \param[out] duration frame duration reported by the driver
 */
status_t CameraHWInfo::getSensorFrameDuration(int32_t cameraId, int32_t &duration) const
{
    string sensorEntityName = "none";
    status_t ret = getSensorEntityName(cameraId, sensorEntityName);
    if (ret != NO_ERROR)
        return UNKNOWN_ERROR;

    // Resolve the matching /dev/v4l-subdevN node for this sensor.
    std::string sDevName;
    for (size_t idx = 0; idx < mSensorInfo.size(); idx++) {
        if (sensorEntityName.find(mSensorInfo[idx].mSensorName) == std::string::npos)
            continue;
        std::ostringstream stream;
        stream << "/dev/" << mSensorInfo[idx].mDeviceName.c_str();
        sDevName = stream.str();
    }
    const char *devname = sDevName.c_str();

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    device->getSensorFrameDuration(duration);

    ret |= device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    return ret;
}
/**
 * Query the DV timings (e.g. for an HDMI-in style source) from the
 * sensor subdev driver.
 *
 * \param[in] cameraId camera to query
 * \param[out] timings DV timings reported by the driver
 */
status_t CameraHWInfo::getDvTimings(int32_t cameraId,
                                    struct v4l2_dv_timings &timings) const
{
    string sensorEntityName = "none";
    status_t ret = getSensorEntityName(cameraId, sensorEntityName);
    if (ret != NO_ERROR)
        return UNKNOWN_ERROR;

    // Resolve the matching /dev/v4l-subdevN node for this sensor.
    std::string sDevName;
    for (size_t idx = 0; idx < mSensorInfo.size(); idx++) {
        if (sensorEntityName.find(mSensorInfo[idx].mSensorName) == std::string::npos)
            continue;
        std::ostringstream stream;
        stream << "/dev/" << mSensorInfo[idx].mDeviceName.c_str();
        sDevName = stream.str();
    }
    const char *devname = sDevName.c_str();

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    ret = device->queryDvTimings(timings);
    if (ret != NO_ERROR) {
        LOGE("Error queryDvTimings ret:%d (%s)", ret, devname);
    }

    ret |= device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    return ret;
}
/**
 * Query the sensor subdev's currently configured media bus format.
 *
 * \param[in] cameraId camera to query
 * \param[out] aFormat format reported by the driver
 */
status_t CameraHWInfo::getSensorFormat(int32_t cameraId,
                                       struct v4l2_subdev_format &aFormat) const
{
    string sensorEntityName = "none";
    status_t ret = getSensorEntityName(cameraId, sensorEntityName);
    if (ret != NO_ERROR)
        return UNKNOWN_ERROR;

    // Resolve the matching /dev/v4l-subdevN node for this sensor.
    std::string sDevName;
    for (size_t idx = 0; idx < mSensorInfo.size(); idx++) {
        if (sensorEntityName.find(mSensorInfo[idx].mSensorName) == std::string::npos)
            continue;
        std::ostringstream stream;
        stream << "/dev/" << mSensorInfo[idx].mDeviceName.c_str();
        sDevName = stream.str();
    }
    const char *devname = sDevName.c_str();

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    ret = device->getFormat(aFormat);
    if (ret != NO_ERROR) {
        LOGE("Error getFormat ret:%d (%s)", ret, devname);
    }

    ret |= device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    return ret;
}
/**
 * Get all available sensor modes from the driver
 *
 * Function gets all currently available sensor modes from the driver
 * and returns them in a vector.
 *
 * \param[in] sensorName Name of the sensor
 * \param[out] sensorModes Vector of available sensor modes for this sensor
 * \return NO_ERROR on success (per-mode query failures are only logged),
 *         UNKNOWN_ERROR when the mode range cannot be queried
 */
status_t CameraHWInfo::getAvailableSensorModes(const std::string &sensorName,
                                               SensorModeVector &sensorModes) const
{
    status_t ret = NO_ERROR;
    const char *devname;
    std::string sDevName;

    // Resolve the sensor's /dev/v4l-subdevN node name.
    for (size_t i = 0; i < mSensorInfo.size(); i++) {
        if (mSensorInfo[i].mSensorName != sensorName)
            continue;
        std::ostringstream stringStream;
        stringStream << "/dev/" << mSensorInfo[i].mDeviceName.c_str();
        sDevName = stringStream.str();
    }
    devname = sDevName.c_str();

    std::shared_ptr<V4L2Subdevice> device = std::make_shared<V4L2Subdevice>(devname);
    if (device.get() == nullptr) {
        LOGE("Couldn't open device %s", devname);
        return UNKNOWN_ERROR;
    }
    ret = device->open();
    if (ret != NO_ERROR) {
        LOGE("Error opening device (%s)", devname);
        return ret;
    }

    // Get query control for sensor mode to determine max value
    v4l2_queryctrl sensorModeControl;
    CLEAR(sensorModeControl);
    sensorModeControl.id = CRL_CID_SENSOR_MODE;
    ret = device->queryControl(sensorModeControl);
    if (ret != NO_ERROR) {
        LOGE("Couldn't get sensor mode range");
        device->close();
        return UNKNOWN_ERROR;
    }
    // Fix: 'maximum' is a signed field; a negative value converted to
    // uint32_t would make the menu loop below iterate ~4 billion times.
    if (sensorModeControl.maximum < 0) {
        LOGE("Couldn't get sensor mode range");
        device->close();
        return UNKNOWN_ERROR;
    }
    uint32_t max = sensorModeControl.maximum;

    v4l2_querymenu menu;
    CLEAR(menu);
    menu.id = CRL_CID_SENSOR_MODE;

    // Loop through menu and add indexes and names to vector
    for (menu.index = 0; menu.index <= max; menu.index++) {
        ret = (device->queryMenu(menu));
        if (ret != NO_ERROR) {
            LOGE("Error opening query menu at index: %d", menu.index);
        } else {
            sensorModes.push_back(std::make_pair(uint32_t(menu.index),
                                                 reinterpret_cast<char *>(menu.name)));
        }
    }
    ret = device->close();
    if (ret != NO_ERROR)
        LOGE("Error closing device (%s)", devname);
    // Per-mode failures (and a failed close) are not propagated.
    return NO_ERROR;
}
void CameraHWInfo::getMediaCtlElementNames(std::vector<std::string> &elementNames, bool isFirst) const
{
if(!isFirst) {
elementNames = mMediaCtlElementNames;
return ;
}
// TODO: return all media devices's elements now, maybe just return
// specific media device's elements
for (auto mcPath : mMediaControllerPathName) {
int fd = open(mcPath.c_str(), O_RDONLY);
CheckError(fd == -1, VOID_VALUE, "@%s, Could not open media controller device: %s",
__FUNCTION__, strerror(errno));
struct media_entity_desc entity;
CLEAR(entity);
entity.id |= MEDIA_ENT_ID_FLAG_NEXT;
while (ioctl(fd, MEDIA_IOC_ENUM_ENTITIES, &entity) >= 0) {
elementNames.push_back(std::string(entity.name));
LOGI("@%s, entity name:%s, id:%d", __FUNCTION__, entity.name, entity.id);
entity.id |= MEDIA_ENT_ID_FLAG_NEXT;
}
CheckError(close(fd) > 0, VOID_VALUE, "@%s, Error in closing media controller: %s",
__FUNCTION__, strerror(errno));
}
}
/**
 * Report whether the ISP exposes a raw path entity.
 * Currently force-disabled at compile time via FORCE_DISABLE_ISP_RAW_PATH.
 */
bool CameraHWInfo::isIspSupportRawPath() const
{
#if FORCE_DISABLE_ISP_RAW_PATH == 0
    // Look for a media-ctl entity whose name mentions "rawpath".
    for (auto &entityName: mMediaCtlElementNames) {
        if (entityName.find("rawpath") != std::string::npos)
            return true;
    }
#endif
    return false;
}
/**
 * Find the full media-ctl element name containing the given substring.
 *
 * \param[in] elementNames candidate entity names
 * \param[in] value substring to match
 * \return the first matching name, or 'value' itself when nothing matches
 */
std::string CameraHWInfo::getFullMediaCtlElementName(const std::vector<std::string> elementNames,
                                                     const char *value) const
{
    for (auto &candidate: elementNames) {
        if (candidate.find(value) == std::string::npos)
            continue;
        LOGI("@%s, find match element name: %s, new name: %s",
             __FUNCTION__, value, candidate.c_str());
        return candidate;
    }
    LOGE("@%s, No match element name is found for %s!", __FUNCTION__, value);
    return value;
}
/**
 * Resolve a sensor's major/minor numbers to its /dev/v4l-subdevN node,
 * determine its CSI port, and register the completed descriptor into
 * mSensorInfo. TPG (test pattern generator) entities are only registered
 * for CSI port 0.
 *
 * \param[in] major v4l device major number of the sensor subdev
 * \param[in] minor v4l device minor number of the sensor subdev
 * \param[in] mcPath media controller device node path
 * \param[in,out] drvInfo descriptor being completed and registered
 */
status_t
CameraHWInfo::initDriverListHelper(unsigned major, unsigned minor,
                                   const std::string &mcPath,
                                   SensorDriverDescriptor& drvInfo)
{
    LOGI("@%s", __FUNCTION__);
    int portId = 0;
    status_t status = UNKNOWN_ERROR;
    std::size_t pos = string::npos;
    string subdevPathName = "/dev/v4l-subdev";
    string subdevPathNameN;

    // Scan candidate subdev nodes until one matches the sensor's dev numbers.
    for (int n = 0; n < MAX_SUBDEV_ENUMERATE; n++) {
        subdevPathNameN = subdevPathName + std::to_string(n);
        struct stat fileInfo;
        CLEAR(fileInfo);
        if (stat(subdevPathNameN.c_str(), &fileInfo) < 0) {
            if (errno == ENOENT) {
                // We end up here when there is no Nth subdevice
                // but there might be more subdevices, so continue.
                // For an example if there are v4l subdevices 0, 4, 5 and 6
                // we come here for subdevices 1, 2 and 3.
                LOGI("Subdev missing: \"%s\"!", subdevPathNameN.c_str());
                continue;
            } else {
                LOGE("ERROR querying sensor subdev filestat for \"%s\": %s!",
                      subdevPathNameN.c_str(), strerror(errno));
                return FAILED_TRANSACTION;
            }
        }
        if ((major == MAJOR(fileInfo.st_rdev)) && (minor == MINOR(fileInfo.st_rdev))) {
            // Store only the node basename ("v4l-subdevN").
            drvInfo.mDeviceName = subdevPathNameN;
            pos = subdevPathNameN.rfind('/');
            if (pos != std::string::npos)
                drvInfo.mDeviceName = subdevPathNameN.substr(pos + 1);
            drvInfo.mIspPort = (ISP_PORT)n; // Unused for media-ctl sensors
            status = getCSIPortID(drvInfo.mSensorName, mcPath, portId);
            if (status != NO_ERROR) {
                LOGE("error getting CSI port id %d", portId);
                return status;
            }
            /*
             * Parse i2c address from sensor name.
             * It is the last word in the sensor name string, so find last
             * space and take the rest of the string.
             */
            pos = drvInfo.mSensorName.rfind(" ");
            if (pos != string::npos)
                drvInfo.mI2CAddress = drvInfo.mSensorName.substr(pos + 1);
            /*
             * Now that we are done using the sensor name cut the name to
             * first space, to get the actual name. First we check if
             * it is tpg.
             */
            size_t i = drvInfo.mSensorName.find("TPG");
            if (CC_LIKELY(i != std::string::npos)) {
                drvInfo.mSensorName = drvInfo.mSensorName.substr(i,3);
                drvInfo.csiPort = portId;
                /* Because of several ports for TPG in media entity,
                 * just use port 0 as source input.
                 */
                if (drvInfo.csiPort == 0)
                    mSensorInfo.push_back(drvInfo);
            } else {
                // Real sensor: prefer the module's real sensor name parsed
                // earlier by parseModuleInfo() over the raw entity name.
                i = drvInfo.mSensorName.find(" ");
                if (CC_LIKELY(i != std::string::npos)) {
                    //drvInfo.mSensorName = drvInfo.mSensorName.substr(0, i);
                    drvInfo.mSensorName = drvInfo.mModuleRealSensorName;
                } else {
                    LOGW("Could not extract sensor name correctly");
                }
                drvInfo.mParentMediaDev = mcPath;
                drvInfo.csiPort = portId;
                mSensorInfo.push_back(drvInfo);
            }
            LOGI("Registered sensor driver \"%s\" found for sensor \"%s\", CSI port:%d",
                  drvInfo.mDeviceName.c_str(), drvInfo.mSensorName.c_str(),
                  drvInfo.csiPort);
            // All ok
            break;
        }
    }
    return OK;
}
} NAMESPACE_DECLARATION_END