blob: 8bb42a42f550be9833afb6e7fe4703be550cb121 [file] [log] [blame]
/*
**
** Copyright 2017, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
/* #define LOG_NDEBUG 0 */
#define LOG_TAG "ExynosCameraFrameFactoryPreview"
#include <log/log.h>
#include "ExynosCameraFrameFactoryPreview.h"
namespace android {
ExynosCameraFrameFactoryPreview::~ExynosCameraFrameFactoryPreview()
{
    /* Free the cached shot metadata first, then tear the factory down. */
    if (m_shot_ext != NULL) {
        delete m_shot_ext;
        m_shot_ext = NULL;
    }

    status_t ret = destroy();
    if (ret != NO_ERROR)
        CLOGE("destroy fail");
}
status_t ExynosCameraFrameFactoryPreview::create()
{
    Mutex::Autolock lock(ExynosCameraStreamMutex::getInstance()->getStreamMutex());
    CLOGI("");

    /* Let the base factory create all of the pipes. */
    status_t status = ExynosCameraFrameFactoryBase::create();
    if (status != NO_ERROR) {
        CLOGE("Pipe create fail, ret(%d)", status);
        return status;
    }

    /* EOS */
    status = m_pipes[PIPE_3AA]->setControl(V4L2_CID_IS_END_OF_STREAM, 1);
    if (status != NO_ERROR) {
        CLOGE("PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", PIPE_3AA, status);
        /* TODO: exception handling */
        return INVALID_OPERATION;
    }

    /* s_ctrl HAL version for selecting dvfs table; failure is non-fatal. */
    status = m_pipes[PIPE_3AA]->setControl(V4L2_CID_IS_HAL_VERSION, IS_HAL_VER_3_2);
    if (status < 0)
        CLOGW("V4L2_CID_IS_HAL_VERSION is fail");

    return m_transitState(FRAME_FACTORY_STATE_CREATE);
}
/*
 * Second-stage creation: runs the base factory's postcreate and then applies
 * the same 3AA controls as create() (EOS notification and HAL-version s_ctrl).
 *
 * Fixes: the failure log previously said "Pipe create fail" (copy-paste from
 * create()), and the CLOGW embedded __FUNCTION__ redundantly, unlike the
 * identical message in create().
 */
status_t ExynosCameraFrameFactoryPreview::postcreate(void)
{
    CLOGI("");
    status_t ret = NO_ERROR;

    ret = ExynosCameraFrameFactoryBase::postcreate();
    if (ret != NO_ERROR) {
        CLOGE("Pipe postcreate fail, ret(%d)", ret);
        return ret;
    }

    /* EOS */
    ret = m_pipes[PIPE_3AA]->setControl(V4L2_CID_IS_END_OF_STREAM, 1);
    if (ret != NO_ERROR) {
        CLOGE("PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", PIPE_3AA, ret);
        /* TODO: exception handling */
        return INVALID_OPERATION;
    }

    /* s_ctrl HAL version for selecting dvfs table; failure is non-fatal. */
    ret = m_pipes[PIPE_3AA]->setControl(V4L2_CID_IS_HAL_VERSION, IS_HAL_VER_3_2);
    if (ret < 0)
        CLOGW("V4L2_CID_IS_HAL_VERSION is fail");

    ret = m_transitState(FRAME_FACTORY_STATE_CREATE);
    return ret;
}
/*
 * Runs a short burst of "instant" frames through the 3AA path so the sensor's
 * AE converges before normal preview streaming starts.
 *
 * @param numFrames number of instant-shot frames to push (0 skips the whole
 *                  sequence and returns NO_ERROR)
 * @param buffers   caller-provided buffers, one per frame; each must carry a
 *                  metadata plane (camera2_shot_ext)
 * @return NO_ERROR on success; accumulated error status otherwise. Note the
 *         status codes are OR-ed together (existing design), so a non-zero
 *         return only signals "something failed".
 *
 * Fixes: the skip-path warning previously logged "umFrames" (typo for
 * "numFrames").
 */
status_t ExynosCameraFrameFactoryPreview::fastenAeStable(int32_t numFrames, ExynosCameraBuffer *buffers)
{
    CLOGI(" Start");

    status_t ret = NO_ERROR;
    status_t totalRet = NO_ERROR;
    ExynosCameraFrameSP_sptr_t newFrame = NULL;
    ExynosCameraFrameEntity *newEntity = NULL;
    frame_queue_t instantQ;
    int hwSensorW = 0, hwSensorH = 0;
    int bcropX = 0, bcropY = 0, bcropW = 0, bcropH = 0;
    int hwPreviewW = 0, hwPreviewH = 0;
    int index = 0;
    uint32_t minFrameRate, maxFrameRate, sensorFrameRate = 0;
    struct v4l2_streamparm streamParam;

    /* The sensor-facing pipe is FLITE only when FLITE->3AA runs M2M. */
    int pipeId = PIPE_3AA;
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_FLITE;
    }

    if (numFrames == 0) {
        CLOGW("numFrames is %d, we skip fastenAeStable", numFrames);
        return NO_ERROR;
    }

    /* Pick the fasten-AE frame rate (and size index) per camera/shot mode. */
    if (m_parameters->getCameraId() == CAMERA_ID_FRONT) {
        sensorFrameRate = FASTEN_AE_FPS_FRONT;
#ifdef SAMSUNG_COLOR_IRIS
        if (m_parameters->getShotMode() == SAMSUNG_ANDROID_CONTROL_SHOOTING_MODE_COLOR_IRIS) {
            index = FASTEN_AE_SIZE_INDEX_COLOR_IRIS;
            sensorFrameRate = FASTEN_AE_FPS_COLOR_IRIS_FRONT;
        }
#endif
    } else {
        sensorFrameRate = FASTEN_AE_FPS;
    }

    /* 1. Initialize pipes */
    m_parameters->getFastenAeStableSensorSize(&hwSensorW, &hwSensorH, index);
    m_parameters->getFastenAeStableBcropSize(&bcropW, &bcropH, index);
    m_parameters->getFastenAeStableBdsSize(&hwPreviewW, &hwPreviewH, index);

    /* Center the bayer crop inside the sensor output, 2px aligned. */
    bcropX = ALIGN_UP(((hwSensorW - bcropW) >> 1), 2);
    bcropY = ALIGN_UP(((hwSensorH - bcropH) >> 1), 2);

    if (bcropW < hwPreviewW || bcropH < hwPreviewH) {
        CLOGD("bayerCropSize %dx%d is smaller than BDSSize %dx%d. Force bayerCropSize",
                bcropW, bcropH, hwPreviewW, hwPreviewH);
        hwPreviewW = bcropW;
        hwPreviewH = bcropH;
    }

    /*
     * we must set flite'setupPipe on 3aa_pipe.
     * then, setControl/getControl about BNS size
     */
    ret = m_initPipesFastenAeStable(numFrames, hwSensorW, hwSensorH, sensorFrameRate);
    if (ret != NO_ERROR) {
        CLOGE("m_initPipesFastenAeStable() fail, ret(%d)", ret);
        return ret;
    }

    ret = m_initFlitePipe(hwSensorW, hwSensorH, sensorFrameRate);
    if (ret != NO_ERROR) {
        CLOGE("m_initFlitePipes() fail, ret(%d)", ret);
        return ret;
    }

    for (int i = 0; i < numFrames; i++) {
        /* 2. Generate instant frames */
        newFrame = m_frameMgr->createFrame(m_parameters, i);
        if (newFrame == NULL) {
            CLOGE("newFrame is NULL");
            ret = INVALID_OPERATION;
            goto cleanup;
        }

        ret = m_initFrameMetadata(newFrame);
        if (ret != NO_ERROR)
            CLOGE("frame(%d) metadata initialize fail", i);

        newEntity = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED);
        newFrame->addSiblingEntity(NULL, newEntity);
        newFrame->setNumRequestPipe(1);
        newEntity->setSrcBuf(buffers[i]);

        /* 3. Set metadata for instant on */
        camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buffers[i].addr[buffers[i].getMetaPlaneIndex()]);
        if (shot_ext != NULL) {
            /* Center-point AE region. */
            int aeRegionX = (hwSensorW) / 2;
            int aeRegionY = (hwSensorH) / 2;

            newFrame->getMetaData(shot_ext);
            m_parameters->duplicateCtrlMetadata((void *)shot_ext);
            m_activityControl->activityBeforeExecFunc(PIPE_3AA, (void *)&buffers[i]);

            setMetaCtlAeTargetFpsRange(shot_ext, sensorFrameRate, sensorFrameRate);
            setMetaCtlSensorFrameDuration(shot_ext, (uint64_t)((1000 * 1000 * 1000) / (uint64_t)sensorFrameRate));

            /* set afMode into INFINITY */
            shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL;
            /* NOTE(review): (0 << bit) is 0, so this &= clears EVERY option
             * bit, not just the MACRO bit. If only MACRO was meant, this
             * should be &= ~(1 << AA_AFMODE_OPTION_BIT_MACRO) — confirm. */
            shot_ext->shot.ctl.aa.vendor_afmode_option &= (0 << AA_AFMODE_OPTION_BIT_MACRO);

            setMetaCtlAeRegion(shot_ext, aeRegionX, aeRegionY, aeRegionX, aeRegionY, 0);

            /* Set video mode off for fastAE */
            setMetaVideoMode(shot_ext, AA_VIDEOMODE_OFF);

            /* Set 3AS size */
            enum NODE_TYPE nodeType = getNodeType(PIPE_3AA);
            int nodeNum = m_deviceInfo[PIPE_3AA].nodeNum[nodeType];
            if (nodeNum <= 0) {
                CLOGE(" invalid nodeNum(%d). so fail", nodeNum);
                ret = INVALID_OPERATION;
                goto cleanup;
            }
            setMetaNodeLeaderVideoID(shot_ext, nodeNum - FIMC_IS_VIDEO_BAS_NUM);
            setMetaNodeLeaderRequest(shot_ext, false);
            setMetaNodeLeaderInputSize(shot_ext, bcropX, bcropY, bcropW, bcropH);

            /* Set 3AP size */
            nodeType = getNodeType(PIPE_3AP);
            nodeNum = m_deviceInfo[PIPE_3AA].nodeNum[nodeType];
            if (nodeNum <= 0) {
                CLOGE(" invalid nodeNum(%d). so fail", nodeNum);
                ret = INVALID_OPERATION;
                goto cleanup;
            }
            int perframePosition = 0;
            setMetaNodeCaptureVideoID(shot_ext, perframePosition, nodeNum - FIMC_IS_VIDEO_BAS_NUM);
            setMetaNodeCaptureRequest(shot_ext, perframePosition, false);
            setMetaNodeCaptureOutputSize(shot_ext, perframePosition, 0, 0, hwPreviewW, hwPreviewH);

            /* Set ISPC/ISPP size (optional) */
            if (m_flag3aaIspOTF != HW_CONNECTION_MODE_M2M) {
                if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
                    /* Case of ISP-MCSC M2M */
                    nodeType = getNodeType(PIPE_ISPC);
                } else {
                    /* Case of ISP-MCSC OTF */
                    nodeType = getNodeType(PIPE_MCSC0);
                }
                nodeNum = m_deviceInfo[PIPE_3AA].nodeNum[nodeType];
                if (nodeNum <= 0) {
                    CLOGE(" invalid nodeNum(%d). so fail", nodeNum);
                    ret = INVALID_OPERATION;
                    goto cleanup;
                }
                perframePosition = 1; /* 3AP:0, ISPC/ISPP:1 */
                setMetaNodeCaptureVideoID(shot_ext, perframePosition, nodeNum - FIMC_IS_VIDEO_BAS_NUM);
                setMetaNodeCaptureRequest(shot_ext, perframePosition, false);
                setMetaNodeCaptureOutputSize(shot_ext, perframePosition, 0, 0, hwPreviewW, hwPreviewH);
            }
        }

        /* 4. Push instance frames to pipe */
        ret = pushFrameToPipe(newFrame, PIPE_3AA);
        if (ret != NO_ERROR) {
            CLOGE(" pushFrameToPipeFail, ret(%d)", ret);
            goto cleanup;
        }
        CLOGD("Instant shot - FD(%d, %d)", buffers[i].fd[0], buffers[i].fd[1]);

        instantQ.pushProcessQ(&newFrame);
    }

    /* 5. Pipe instant on */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_FLITE]->instantOn(0);
        if (ret != NO_ERROR) {
            CLOGE(" FLITE On fail, ret(%d)", ret);
            goto cleanup;
        }
    }
    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_VRA]->start();
        if (ret != NO_ERROR) {
            CLOGE("VRA start fail, ret(%d)", ret);
            goto cleanup;
        }
    }
    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_MCSC]->start();
        if (ret != NO_ERROR) {
            CLOGE("MCSC start fail, ret(%d)", ret);
            goto cleanup;
        }
    }
    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[INDEX(PIPE_ISP)]->start();
        if (ret < 0) {
            CLOGE("ISP start fail, ret(%d)", ret);
            goto cleanup;
        }
    }

    ret = m_pipes[PIPE_3AA]->instantOn(numFrames);
    if (ret != NO_ERROR) {
        CLOGE("3AA instantOn fail, ret(%d)", ret);
        goto cleanup;
    }

    /* 6. SetControl to sensor instant on */
    ret = m_pipes[pipeId]->setControl(V4L2_CID_IS_S_STREAM, (1 | numFrames << SENSOR_INSTANT_SHIFT));
    if (ret != NO_ERROR) {
        CLOGE("instantOn fail, ret(%d)", ret);
        goto cleanup;
    }

cleanup:
    /* Teardown runs on both success and failure; remember the first error. */
    totalRet |= ret;

    /* 7. Pipe instant off */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_FLITE]->instantOff();
        if (ret != NO_ERROR) {
            CLOGE(" FLITE Off fail, ret(%d)", ret);
        }
    }

    ret = m_pipes[PIPE_3AA]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000);
    if (ret != NO_ERROR) {
        CLOGE("3AA force done fail, ret(%d)", ret);
    }

    ret = m_pipes[PIPE_3AA]->instantOff();
    if (ret != NO_ERROR) {
        CLOGE("3AA instantOff fail, ret(%d)", ret);
    }

    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[INDEX(PIPE_ISP)]->stop();
        if (ret != NO_ERROR) {
            CLOGE("ISP stop fail, ret(%d)", ret);
        }
    }
    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_MCSC]->stop();
        if (ret != NO_ERROR) {
            CLOGE("MCSC stop fail, ret(%d)", ret);
        }
    }
    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_VRA]->stop();
        if (ret != NO_ERROR) {
            CLOGE("VRA stop fail, ret(%d)", ret);
        }
    }

    /* 8. Rollback framerate after fastenfeenable done */
    /* setParam for Frame rate : must after setInput on Flite */
    memset(&streamParam, 0x0, sizeof(v4l2_streamparm));
    m_parameters->getPreviewFpsRange(&minFrameRate, &maxFrameRate);
    sensorFrameRate = maxFrameRate;
    streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    streamParam.parm.capture.timeperframe.numerator = 1;
    streamParam.parm.capture.timeperframe.denominator = sensorFrameRate;
    CLOGI("set framerate (denominator=%d)", sensorFrameRate);
    ret = setParam(&streamParam, pipeId);
    if (ret != NO_ERROR) {
        CLOGE("setParam(%d) fail, ret(%d)", pipeId, ret);
        return INVALID_OPERATION;
    }

    /* 9. Clean up all frames */
    for (int i = 0; i < numFrames; i++) {
        newFrame = NULL;
        if (instantQ.getSizeOfProcessQ() == 0)
            break;

        ret = instantQ.popProcessQ(&newFrame);
        if (ret != NO_ERROR) {
            CLOGE("pop instantQ fail, ret(%d)", ret);
            continue;
        }
        if (newFrame == NULL) {
            CLOGE("newFrame is NULL,");
            continue;
        }
        /* Dropping the sptr releases the frame back to the frame manager. */
        newFrame = NULL;
    }

    CLOGI("Done");

    ret |= totalRet;
    return ret;
}
status_t ExynosCameraFrameFactoryPreview::initPipes(void)
{
    CLOGI("");

    int hwSensorW = 0, hwSensorH = 0;
    uint32_t minFrameRate = 0, maxFrameRate = 0, sensorFrameRate = 0;

    /* The sensor runs at the top of the preview fps range. */
    m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH);
    m_parameters->getPreviewFpsRange(&minFrameRate, &maxFrameRate);
    sensorFrameRate = maxFrameRate;

    /* setDeviceInfo does changing path */
    status_t ret = m_setupConfig();
    if (ret != NO_ERROR) {
        CLOGE("m_setupConfig() fail");
        return ret;
    }

    /*
     * we must set flite'setupPipe on 3aa_pipe.
     * then, setControl/getControl about BNS size
     */
    ret = m_initPipes(sensorFrameRate);
    if (ret != NO_ERROR) {
        CLOGE("m_initPipes() fail");
        return ret;
    }

    ret = m_initFlitePipe(hwSensorW, hwSensorH, sensorFrameRate);
    if (ret != NO_ERROR) {
        CLOGE("m_initFlitePipe() fail");
        return ret;
    }

    m_frameCount = 0;

    return m_transitState(FRAME_FACTORY_STATE_INIT);
}
/*
 * Pre-maps V4L2 buffers for every pipe in use: 3AA always, then ISP/MCSC/VRA
 * only when their upstream link runs M2M (OTF-connected pipes share the
 * leader's nodes and need no mapping of their own).
 *
 * Fixes: the VRA failure branch previously logged "MCSC mapBuffer fail"
 * (copy-paste from the MCSC branch).
 *
 * @return NO_ERROR on success; INVALID_OPERATION on the first mapping failure.
 */
status_t ExynosCameraFrameFactoryPreview::mapBuffers(void)
{
    status_t ret = NO_ERROR;

    ret = m_pipes[PIPE_3AA]->setMapBuffer();
    if (ret != NO_ERROR) {
        CLOGE("3AA mapBuffer fail, ret(%d)", ret);
        /* TODO: exception handling */
        return INVALID_OPERATION;
    }

    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_ISP]->setMapBuffer();
        if (ret != NO_ERROR) {
            CLOGE("ISP mapBuffer fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }
    }

    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_MCSC]->setMapBuffer();
        if (ret != NO_ERROR) {
            CLOGE("MCSC mapBuffer fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }
    }

    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_VRA]->setMapBuffer();
        if (ret != NO_ERROR) {
            CLOGE("VRA mapBuffer fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }
    }

    CLOGI("Map buffer Success!");

    return NO_ERROR;
}
/*
 * Intentionally a no-op for the preview factory.
 */
status_t ExynosCameraFrameFactoryPreview::preparePipes(void)
{
/* NOTE: Prepare for 3AA is moved after ISP stream on */
/* we must not qbuf before stream on, when sensor group. */
return NO_ERROR;
}
status_t ExynosCameraFrameFactoryPreview::startInitialThreads(void)
{
    status_t ret = NO_ERROR;

    CLOGI("start pre-ordered initial pipe thread");

    /* Sensor-facing threads run only while the sensor is streaming. */
    if (m_sensorStandby == false) {
        if (m_request[PIPE_VC0]
            && m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
            ret = startThread(PIPE_FLITE);
            if (ret != NO_ERROR)
                return ret;
        }

        ret = startThread(PIPE_3AA);
        if (ret != NO_ERROR)
            return ret;
    }

    /* Each M2M-connected downstream pipe needs its own thread, in order. */
    const int m2mPipeIds[] = {PIPE_ISP, PIPE_MCSC, PIPE_VRA};
    const bool m2mEnabled[] = {
        m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M,
        m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M,
        m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M,
    };

    for (int i = 0; i < (int)(sizeof(m2mPipeIds) / sizeof(m2mPipeIds[0])); i++) {
        if (m2mEnabled[i] == false)
            continue;

        ret = startThread(m2mPipeIds[i]);
        if (ret != NO_ERROR)
            return ret;
    }

    return NO_ERROR;
}
status_t ExynosCameraFrameFactoryPreview::setStopFlag(void)
{
    CLOGI("");

    status_t ret = NO_ERROR;

    /* FLITE is flagged whenever it is a separate (M2M) pipe. */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[PIPE_FLITE]->setStopFlag();
    }

    /* The remaining pipes are flagged only if they actually started. */
    const int stopPipeIds[] = {PIPE_3AA, PIPE_ISP, PIPE_DCP, PIPE_MCSC, PIPE_VRA};
    for (int i = 0; i < (int)(sizeof(stopPipeIds) / sizeof(stopPipeIds[0])); i++) {
        if (m_pipes[stopPipeIds[i]]->flagStart() == true)
            ret = m_pipes[stopPipeIds[i]]->setStopFlag();
    }

    /* Individual setStopFlag() results are not propagated (existing design). */
    (void)ret;
    return NO_ERROR;
}
/*
 * Puts the sensor into (or takes it out of) standby by forwarding the request
 * to the pipe that owns the sensor node. Serialized by m_sensorStandbyLock;
 * a repeated request for the current state is a no-op.
 *
 * @param flagStandby true to enter standby, false to leave it
 * @return result of the pipe's sensorStandby() call (NO_ERROR if already in
 *         the requested state)
 */
status_t ExynosCameraFrameFactoryPreview::sensorStandby(bool flagStandby)
{
Mutex::Autolock lock(m_sensorStandbyLock);
ExynosCameraAutoTimer autoTimer(__FUNCTION__);
status_t ret = NO_ERROR;
int pipeId = -1;
if (m_sensorStandby == flagStandby) {
CLOGI("already sensor standby(%d)", flagStandby);
return ret;
}
/* State is recorded before the pipe call; it is not rolled back on failure. */
m_sensorStandby = flagStandby;
/* NOTE(review): comparing getHwConnectionMode() against `true` looks
 * suspicious if it returns an HW_CONNECTION_MODE enum rather than bool —
 * confirm the intended check (e.g. == HW_CONNECTION_MODE_OTF). */
if (m_parameters->getHwConnectionMode(PIPE_FLITE, PIPE_3AA) == true) {
pipeId = PIPE_3AA;
} else {
pipeId = PIPE_FLITE;
}
ret = m_pipes[pipeId]->sensorStandby(flagStandby);
if (ret != NO_ERROR) {
CLOGE("Sensor standby(%s) fail! ret(%d)",
(flagStandby?"On":"Off"), ret);
}
CLOGI("Sensor Standby(%s) End", (flagStandby?"On":"Off"));
return ret;
}
/*
 * Builds the per-pipe device table (m_deviceInfo) and sensor-id table
 * (m_sensorIds) for the preview path: FLITE(+VC0/VC1), 3AA(S/C/P),
 * ISP(S/C/P), MCSC(+ports 0/1/2, optionally 5) and, when MCSC->VRA is M2M,
 * VRA. Node entries are grouped under the leader pipe of each hardware
 * stage; when a link is OTF the downstream nodes are folded into the
 * upstream leader's pipeId instead of getting their own.
 *
 * Order matters throughout: each m_getSensorId() call reads the node number
 * of the stage wired before it, so the sections must stay in pipeline order.
 */
status_t ExynosCameraFrameFactoryPreview::m_setDeviceInfo(void)
{
CLOGI("");
int pipeId = -1;
int node3aa = -1, node3ac = -1, node3ap = -1;
int nodeIsp = -1, nodeIspc = -1, nodeIspp = -1;
int nodeMcsc = -1, nodeMcscp0 = -1, nodeMcscp1 = -1, nodeMcscp2 = -1, nodeMcscp5 = -1;
int nodeVra = -1;
int previousPipeId = -1;
int vraSrcPipeId = -1;
enum NODE_TYPE nodeType = INVALID_NODE;
/* Only the very first node (FLITE) is the stream leader. */
bool flagStreamLeader = true;
m_initDeviceInfo(PIPE_FLITE);
m_initDeviceInfo(PIPE_3AA);
m_initDeviceInfo(PIPE_ISP);
m_initDeviceInfo(PIPE_MCSC);
m_initDeviceInfo(PIPE_VRA);
/* Select the 3AA hardware instance: the slave camera of a dual-camera
 * session uses the 31x nodes, everything else the 30x nodes. */
#ifdef USE_DUAL_CAMERA
if (m_parameters->getDualMode() == true
&& (getCameraId() == CAMERA_ID_BACK_1 || getCameraId() == CAMERA_ID_FRONT_1)) {
node3aa = FIMC_IS_VIDEO_31S_NUM;
node3ac = FIMC_IS_VIDEO_31C_NUM;
node3ap = FIMC_IS_VIDEO_31P_NUM;
} else
#endif
{
node3aa = FIMC_IS_VIDEO_30S_NUM;
node3ac = FIMC_IS_VIDEO_30C_NUM;
node3ap = FIMC_IS_VIDEO_30P_NUM;
}
nodeIsp = FIMC_IS_VIDEO_I0S_NUM;
nodeIspc = FIMC_IS_VIDEO_I0C_NUM;
nodeIspp = FIMC_IS_VIDEO_I0P_NUM;
nodeMcsc = FIMC_IS_VIDEO_M0S_NUM;
nodeMcscp0 = FIMC_IS_VIDEO_M0P_NUM;
nodeMcscp1 = FIMC_IS_VIDEO_M1P_NUM;
nodeMcscp2 = FIMC_IS_VIDEO_M2P_NUM;
nodeMcscp5 = FIMC_IS_VIDEO_M5P_NUM;
nodeVra = FIMC_IS_VIDEO_VRA_NUM;
/*
 * FLITE
 */
bool flagQuickSwitchFlag = false;
#ifdef SAMSUNG_QUICK_SWITCH
flagQuickSwitchFlag = m_parameters->getQuickSwitchFlag();
#endif
/* FLITE nodes live under their own pipe only when FLITE->3AA is M2M;
 * otherwise they are folded into the 3AA pipe. */
if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_FLITE;
} else {
pipeId = PIPE_3AA;
}
/* FLITE */
nodeType = getNodeType(PIPE_FLITE);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_FLITE;
m_deviceInfo[pipeId].nodeNum[nodeType] = getFliteNodenum(m_cameraId, false, flagQuickSwitchFlag);
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "FLITE", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[nodeType], false, flagStreamLeader, m_flagReprocessing);
/* Other nodes is not stream leader */
flagStreamLeader = false;
/* VC0 for bayer */
nodeType = getNodeType(PIPE_VC0);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_VC0;
m_deviceInfo[pipeId].nodeNum[nodeType] = getFliteCaptureNodenum(m_cameraId, m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_FLITE)]);
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "BAYER", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_FLITE)], false, flagStreamLeader, m_flagReprocessing);
#ifdef SUPPORT_DEPTH_MAP
/* VC1 for depth */
if (m_parameters->isDepthMapSupported()) {
nodeType = getNodeType(PIPE_VC1);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_VC1;
m_deviceInfo[pipeId].nodeNum[nodeType] = getDepthVcNodeNum(m_cameraId, false);
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "DEPTH", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_FLITE)], false, flagStreamLeader, m_flagReprocessing);
}
#endif // SUPPORT_DEPTH_MAP
/*
 * 3AA
 */
previousPipeId = pipeId;
pipeId = PIPE_3AA;
/* 3AS */
nodeType = getNodeType(PIPE_3AA);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA;
m_deviceInfo[pipeId].nodeNum[nodeType] = node3aa;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_VC0)], m_flagFlite3aaOTF, flagStreamLeader, m_flagReprocessing);
/* 3AC */
nodeType = getNodeType(PIPE_3AC);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC;
m_deviceInfo[pipeId].nodeNum[nodeType] = node3ac;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, flagStreamLeader, m_flagReprocessing);
/* 3AP */
nodeType = getNodeType(PIPE_3AP);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP;
m_deviceInfo[pipeId].nodeNum[nodeType] = node3ap;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, flagStreamLeader, m_flagReprocessing);
/*
 * ISP
 */
previousPipeId = pipeId;
/* ISP gets its own pipe only when 3AA->ISP is M2M; otherwise its nodes
 * are folded into the 3AA pipe. */
if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_ISP;
}
/* ISPS */
nodeType = getNodeType(PIPE_ISP);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIsp;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, flagStreamLeader, m_flagReprocessing);
/* ISPC */
nodeType = getNodeType(PIPE_ISPC);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPC;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIspc;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP)], true, flagStreamLeader, m_flagReprocessing);
/* ISPP */
nodeType = getNodeType(PIPE_ISPP);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIspp;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP)], true, flagStreamLeader, m_flagReprocessing);
/*
 * MCSC
 */
previousPipeId = pipeId;
/* Same folding rule for the ISP->MCSC link. */
if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_MCSC;
}
/* MCSC */
nodeType = getNodeType(PIPE_MCSC);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcsc;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_ISPC)], m_flagIspMcscOTF, flagStreamLeader, m_flagReprocessing);
/* MCSC0 */
nodeType = getNodeType(PIPE_MCSC0);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC0;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp0;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC)], true, flagStreamLeader, m_flagReprocessing);
/* MCSC1 */
nodeType = getNodeType(PIPE_MCSC1);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC1;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp1;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_PREVIEW_CALLBACK", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC)], true, flagStreamLeader, m_flagReprocessing);
/* MCSC2 */
nodeType = getNodeType(PIPE_MCSC2);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC2;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp2;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_RECORDING", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC)], true, flagStreamLeader, m_flagReprocessing);
/* MCSC5 (the face-detect downscaler) and VRA are wired only when the
 * MCSC->VRA link is M2M. */
if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
/* MCSC5 */
nodeType = getNodeType(PIPE_MCSC5);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC5;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp5;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_DS", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC)], true, flagStreamLeader, m_flagReprocessing);
/*
 * VRA
 */
previousPipeId = pipeId;
vraSrcPipeId = PIPE_MCSC5;
pipeId = PIPE_VRA;
nodeType = getNodeType(PIPE_VRA);
m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_VRA;
m_deviceInfo[pipeId].nodeNum[nodeType] = nodeVra;
m_deviceInfo[pipeId].bufferManagerType[nodeType] = BUFFER_MANAGER_ION_TYPE;
strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "VRA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1);
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(vraSrcPipeId)], m_flagMcscVraOTF, flagStreamLeader, m_flagReprocessing);
}
return NO_ERROR;
}
status_t ExynosCameraFrameFactoryPreview::m_initPipes(uint32_t frameRate)
{
CLOGI("");
status_t ret = NO_ERROR;
camera_pipe_info_t pipeInfo[MAX_NODE];
camera_pipe_info_t nullPipeInfo;
int pipeId = -1;
enum NODE_TYPE nodeType = INVALID_NODE;
enum NODE_TYPE leaderNodeType = OUTPUT_NODE;
int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0;
int yuvWidth[ExynosCameraParameters::YUV_MAX] = {0};
int yuvHeight[ExynosCameraParameters::YUV_MAX] = {0};
int yuvFormat[ExynosCameraParameters::YUV_MAX] = {0};
int dsWidth = MAX_VRA_INPUT_WIDTH;
int dsHeight = MAX_VRA_INPUT_HEIGHT;
int dsFormat = m_parameters->getHwVraInputFormat();;
int yuvBufferCnt[ExynosCameraParameters::YUV_MAX] = {0};
int bayerFormat = m_parameters->getBayerFormat(PIPE_3AA);
int hwVdisformat = m_parameters->getHWVdisFormat();
int perFramePos = 0;
int yuvIndex = -1;
struct ExynosConfigInfo *config = m_parameters->getConfig();
ExynosRect bnsSize;
ExynosRect bcropSize;
ExynosRect bdsSize;
ExynosRect tempRect;
#ifdef DEBUG_RAWDUMP
if (m_parameters->checkBayerDumpEnable()) {
bayerFormat = CAMERA_DUMP_BAYER_FORMAT;
}
#endif
m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH);
m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH);
m_parameters->getPreviewBayerCropSize(&bnsSize, &bcropSize, false);
m_parameters->getPreviewBdsSize(&bdsSize, false);
CLOGI("MaxSensorSize %dx%d bayerFormat %x",
maxSensorW, maxSensorH, bayerFormat);
CLOGI("BnsSize %dx%d BcropSize %dx%d BdsSize %dx%d",
bnsSize.w, bnsSize.h, bcropSize.w, bcropSize.h, bdsSize.w, bdsSize.h);
CLOGI("DS Size %dx%d Format %x Buffer count %d",
dsWidth, dsHeight, dsFormat, config->current->bufInfo.num_vra_buffers);
for (int i = ExynosCameraParameters::YUV_0; i < ExynosCameraParameters::YUV_MAX; i++) {
m_parameters->getHwYuvSize(&yuvWidth[i], &yuvHeight[i], i);
yuvFormat[i] = m_parameters->getYuvFormat(i);
yuvBufferCnt[i] = m_parameters->getYuvBufferCount(i);
CLOGI("YUV[%d] Size %dx%d Format %x Buffer count %d",
i, yuvWidth[i], yuvHeight[i], yuvFormat[i], yuvBufferCnt[i]);
}
/*
* FLITE
*/
if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_FLITE;
} else {
pipeId = PIPE_3AA;
}
/* setParam for Frame rate : must after setInput on Flite */
struct v4l2_streamparm streamParam;
memset(&streamParam, 0x0, sizeof(v4l2_streamparm));
streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
streamParam.parm.capture.timeperframe.numerator = 1;
streamParam.parm.capture.timeperframe.denominator = frameRate;
CLOGI("Set framerate (denominator=%d)", frameRate);
ret = setParam(&streamParam, pipeId);
if (ret != NO_ERROR) {
CLOGE("FLITE setParam(frameRate(%d), pipeId(%d)) fail", frameRate, pipeId);
return INVALID_OPERATION;
}
ret = m_setSensorSize(pipeId, hwSensorW, hwSensorH);
if (ret != NO_ERROR) {
CLOGE("m_setSensorSize(pipeId(%d), hwSensorW(%d), hwSensorH(%d)) fail", pipeId, hwSensorW, hwSensorH);
return ret;
}
/* FLITE */
nodeType = getNodeType(PIPE_FLITE);
/* set v4l2 buffer size */
tempRect.fullW = 32;
tempRect.fullH = 64;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers;
/* Set output node default info */
SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_FLITE);
/* BAYER */
nodeType = getNodeType(PIPE_VC0);
perFramePos = PERFRAME_BACK_VC0_POS;
/* set v4l2 buffer size */
tempRect.fullW = hwSensorW;
tempRect.fullH = hwSensorH;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node bytes per plane */
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers;
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
#ifdef SUPPORT_DEPTH_MAP
if (m_parameters->isDepthMapSupported()) {
/* Depth Map Configuration */
int depthMapW = 0, depthMapH = 0;
int depthMapFormat = DEPTH_MAP_FORMAT;
ret = m_parameters->getDepthMapSize(&depthMapW, &depthMapH);
if (ret != NO_ERROR) {
CLOGE("Failed to getDepthMapSize");
return ret;
}
CLOGI("DepthMapSize %dx%d", depthMapW, depthMapH);
tempRect.fullW = depthMapW;
tempRect.fullH = depthMapH;
tempRect.colorFormat = depthMapFormat;
nodeType = getNodeType(PIPE_VC1);
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, tempRect.colorFormat);
pipeInfo[nodeType].bufInfo.count = NUM_DEPTHMAP_BUFFERS;
SET_CAPTURE_DEVICE_BASIC_INFO();
}
#endif
/* setup pipe info to FLITE pipe */
if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
if (ret != NO_ERROR) {
CLOGE("FLITE setupPipe fail, ret(%d)", ret);
/* TODO: exception handling */
return INVALID_OPERATION;
}
/* clear pipeInfo for next setupPipe */
for (int i = 0; i < MAX_NODE; i++)
pipeInfo[i] = nullPipeInfo;
}
/*
* 3AA
*/
pipeId = PIPE_3AA;
/* 3AS */
nodeType = getNodeType(PIPE_3AA);
bayerFormat = m_parameters->getBayerFormat(PIPE_3AA);
if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_OTF) {
/* set v4l2 buffer size */
tempRect.fullW = 32;
tempRect.fullH = 64;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers;
} else if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
/* set v4l2 buffer size */
tempRect.fullW = hwSensorW;
tempRect.fullH = hwSensorH;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node bytes per plane */
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);
#if 0
switch (bayerFormat) {
case V4L2_PIX_FMT_SBGGR16:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR12:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 3 / 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR10:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 5 / 4), CAMERA_16PX_ALIGN);
break;
default:
CLOGW("Invalid bayer format(%d)", bayerFormat);
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
}
#endif
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers;
}
/* Set output node default info */
SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_3AA);
/* 3AC */
nodeType = getNodeType(PIPE_3AC);
perFramePos = PERFRAME_BACK_3AC_POS;
bayerFormat = m_parameters->getBayerFormat(PIPE_3AC);
/* set v4l2 buffer size */
tempRect.fullW = bcropSize.w;
tempRect.fullH = bcropSize.h;
tempRect.colorFormat = bayerFormat;
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);
#if 0
/* set v4l2 video node bytes per plane */
switch (bayerFormat) {
case V4L2_PIX_FMT_SBGGR16:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR12:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 3 / 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR10:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 5 / 4), CAMERA_16PX_ALIGN);
break;
default:
CLOGW("Invalid bayer format(%d)", bayerFormat);
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
}
#endif
/* set v4l2 video node buffer count */
switch(m_parameters->getReprocessingBayerMode()) {
case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON:
case REPROCESSING_BAYER_MODE_PURE_DYNAMIC:
case REPROCESSING_BAYER_MODE_NONE:
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers;
break;
case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON:
case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC:
if (m_parameters->isSupportZSLInput()) {
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers;
} else {
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_sensor_buffers;
}
break;
default:
CLOGE("Invalid reprocessing mode(%d)", m_parameters->getReprocessingBayerMode());
}
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* 3AP */
nodeType = getNodeType(PIPE_3AP);
perFramePos = PERFRAME_BACK_3AP_POS;
bayerFormat = m_parameters->getBayerFormat(PIPE_3AP);
/* set v4l2 buffer size */
tempRect.fullW = bdsSize.w;
tempRect.fullH = bdsSize.h;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node bytes per plane */
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);
#if 0
switch (bayerFormat) {
case V4L2_PIX_FMT_SBGGR16:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR12:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 3 / 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR10:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 5 / 4), CAMERA_16PX_ALIGN);
break;
default:
CLOGW("Invalid bayer format(%d)", bayerFormat);
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
}
#endif
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers;
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* setup pipe info to 3AA pipe */
if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
if (ret != NO_ERROR) {
CLOGE("3AA setupPipe fail, ret(%d)", ret);
/* TODO: exception handling */
return INVALID_OPERATION;
}
/* clear pipeInfo for next setupPipe */
for (int i = 0; i < MAX_NODE; i++)
pipeInfo[i] = nullPipeInfo;
}
/*
* ISP
*/
/* ISPS */
if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_ISP;
nodeType = getNodeType(PIPE_ISP);
bayerFormat = m_parameters->getBayerFormat(PIPE_ISP);
/* set v4l2 buffer size */
tempRect.fullW = bdsSize.w;
tempRect.fullH = bdsSize.h;
tempRect.colorFormat = bayerFormat;
/* set v4l2 video node bytes per plane */
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);
#if 0
switch (bayerFormat) {
case V4L2_PIX_FMT_SBGGR16:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR12:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 3 / 2), CAMERA_16PX_ALIGN);
break;
case V4L2_PIX_FMT_SBGGR10:
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 5 / 4), CAMERA_16PX_ALIGN);
break;
default:
CLOGW("Invalid bayer format(%d)", bayerFormat);
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN);
break;
}
#endif
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers;
/* Set output node default info */
SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_ISP);
}
/* ISPC */
nodeType = getNodeType(PIPE_ISPC);
perFramePos = PERFRAME_BACK_ISPC_POS;
/* set v4l2 buffer size */
tempRect.fullW = bdsSize.w;
tempRect.fullH = bdsSize.h;
tempRect.colorFormat = hwVdisformat;
pipeInfo[nodeType].pixelSize = CAMERA_PIXEL_SIZE_PACKED_10BIT;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers;;
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* ISPP */
nodeType = getNodeType(PIPE_ISPP);
perFramePos = PERFRAME_BACK_ISPP_POS;
/* set v4l2 buffer size */
tempRect.fullW = bdsSize.w;
tempRect.fullH = bdsSize.h;
tempRect.colorFormat = hwVdisformat;
pipeInfo[nodeType].pixelSize = CAMERA_PIXEL_SIZE_PACKED_10BIT;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers;;
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* setup pipe info to ISP pipe */
if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
if (ret != NO_ERROR) {
CLOGE("ISP setupPipe fail, ret(%d)", ret);
/* TODO: exception handling */
return INVALID_OPERATION;
}
/* clear pipeInfo for next setupPipe */
for (int i = 0; i < MAX_NODE; i++)
pipeInfo[i] = nullPipeInfo;
}
/*
* MCSC
*/
/* MCSC */
if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_MCSC;
nodeType = getNodeType(PIPE_MCSC);
/* set v4l2 buffer size */
tempRect.fullW = bdsSize.w;
tempRect.fullH = bdsSize.h;
tempRect.colorFormat = hwVdisformat;
/* set v4l2 video node bytes per plane */
pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, CAMERA_16PX_ALIGN);
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers;
/* Set output node default info */
SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_MCSC);
}
/* MCSC0 */
nodeType = getNodeType(PIPE_MCSC0);
perFramePos = PERFRAME_BACK_SCP_POS;
yuvIndex = ExynosCameraParameters::YUV_0;
m_parameters->setYuvOutPortId(PIPE_MCSC0, yuvIndex);
/* set v4l2 buffer size */
tempRect.fullW = yuvWidth[yuvIndex];
tempRect.fullH = yuvHeight[yuvIndex];
tempRect.colorFormat = yuvFormat[yuvIndex];
/* set v4l2 video node bytes per plane */
#ifdef USE_BUFFER_WITH_STRIDE
/* to use stride for preview buffer, set the bytesPerPlane */
pipeInfo[nodeType].bytesPerPlane[0] = yuvWidth[yuvIndex];
#endif
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = yuvBufferCnt[yuvIndex];
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* MCSC1 */
nodeType = getNodeType(PIPE_MCSC1);
perFramePos = PERFRAME_BACK_MCSC1_POS;
yuvIndex = ExynosCameraParameters::YUV_1;
m_parameters->setYuvOutPortId(PIPE_MCSC1, yuvIndex);
/* set v4l2 buffer size */
tempRect.fullW = yuvWidth[yuvIndex];
tempRect.fullH = yuvHeight[yuvIndex];
tempRect.colorFormat = yuvFormat[yuvIndex];
/* set v4l2 video node bytes per plane */
#ifdef USE_BUFFER_WITH_STRIDE
/* to use stride for preview buffer, set the bytesPerPlane */
pipeInfo[nodeType].bytesPerPlane[0] = yuvWidth[yuvIndex];
#endif
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = yuvBufferCnt[yuvIndex];
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* MCSC2 */
nodeType = getNodeType(PIPE_MCSC2);
perFramePos = PERFRAME_BACK_MCSC2_POS;
yuvIndex = ExynosCameraParameters::YUV_2;
m_parameters->setYuvOutPortId(PIPE_MCSC2, yuvIndex);
/* set v4l2 buffer size */
tempRect.fullW = yuvWidth[yuvIndex];
tempRect.fullH = yuvHeight[yuvIndex];
tempRect.colorFormat = yuvFormat[yuvIndex];
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = yuvBufferCnt[yuvIndex];
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
/* MCSC5 */
nodeType = getNodeType(PIPE_MCSC5);
perFramePos = PERFRAME_BACK_MCSC5_POS;
/* set v4l2 buffer size */
tempRect.fullW = dsWidth;
tempRect.fullH = dsHeight;
tempRect.colorFormat = dsFormat;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_vra_buffers;
/* Set capture node default info */
SET_CAPTURE_DEVICE_BASIC_INFO();
if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
if (ret != NO_ERROR) {
CLOGE("MCSC setupPipe fail, ret(%d)", ret);
/* TODO: exception handling */
return INVALID_OPERATION;
}
/* clear pipeInfo for next setupPipe */
for (int i = 0; i < MAX_NODE; i++)
pipeInfo[i] = nullPipeInfo;
}
/* VRA */
if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
pipeId = PIPE_VRA;
nodeType = getNodeType(PIPE_VRA);
/* set v4l2 buffer size */
tempRect.fullW = dsWidth;
tempRect.fullH = dsHeight;
tempRect.colorFormat = dsFormat;
/* set v4l2 video node buffer count */
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_vra_buffers;
/* Set output node default info */
SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_VRA);
}
ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
if (ret != NO_ERROR) {
CLOGE("MCSC setupPipe fail, ret(%d)", ret);
/* TODO: exception handling */
return INVALID_OPERATION;
}
return NO_ERROR;
}
/*
 * Configure every pipe (FLITE -> 3AA -> ISP -> MCSC -> VRA) for the
 * "fasten AE stable" warm-up sequence: a short burst of numFrames frames
 * run at sensor size hwSensorW x hwSensorH and the given frameRate so
 * that AE converges before real preview starts.
 *
 * @param numFrames  v4l2 buffer count programmed into every node.
 * @param hwSensorW  sensor output width used for bayer-sized nodes.
 * @param hwSensorH  sensor output height used for bayer-sized nodes.
 * @param frameRate  denominator of the v4l2 timeperframe set on the
 *                   sensor-facing pipe.
 * @return NO_ERROR on success, INVALID_OPERATION / sub-call error otherwise.
 *
 * NOTE: SET_OUTPUT_DEVICE_BASIC_INFO / SET_CAPTURE_DEVICE_BASIC_INFO are
 * macros that read the surrounding locals (pipeId, nodeType, perFramePos,
 * tempRect, pipeInfo, ...); statement order in this function is therefore
 * significant and must not be rearranged.
 */
status_t ExynosCameraFrameFactoryPreview::m_initPipesFastenAeStable(int32_t numFrames,
                                                                    int hwSensorW,
                                                                    int hwSensorH,
                                                                    uint32_t frameRate)
{
    status_t ret = NO_ERROR;

    /* pipeInfo is filled node-by-node, flushed via setupPipe(), then reset
     * from nullPipeInfo (default-constructed, all zero) for the next pipe. */
    camera_pipe_info_t pipeInfo[MAX_NODE];
    camera_pipe_info_t nullPipeInfo;

    int pipeId = -1;
    enum NODE_TYPE nodeType = INVALID_NODE;
    enum NODE_TYPE leaderNodeType = OUTPUT_NODE;

    ExynosRect tempRect;
    int bayerFormat = m_parameters->getBayerFormat(PIPE_3AA);
    int hwVdisformat = m_parameters->getHWVdisFormat();
    /* Fall back to NV21M when no HW preview format has been decided yet. */
    int hwPreviewFormat = (m_parameters->getHwPreviewFormat() == 0) ? V4L2_PIX_FMT_NV21M : m_parameters->getHwPreviewFormat();
    int vraWidth = MAX_VRA_INPUT_WIDTH, vraHeight = MAX_VRA_INPUT_HEIGHT;
    int vraFormat = m_parameters->getHwVraInputFormat();
    int perFramePos = 0;

#ifdef DEBUG_RAWDUMP
    /* Raw-dump builds force an unpacked bayer format so dumps are readable. */
    if (m_parameters->checkBayerDumpEnable()) {
        bayerFormat = CAMERA_DUMP_BAYER_FORMAT;
    }
#endif

    /*
     * FLITE
     */
    /* When FLITE->3AA runs M2M, FLITE is its own pipe; when OTF, the
     * sensor-facing controls below are applied to the 3AA pipe instead. */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_FLITE;
    } else {
        pipeId = PIPE_3AA;
    }

    /* setParam for Frame rate : must after setInput on Flite */
    struct v4l2_streamparm streamParam;
    memset(&streamParam, 0x0, sizeof(v4l2_streamparm));
    streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    streamParam.parm.capture.timeperframe.numerator   = 1;
    streamParam.parm.capture.timeperframe.denominator = frameRate;
    CLOGI("Set framerate (denominator=%d)", frameRate);

    ret = setParam(&streamParam, pipeId);
    if (ret != NO_ERROR) {
        CLOGE("FLITE setParam(frameRate(%d), pipeId(%d)) fail", frameRate, pipeId);
        return INVALID_OPERATION;
    }

    ret = m_setSensorSize(pipeId, hwSensorW, hwSensorH);
    if (ret != NO_ERROR) {
        CLOGE("m_setSensorSize(pipeId(%d), hwSensorW(%d), hwSensorH(%d)) fail", pipeId, hwSensorW, hwSensorH);
        return ret;
    }

    /* FLITE */
    nodeType = getNodeType(PIPE_FLITE);

    /* set v4l2 buffer size */
    /* 32x64 is a minimal dummy size for the leader node — presumably the
     * real geometry comes from the capture nodes below; TODO confirm. */
    tempRect.fullW = 32;
    tempRect.fullH = 64;
    tempRect.colorFormat = bayerFormat;

    /* set v4l2 video node buffer count */
    pipeInfo[nodeType].bufInfo.count = numFrames;

    /* Set output node default info */
    SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_FLITE);

    /* BAYER */
    nodeType = getNodeType(PIPE_VC0);
    perFramePos = PERFRAME_BACK_VC0_POS;

    /* set v4l2 buffer size */
    tempRect.fullW = hwSensorW;
    tempRect.fullH = hwSensorH;
    tempRect.colorFormat = bayerFormat;

    /* packed bayer bytesPerPlane */
    pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat);

    /* set v4l2 video node buffer count */
    pipeInfo[nodeType].bufInfo.count = numFrames;

    /* Set capture node default info */
    SET_CAPTURE_DEVICE_BASIC_INFO();

#ifdef SUPPORT_DEPTH_MAP
    if (m_parameters->isDepthMapSupported()) {
        /* Depth Map Configuration */
        int depthMapW = 0, depthMapH = 0;
        int depthMapFormat = DEPTH_MAP_FORMAT;

        ret = m_parameters->getDepthMapSize(&depthMapW, &depthMapH);
        if (ret != NO_ERROR) {
            CLOGE("Failed to getDepthMapSize");
            return ret;
        }

        CLOGI("DepthMapSize %dx%d", depthMapW, depthMapH);

        tempRect.fullW = depthMapW;
        tempRect.fullH = depthMapH;
        tempRect.colorFormat = depthMapFormat;

        /* Depth data arrives on the second virtual channel (VC1). */
        nodeType = getNodeType(PIPE_VC1);
        pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, tempRect.colorFormat);
        pipeInfo[nodeType].bufInfo.count = numFrames;

        SET_CAPTURE_DEVICE_BASIC_INFO();
    }
#endif

    /* setup pipe info to FLITE pipe */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
        if (ret != NO_ERROR) {
            CLOGE("FLITE setupPipe fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }

        /* clear pipeInfo for next setupPipe */
        for (int i = 0; i < MAX_NODE; i++)
            pipeInfo[i] = nullPipeInfo;
    }

    pipeId = PIPE_3AA;

    /* 3AS */
    nodeType = getNodeType(PIPE_3AA);
    bayerFormat = m_parameters->getBayerFormat(PIPE_3AA);

    /* set v4l2 buffer size */
    /* Dummy leader size again — see FLITE leader node above. */
    tempRect.fullW = 32;
    tempRect.fullH = 64;
    tempRect.colorFormat = bayerFormat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_3AA);

    /* 3AC */
    nodeType = getNodeType(PIPE_3AC);
    perFramePos = PERFRAME_BACK_3AC_POS;
    bayerFormat = m_parameters->getBayerFormat(PIPE_3AC);

    /* set v4l2 buffer size */
    tempRect.fullW = hwSensorW;
    tempRect.fullH = hwSensorH;
    tempRect.colorFormat = bayerFormat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* 3AP */
    nodeType = getNodeType(PIPE_3AP);
    perFramePos = PERFRAME_BACK_3AP_POS;
    bayerFormat = m_parameters->getBayerFormat(PIPE_3AP);

    /* Width/height intentionally inherited from the 3AC setup above;
     * only the format changes for 3AP. */
    tempRect.colorFormat = bayerFormat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* setup pipe info to 3AA pipe */
    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
        if (ret != NO_ERROR) {
            CLOGE("3AA setupPipe fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }

        /* clear pipeInfo for next setupPipe */
        for (int i = 0; i < MAX_NODE; i++)
            pipeInfo[i] = nullPipeInfo;
    }

    /* ISPS */
    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_ISP;
        nodeType = getNodeType(PIPE_ISP);
        bayerFormat = m_parameters->getBayerFormat(PIPE_ISP);

        tempRect.colorFormat = bayerFormat;

        pipeInfo[nodeType].bufInfo.count = numFrames;

        SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_ISP);
    }

    /* ISPC */
    nodeType = getNodeType(PIPE_ISPC);
    perFramePos = PERFRAME_BACK_ISPC_POS;

    tempRect.colorFormat = hwVdisformat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* ISPP */
    nodeType = getNodeType(PIPE_ISPP);
    perFramePos = PERFRAME_BACK_ISPP_POS;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* setup pipe info to ISP pipe */
    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
        if (ret != NO_ERROR) {
            CLOGE("ISP setupPipe fail, ret(%d)", ret);
            /* TODO: exception handling */
            return INVALID_OPERATION;
        }

        /* clear pipeInfo for next setupPipe */
        for (int i = 0; i < MAX_NODE; i++)
            pipeInfo[i] = nullPipeInfo;
    }

    /* MCSC */
    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_MCSC;
        nodeType = getNodeType(PIPE_MCSC);

        pipeInfo[nodeType].bufInfo.count = numFrames;

        SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_MCSC);
    }

    /* MCSC0 */
    nodeType = getNodeType(PIPE_MCSC0);
    perFramePos = PERFRAME_BACK_SCP_POS;

    tempRect.colorFormat = hwPreviewFormat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* MCSC1 */
    nodeType = getNodeType(PIPE_MCSC1);
    perFramePos = PERFRAME_BACK_MCSC1_POS;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* MCSC2 */
    nodeType = getNodeType(PIPE_MCSC2);
    perFramePos = PERFRAME_BACK_MCSC2_POS;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    /* MCSC5 */
    /* Downscaled output feeding VRA (face detection) input. */
    nodeType = getNodeType(PIPE_MCSC5);
    perFramePos = PERFRAME_BACK_MCSC5_POS;

    tempRect.fullW = vraWidth;
    tempRect.fullH = vraHeight;
    tempRect.colorFormat = vraFormat;

    pipeInfo[nodeType].bufInfo.count = numFrames;

    SET_CAPTURE_DEVICE_BASIC_INFO();

    ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
    if (ret != NO_ERROR) {
        CLOGE("MCSC setupPipe fail, ret(%d)", ret);
        /* TODO: exception handling */
        return INVALID_OPERATION;
    }

    /* VRA */
    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        /* NOTE(review): unlike the main init path, pipeInfo is NOT cleared
         * to nullPipeInfo after the MCSC setupPipe above, so stale MCSC
         * capture entries are still present here — confirm the VRA pipe
         * ignores nodes it does not own. */
        pipeId = PIPE_VRA;
        nodeType = getNodeType(PIPE_VRA);

        pipeInfo[nodeType].bufInfo.count = numFrames;

        SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_VRA);

        ret = m_pipes[pipeId]->setupPipe(pipeInfo, m_sensorIds[pipeId]);
        if (ret != NO_ERROR) {
            CLOGE("VRA setupPipe fail, ret(%d)", ret);
            return INVALID_OPERATION;
        }
    }

    return NO_ERROR;
}
/*
 * Build the per-frame camera2_node_group descriptors (leader + capture
 * nodes, with request flags and pixel formats) for every hardware group
 * in the preview chain and store them on the frame.
 *
 * Group boundaries depend on the OTF/M2M connection flags: when two
 * blocks are connected OTF they share one node group, so
 * node_group_info_temp simply keeps accumulating capture nodes; when the
 * link is M2M a fresh group (with its own leader) is started.
 *
 * @param frame  destination frame; receives one node group per
 *               PERFRAME_INFO_* slot plus the current zoom ratio.
 * @return NO_ERROR (sub-steps do not report failure here).
 */
status_t ExynosCameraFrameFactoryPreview::m_fillNodeGroupInfo(ExynosCameraFrameSP_sptr_t frame)
{
    camera2_node_group node_group_info_flite;
    camera2_node_group node_group_info_3aa;
    camera2_node_group node_group_info_isp;
    camera2_node_group node_group_info_mcsc;
    camera2_node_group node_group_info_vra;
    camera2_node_group *node_group_info_temp;

    float zoomRatio = m_parameters->getZoomRatio();
    int pipeId = -1;
    int nodePipeId = -1;
    uint32_t perframePosition = 0;

    int yuvFormat[ExynosCameraParameters::YUV_MAX] = {0};
    int yuvIndex = -1;

    /* Cache the YUV output formats for the three MCSC preview ports. */
    for (int i = ExynosCameraParameters::YUV_0; i < ExynosCameraParameters::YUV_MAX; i++) {
        yuvFormat[i] = m_parameters->getYuvFormat(i);
    }

    memset(&node_group_info_flite, 0x0, sizeof(camera2_node_group));
    memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group));
    memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group));
    memset(&node_group_info_mcsc, 0x0, sizeof(camera2_node_group));
    memset(&node_group_info_vra, 0x0, sizeof(camera2_node_group));

    if (m_flagFlite3aaOTF != HW_CONNECTION_MODE_M2M) {
        /* 3AA */
        /* FLITE is OTF-connected to 3AA: one group led by 3AA carries the
         * FLITE virtual channels and the 3AA capture nodes together. */
        pipeId = PIPE_3AA;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_flite;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getBayerFormat(pipeId);

        nodePipeId = PIPE_VC0;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;

#ifdef SUPPORT_DEPTH_MAP
        /* VC1 for depth */
        if (m_parameters->getUseDepthMap() == true) {
            nodePipeId = PIPE_VC1;
            node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
            node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
            node_group_info_temp->capture[perframePosition].pixelformat = DEPTH_MAP_FORMAT;
            perframePosition++;
        }
#endif // SUPPORT_DEPTH_MAP

        nodePipeId = PIPE_3AC;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;

        nodePipeId = PIPE_3AP;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;
    } else {
        /* FLITE */
        /* FLITE runs M2M: it gets its own group, then a second group is
         * started for 3AA below. */
        pipeId = PIPE_FLITE;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_flite;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getBayerFormat(pipeId);

        nodePipeId = PIPE_VC0;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;

#ifdef SUPPORT_DEPTH_MAP
        /* VC1 for depth */
        if (m_parameters->getUseDepthMap() == true) {
            nodePipeId = PIPE_VC1;
            node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
            node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
            /* BUGFIX: was getBayerFormat(PIPE_VC1); the depth node carries
             * DEPTH_MAP_FORMAT data, matching the OTF branch above and the
             * depth buffer configuration in the pipe init paths. */
            node_group_info_temp->capture[perframePosition].pixelformat = DEPTH_MAP_FORMAT;
            perframePosition++;
        }
#endif // SUPPORT_DEPTH_MAP

        /* 3AA */
        pipeId = PIPE_3AA;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_3aa;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getBayerFormat(pipeId);

        nodePipeId = PIPE_3AC;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;

        nodePipeId = PIPE_3AP;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getBayerFormat(nodePipeId);
        perframePosition++;
    }

    /* ISP */
    /* M2M: ISP starts its own group; OTF: ISPC below joins the 3AA group. */
    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_ISP;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_isp;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getBayerFormat(pipeId);
    }

    nodePipeId = PIPE_ISPC;
    node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
    node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
    node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getHWVdisFormat();
    perframePosition++;

    /* MCSC */
    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        pipeId = PIPE_MCSC;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_mcsc;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getHWVdisFormat();
    }

    /* Three MCSC YUV output ports, each with its configured format. */
    nodePipeId = PIPE_MCSC0;
    node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
    node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
    yuvIndex = ExynosCameraParameters::YUV_0;
    node_group_info_temp->capture[perframePosition].pixelformat = yuvFormat[yuvIndex];
    perframePosition++;

    nodePipeId = PIPE_MCSC1;
    node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
    node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
    yuvIndex = ExynosCameraParameters::YUV_1;
    node_group_info_temp->capture[perframePosition].pixelformat = yuvFormat[yuvIndex];
    perframePosition++;

    nodePipeId = PIPE_MCSC2;
    node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
    node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
    yuvIndex = ExynosCameraParameters::YUV_2;
    node_group_info_temp->capture[perframePosition].pixelformat = yuvFormat[yuvIndex];
    perframePosition++;

    /* VRA */
    /* M2M only: add the MCSC5 (VRA input) capture node to the current
     * group, then open a separate group led by VRA. */
    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        nodePipeId = PIPE_MCSC5;
        node_group_info_temp->capture[perframePosition].request = m_request[nodePipeId];
        node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(nodePipeId)] - FIMC_IS_VIDEO_BAS_NUM;
        node_group_info_temp->capture[perframePosition].pixelformat = m_parameters->getHwVraInputFormat();
        perframePosition++;

        pipeId = PIPE_VRA;
        perframePosition = 0;

        node_group_info_temp = &node_group_info_vra;
        node_group_info_temp->leader.request = 1;
        node_group_info_temp->leader.pixelformat = m_parameters->getHwVraInputFormat();
    }

    frame->setZoomRatio(zoomRatio);

    /* Crop/size fields are filled by updateNodeGroupInfo(); only groups
     * whose upstream link is M2M exist as separate entries. */
    if (m_flagFlite3aaOTF == HW_CONNECTION_MODE_M2M) {
        updateNodeGroupInfo(
                PIPE_FLITE,
                m_parameters,
                &node_group_info_flite);
        frame->storeNodeGroupInfo(&node_group_info_flite, PERFRAME_INFO_FLITE);

        updateNodeGroupInfo(
                PIPE_3AA,
                m_parameters,
                &node_group_info_3aa);
        frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_3AA);
    } else {
        updateNodeGroupInfo(
                PIPE_3AA,
                m_parameters,
                &node_group_info_flite);
        frame->storeNodeGroupInfo(&node_group_info_flite, PERFRAME_INFO_FLITE);
    }

    if (m_flag3aaIspOTF == HW_CONNECTION_MODE_M2M) {
        updateNodeGroupInfo(
                PIPE_ISP,
                m_parameters,
                &node_group_info_isp);
        frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP);
    }

    if (m_flagIspMcscOTF == HW_CONNECTION_MODE_M2M) {
        updateNodeGroupInfo(
                PIPE_MCSC,
                m_parameters,
                &node_group_info_mcsc);
        frame->storeNodeGroupInfo(&node_group_info_mcsc, PERFRAME_INFO_MCSC);
    }

    if (m_flagMcscVraOTF == HW_CONNECTION_MODE_M2M) {
        updateNodeGroupInfo(
                PIPE_VRA,
                m_parameters,
                &node_group_info_vra);
        frame->storeNodeGroupInfo(&node_group_info_vra, PERFRAME_INFO_VRA);
    }

    return NO_ERROR;
}
/*
 * One-time member initialization for the preview factory: allocate the
 * scratch shot-metadata buffer (freed in the destructor) and mark this
 * factory as a non-reprocessing instance.
 */
void ExynosCameraFrameFactoryPreview::m_init(void)
{
    m_shot_ext = new struct camera2_shot_ext;
    m_flagReprocessing = false;
}
}; /* namespace android */