From dce4beb17982faeb6308bd7ee86c684514a3bad3 Mon Sep 17 00:00:00 2001 From: Nipun Kwatra <nkwatra@google.com> Date: Tue, 27 Jul 2010 22:21:44 -0700 Subject: Support for video size to be different from captured picture size. Adding support to allow video size to be different than the supported picture sizes. A picture size larger than the demanded video size is chosen. Captured pictures are then cropped to the size of the desired video size. Cropping is done from the center portion of the picture. Change-Id: I6bcbe16f94b6ecbcf28b7f46826a81b4b6b8cbc3 --- include/media/stagefright/CameraSourceTimeLapse.h | 33 +++++++ media/libstagefright/Android.mk | 1 + media/libstagefright/CameraSourceTimeLapse.cpp | 112 ++++++++++++++++++---- 3 files changed, 127 insertions(+), 19 deletions(-) diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h index fa11b3ed5e57..8ea532c75a09 100644 --- a/include/media/stagefright/CameraSourceTimeLapse.h +++ b/include/media/stagefright/CameraSourceTimeLapse.h @@ -49,6 +49,23 @@ private: // If false, will use the videocamera frames instead. bool mUseStillCameraForTimeLapse; + // Size of picture taken from still camera. This may be larger than the size + // of the video, as still camera may not support the exact video resolution + // demanded. See setPictureSizeToClosestSupported(). + int32_t mPictureWidth; + int32_t mPictureHeight; + + // size of the encoded video. + int32_t mVideoWidth; + int32_t mVideoHeight; + + // True if we need to crop the still camera image to get the video frame. + bool mNeedCropping; + + // Start location of the cropping rectangle. + int32_t mCropRectStartX; + int32_t mCropRectStartY; + // Time between capture of two frames during time lapse recording // Negative value indicates that timelapse is disabled. 
int64_t mTimeBetweenTimeLapseFrameCaptureUs; @@ -107,6 +124,22 @@ private: virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, const sp<IMemory> &data); + // The still camera may not support the demanded video width and height. + // We look for the supported picture sizes from the still camera and + // choose the size with both dimensions higher than the corresponding video + // dimensions. The still picture will be cropped to get the video frame. + void setPictureSizeToClosestSupported(int32_t width, int32_t height); + + // Computes the offset of the rectangle from where to start cropping the + // still image into the video frame. We choose the center of the image to be + // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY). + bool computeCropRectangleOffset(); + + // Crops the source data into a smaller image starting at + // (mCropRectStartX, mCropRectStartY) and of the size of the video frame. + // The data is returned into a newly allocated IMemory. + sp<IMemory> cropYUVImage(const sp<IMemory> &source_data); + // When video camera is used for time lapse capture, returns true // until enough time has passed for the next time lapse frame. 
When // the frame needs to be encoded, it returns false and also modifies diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index bf5643d00e86..0708eec6f535 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -57,6 +57,7 @@ LOCAL_SHARED_LIBRARIES := \ libsonivox \ libvorbisidec \ libsurfaceflinger_client \ + libstagefright_yuv \ libcamera_client LOCAL_STATIC_LIBRARIES := \ diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp index c6186f61a129..a01450bc160f 100644 --- a/media/libstagefright/CameraSourceTimeLapse.cpp +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -24,9 +24,13 @@ #include <media/stagefright/CameraSource.h> #include <media/stagefright/CameraSourceTimeLapse.h> #include <media/stagefright/MediaDebug.h> +#include <media/stagefright/YUVCanvas.h> +#include <media/stagefright/YUVImage.h> #include <camera/Camera.h> #include <camera/CameraParameters.h> +#include <ui/Rect.h> #include <utils/String8.h> +#include "OMX_Video.h" namespace android { @@ -72,7 +76,11 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera, mSkipCurrentFrame(false) { LOGV("starting time lapse mode"); - if(mUseStillCameraForTimeLapse) { + mVideoWidth = width; + mVideoHeight = height; + if (mUseStillCameraForTimeLapse) { + setPictureSizeToClosestSupported(width, height); + mNeedCropping = computeCropRectangleOffset(); mMeta->setInt32(kKeyWidth, width); mMeta->setInt32(kKeyHeight, height); } @@ -81,6 +89,31 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera, CameraSourceTimeLapse::~CameraSourceTimeLapse() { } +void CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) { + // TODO: Currently fixed to the highest resolution. + // Need to poll the camera and set accordingly. 
+ mPictureWidth = 2048; + mPictureHeight = 1536; +} + +bool CameraSourceTimeLapse::computeCropRectangleOffset() { + if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) { + return false; + } + + CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight)); + + int32_t widthDifference = mPictureWidth - mVideoWidth; + int32_t heightDifference = mPictureHeight - mVideoHeight; + + mCropRectStartX = widthDifference/2; + mCropRectStartY = heightDifference/2; + + LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY); + + return true; +} + // static void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); @@ -90,7 +123,7 @@ void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { void CameraSourceTimeLapse::threadTimeLapseEntry() { while(mStarted) { - if(mCameraIdle) { + if (mCameraIdle) { LOGV("threadTimeLapseEntry: taking picture"); CHECK_EQ(OK, mCamera->takePicture()); mCameraIdle = false; @@ -103,20 +136,15 @@ void CameraSourceTimeLapse::threadTimeLapseEntry() { } void CameraSourceTimeLapse::startCameraRecording() { - if(mUseStillCameraForTimeLapse) { + if (mUseStillCameraForTimeLapse) { LOGV("start time lapse recording using still camera"); - int32_t width; - int32_t height; - mMeta->findInt32(kKeyWidth, &width); - mMeta->findInt32(kKeyHeight, &height); - int64_t token = IPCThreadState::self()->clearCallingIdentity(); String8 s = mCamera->getParameters(); IPCThreadState::self()->restoreCallingIdentity(token); CameraParameters params(s); - params.setPictureSize(width, height); + params.setPictureSize(mPictureWidth, mPictureHeight); mCamera->setParameters(params.flatten()); mCameraIdle = true; @@ -134,7 +162,7 @@ void CameraSourceTimeLapse::startCameraRecording() { } void CameraSourceTimeLapse::stopCameraRecording() { - if(mUseStillCameraForTimeLapse) { + if (mUseStillCameraForTimeLapse) { void *dummy; pthread_join(mThreadTimeLapse, &dummy); } else 
{ @@ -143,7 +171,7 @@ } void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) { - if(!mUseStillCameraForTimeLapse) { + if (!mUseStillCameraForTimeLapse) { mCamera->releaseRecordingFrame(frame); } } @@ -158,6 +186,13 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_d return newMemory; } +// Allocates IMemory of final type MemoryBase with the given size. +sp<IMemory> allocateIMemory(size_t size) { + sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); + sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); + return newMemory; +} + // static void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); @@ -182,12 +217,45 @@ void CameraSourceTimeLapse::restartPreview() { pthread_attr_destroy(&attr); } +sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { + // find the YUV format + int32_t srcFormat; + CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); + YUVImage::YUVFormat yuvFormat; + if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + yuvFormat = YUVImage::YUV420SemiPlanar; + } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) { + yuvFormat = YUVImage::YUV420Planar; + } + + // allocate memory for cropped image and setup a canvas using it. 
+ sp<IMemory> croppedImageMemory = allocateIMemory( + YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); + YUVImage yuvImageCropped(yuvFormat, + mVideoWidth, mVideoHeight, + (uint8_t *)croppedImageMemory->pointer()); + YUVCanvas yuvCanvasCrop(yuvImageCropped); + + YUVImage yuvImageSource(yuvFormat, + mPictureWidth, mPictureHeight, + (uint8_t *)source_data->pointer()); + yuvCanvasCrop.CopyImageRect( + Rect(mCropRectStartX, mCropRectStartY, + mCropRectStartX + mVideoWidth, + mCropRectStartY + mVideoHeight), + 0, 0, + yuvImageSource); + + return croppedImageMemory; +} + void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { - if(msgType == CAMERA_MSG_COMPRESSED_IMAGE) { + if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { // takePicture will complete after this callback, so restart preview. restartPreview(); + return; } - if(msgType != CAMERA_MSG_RAW_IMAGE) { + if (msgType != CAMERA_MSG_RAW_IMAGE) { return; } @@ -200,12 +268,18 @@ void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &dat } else { timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; } - sp<IMemory> dataCopy = createIMemoryCopy(data); - dataCallbackTimestamp(timestampUs, msgType, dataCopy); + + if (mNeedCropping) { + sp<IMemory> croppedImageData = cropYUVImage(data); + dataCallbackTimestamp(timestampUs, msgType, croppedImageData); + } else { + sp<IMemory> dataCopy = createIMemoryCopy(data); + dataCallbackTimestamp(timestampUs, msgType, dataCopy); + } } bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { - if(mSkipCurrentFrame) { + if (mSkipCurrentFrame) { mSkipCurrentFrame = false; return true; } else { @@ -214,8 +288,8 @@ bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { } bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { - if(!mUseStillCameraForTimeLapse) { - if(mLastTimeLapseFrameRealTimestampUs == 0) { + if (!mUseStillCameraForTimeLapse) { + if (mLastTimeLapseFrameRealTimestampUs == 0) { + // First time lapse 
frame. Initialize mLastTimeLapseFrameRealTimestampUs // to current time (timestampUs) and save frame data. LOGV("dataCallbackTimestamp timelapse: initial frame"); @@ -244,7 +318,7 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, const sp<IMemory> &data) { - if(!mUseStillCameraForTimeLapse) { + if (!mUseStillCameraForTimeLapse) { mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs); } CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); -- cgit v1.2.3-59-g8ed1b