-rw-r--r--  api/current.txt                                                        |   1
-rw-r--r--  core/java/android/hardware/camera2/CameraCharacteristics.java          |  73
-rw-r--r--  core/java/android/hardware/camera2/CameraDevice.java                   |  18
-rw-r--r--  core/java/android/hardware/camera2/CaptureRequest.java                 |  16
-rw-r--r--  core/java/android/hardware/camera2/CaptureResult.java                  |  16
-rw-r--r--  core/java/android/hardware/camera2/impl/CameraMetadataNative.java      |  22
-rw-r--r--  core/java/android/hardware/camera2/params/StreamConfigurationMap.java  | 151
-rw-r--r--  core/jni/android_view_Surface.cpp                                      |  43
-rw-r--r--  core/jni/include/android_runtime/android_view_Surface.h                |   3
-rw-r--r--  graphics/java/android/graphics/ImageFormat.java                        |   9
-rw-r--r--  media/java/android/media/Image.java                                    |   7
-rw-r--r--  media/java/android/media/ImageReader.java                              |   2
-rw-r--r--  media/java/android/media/ImageUtils.java                               |   7
-rw-r--r--  media/jni/android_media_Utils.cpp                                      |  24
-rw-r--r--  media/jni/android_media_Utils.h                                        |   2
15 files changed, 342 insertions(+), 52 deletions(-)
diff --git a/api/current.txt b/api/current.txt
index 98d30a3fd8eb..0db96fabf30a 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -14172,6 +14172,7 @@ package android.graphics {
field public static final int DEPTH_POINT_CLOUD = 257; // 0x101
field public static final int FLEX_RGBA_8888 = 42; // 0x2a
field public static final int FLEX_RGB_888 = 41; // 0x29
+ field public static final int HEIC = 1212500294; // 0x48454946
field public static final int JPEG = 256; // 0x100
field public static final int NV16 = 16; // 0x10
field public static final int NV21 = 17; // 0x11
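The value of the new constant, 1212500294 (0x48454946), is just the format's four ASCII bytes packed into an int. A minimal, purely illustrative sketch (not part of the change itself) that decodes it:

    // Decode ImageFormat.HEIC (0x48454946) as a FourCC to show where the
    // numeric value in the signature above comes from.
    public class HeicFourCc {
        public static void main(String[] args) {
            int heic = 0x48454946; // 1212500294, the value added above
            StringBuilder fourCc = new StringBuilder(4);
            for (int shift = 24; shift >= 0; shift -= 8) {
                fourCc.append((char) ((heic >> shift) & 0xFF));
            }
            System.out.println(fourCc); // prints "HEIF"
        }
    }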
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 1881a0cd32e8..0e4ff78af1e0 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -1159,9 +1159,10 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <li>Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
* one corresponding size that has the same aspect ratio in availableThumbnailSizes,
* and vice versa.</li>
- * <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.
- * This key is available on all devices.</li>
+ * <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.</li>
* </ul>
+ * <p>This list is also used as the supported thumbnail sizes for HEIC image format capture.</p>
+ * <p>This key is available on all devices.</p>
*
* @see CaptureRequest#JPEG_THUMBNAIL_SIZE
*/
@@ -3838,6 +3839,74 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
public static final Key<int[]> DISTORTION_CORRECTION_AVAILABLE_MODES =
new Key<int[]>("android.distortionCorrection.availableModes", int[].class);
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12) stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>If the camera device supports the HEIC image format, it will support the same set of
+ * stream combinations involving HEIC as it does for JPEG, as required by the device's
+ * hardware level and capabilities.</p>
+ * <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfiguration[]> HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.heic.availableHeicStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
+
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC output formats.</p>
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
+ * android.scaler.availableStallDurations for more details about
+ * calculating the max frame rate.</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.heic.availableHeicMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC streams.</p>
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * android.scaler.availableStallDurations for HEIC
+ * streams.</p>
+ * <p>All HEIC output stream formats may have a nonzero stall
+ * duration.</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> HEIC_AVAILABLE_HEIC_STALL_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.heic.availableHeicStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
/*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
* End generated code
*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
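The three HEIC_AVAILABLE_HEIC_* keys above are @hide and are folded into the public StreamConfigurationMap, so applications read them indirectly. A minimal sketch, assuming a HEIC-capable device and an already-obtained CameraCharacteristics (class and variable names are placeholders):

    import android.graphics.ImageFormat;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.params.StreamConfigurationMap;
    import android.util.Log;
    import android.util.Size;

    class HeicConfigDump {
        static void dump(CameraCharacteristics characteristics) {
            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            Size[] heicSizes = map.getOutputSizes(ImageFormat.HEIC);
            if (heicSizes == null) {
                return; // device does not advertise HEIC outputs
            }
            for (Size size : heicSizes) {
                // Backed by the android.heic.availableHeicMinFrameDurations and
                // android.heic.availableHeicStallDurations entries surfaced above.
                long minFrameNs = map.getOutputMinFrameDuration(ImageFormat.HEIC, size);
                long stallNs = map.getOutputStallDuration(ImageFormat.HEIC, size);
                Log.d("HeicConfigDump", size + ": minFrame=" + minFrameNs
                        + "ns stall=" + stallNs + "ns");
            }
        }
    }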
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index 9c213f2f27a5..20fc53fee2e6 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -356,12 +356,6 @@ public abstract class CameraDevice implements AutoCloseable {
* </table><br>
* </p>
*
- * <p>MONOCHROME-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}
- * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME}) devices
- * supporting {@link android.graphics.ImageFormat#Y8 Y8} support substituting {@code YUV}
- * streams with {@code Y8} in all guaranteed stream combinations for the device's hardware level
- * and capabilities.</p>
- *
* <p>FULL-level ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
* {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}) devices
* support at least the following stream combinations in addition to those for
@@ -435,6 +429,18 @@ public abstract class CameraDevice implements AutoCloseable {
* </table><br>
* </p>
*
+ * <p>MONOCHROME-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}
+ * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME}) devices
+ * supporting {@link android.graphics.ImageFormat#Y8 Y8} support substituting {@code YUV}
+ * streams with {@code Y8} in all guaranteed stream combinations for the device's hardware level
+ * and capabilities.</p>
+ *
+ * <p>Devices capable of outputting HEIC formats ({@link StreamConfigurationMap#getOutputFormats}
+ * contains {@link android.graphics.ImageFormat#HEIC}) will support substituting {@code JPEG}
+ * streams with {@code HEIC} in all guaranteed stream combinations for the device's hardware
+ * level and capabilities. Calling createCaptureSession with both JPEG and HEIC outputs is not
+ * supported.</p>
+ *
* <p>Clients can access the above mandatory stream combination tables via
* {@link android.hardware.camera2.params.MandatoryStreamCombination}.</p>
*
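Per the added paragraph, a HEIC output can stand in wherever a guaranteed combination lists JPEG. A hedged sketch of configuring a preview plus HEIC still-capture session (assumes HEIC support has already been verified via StreamConfigurationMap#getOutputFormats; device, previewSurface, width, height, and handler are placeholders supplied by the app, with the usual android.* imports):

    ImageReader heicReader = ImageReader.newInstance(
            width, height, ImageFormat.HEIC, /*maxImages*/ 2);
    List<Surface> outputs = Arrays.asList(previewSurface, heicReader.getSurface());
    device.createCaptureSession(outputs, new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(CameraCaptureSession session) {
            // Ready for preview and HEIC still-capture requests.
        }

        @Override
        public void onConfigureFailed(CameraCaptureSession session) {
            // Note that JPEG and HEIC outputs cannot be mixed in one session (see above).
        }
    }, handler);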
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 3d3a916bae7a..525070103c2c 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -2126,6 +2126,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* <p>Setting a location object in a request will include the GPS coordinates of the location
* into any JPEG images captured based on the request. These coordinates can then be
* viewed by anyone who receives the JPEG image.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p>This key is available on all devices.</p>
*/
@PublicKey
@@ -2136,6 +2137,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>GPS coordinates to include in output JPEG
* EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* (-180 - 180], [-90,90], [-inf, inf]</p>
* <p>This key is available on all devices.</p>
@@ -2147,6 +2149,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>32 characters describing GPS algorithm to
* include in EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p>This key is available on all devices.</p>
* @hide
*/
@@ -2156,6 +2159,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Time GPS fix was made to include in
* EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
* <p>This key is available on all devices.</p>
* @hide
@@ -2195,6 +2199,10 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* </code></pre>
* <p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
* also be set to EXTERNAL. The above code is not relevant in such case.</p>
+ * <p>This tag is also used to describe the orientation of the HEIC image capture, in which
+ * case the rotation is reflected by the
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by
+ * rotating the image data itself.</p>
* <p><b>Units</b>: Degrees in multiples of 90</p>
* <p><b>Range of valid values:</b><br>
* 0, 90, 180, 270</p>
@@ -2209,7 +2217,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Compression quality of the final JPEG
* image.</p>
- * <p>85-95 is typical usage range.</p>
+ * <p>85-95 is the typical usage range. This tag is also used to describe the quality
+ * of the HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* 1-100; larger is higher quality</p>
* <p>This key is available on all devices.</p>
@@ -2221,6 +2230,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Compression quality of JPEG
* thumbnail.</p>
+ * <p>This tag is also used to describe the quality of the HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* 1-100; larger is higher quality</p>
* <p>This key is available on all devices.</p>
@@ -2253,6 +2263,10 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* orientation is requested. LEGACY device will always report unrotated thumbnail
* size.</li>
* </ul>
+ * <p>The tag is also used as the thumbnail size for HEIC image format capture, in which case
+ * the thumbnail rotation is reflected by the
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by
+ * rotating the thumbnail data itself.</p>
* <p><b>Range of valid values:</b><br>
* {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
* <p>This key is available on all devices.</p>
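Because the JPEG_* request keys above double as the HEIC controls, a still-capture request targeting a HEIC output is built exactly like a JPEG one. A minimal sketch (device, session, heicReader, handler, and location are placeholders from the surrounding sketches; the thumbnail size must come from JPEG_AVAILABLE_THUMBNAIL_SIZES):

    CaptureRequest.Builder builder =
            device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    builder.addTarget(heicReader.getSurface());
    builder.set(CaptureRequest.JPEG_ORIENTATION, 90);          // recorded as the EXIF flag
    builder.set(CaptureRequest.JPEG_QUALITY, (byte) 95);       // HEIC compression quality
    builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, new Size(320, 240));
    builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);   // written into the HEIC EXIF
    session.capture(builder.build(), /*callback*/ null, handler);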
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 8982b40be29b..13ad092f6efd 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -2450,6 +2450,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <p>Setting a location object in a request will include the GPS coordinates of the location
* into any JPEG images captured based on the request. These coordinates can then be
* viewed by anyone who receives the JPEG image.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p>This key is available on all devices.</p>
*/
@PublicKey
@@ -2460,6 +2461,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>GPS coordinates to include in output JPEG
* EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* (-180 - 180], [-90,90], [-inf, inf]</p>
* <p>This key is available on all devices.</p>
@@ -2471,6 +2473,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>32 characters describing GPS algorithm to
* include in EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p>This key is available on all devices.</p>
* @hide
*/
@@ -2480,6 +2483,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Time GPS fix was made to include in
* EXIF.</p>
+ * <p>This tag is also used for HEIC image capture.</p>
* <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
* <p>This key is available on all devices.</p>
* @hide
@@ -2519,6 +2523,10 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* </code></pre>
* <p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
* also be set to EXTERNAL. The above code is not relevant in such case.</p>
+ * <p>This tag is also used to describe the orientation of the HEIC image capture, in which
+ * case the rotation is reflected by the
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by
+ * rotating the image data itself.</p>
* <p><b>Units</b>: Degrees in multiples of 90</p>
* <p><b>Range of valid values:</b><br>
* 0, 90, 180, 270</p>
@@ -2533,7 +2541,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Compression quality of the final JPEG
* image.</p>
- * <p>85-95 is typical usage range.</p>
+ * <p>85-95 is the typical usage range. This tag is also used to describe the quality
+ * of the HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* 1-100; larger is higher quality</p>
* <p>This key is available on all devices.</p>
@@ -2545,6 +2554,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Compression quality of JPEG
* thumbnail.</p>
+ * <p>This tag is also used to describe the quality of the HEIC image capture.</p>
* <p><b>Range of valid values:</b><br>
* 1-100; larger is higher quality</p>
* <p>This key is available on all devices.</p>
@@ -2577,6 +2587,10 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* orientation is requested. LEGACY device will always report unrotated thumbnail
* size.</li>
* </ul>
+ * <p>The tag is also used as the thumbnail size for HEIC image format capture, in which case
+ * the thumbnail rotation is reflected by the
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by
+ * rotating the thumbnail data itself.</p>
* <p><b>Range of valid values:</b><br>
* {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
* <p>This key is available on all devices.</p>
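Since a HEIC capture records the requested rotation in the EXIF orientation flag rather than in the pixel data, a consumer that has written the bytes to disk would typically read the flag back before display. A minimal sketch (the file path is hypothetical):

    ExifInterface exif = new ExifInterface("/sdcard/DCIM/still.heic"); // hypothetical path
    int orientation = exif.getAttributeInt(
            ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
    // e.g. ExifInterface.ORIENTATION_ROTATE_90 when JPEG_ORIENTATION was 90 in the request.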
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index 7877a4d51313..65026b6feb9f 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -1133,6 +1133,9 @@ public class CameraMetadataNative implements Parcelable {
/*dynamicDepthConfigurations*/ null,
/*dynamicDepthMinFrameDurations*/ null,
/*dynamicDepthStallDurations*/ null,
+ /*heicconfiguration*/ null,
+ /*heicminduration*/ null,
+ /*heicstallduration*/ null,
/*highspeedvideoconfigurations*/ null,
/*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
break;
@@ -1144,6 +1147,9 @@ public class CameraMetadataNative implements Parcelable {
/*dynamicDepthConfigurations*/ null,
/*dynamicDepthMinFrameDurations*/ null,
/*dynamicDepthStallDurations*/ null,
+ /*heicconfiguration*/ null,
+ /*heicminduration*/ null,
+ /*heicstallduration*/ null,
highSpeedVideoConfigurations,
/*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
break;
@@ -1155,6 +1161,9 @@ public class CameraMetadataNative implements Parcelable {
/*dynamicDepthConfigurations*/ null,
/*dynamicDepthMinFrameDurations*/ null,
/*dynamicDepthStallDurations*/ null,
+ /*heicconfiguration*/ null,
+ /*heicminduration*/ null,
+ /*heicstallduration*/ null,
/*highSpeedVideoConfigurations*/ null,
inputOutputFormatsMap, listHighResolution, supportsPrivate[i]);
break;
@@ -1166,6 +1175,9 @@ public class CameraMetadataNative implements Parcelable {
/*dynamicDepthConfigurations*/ null,
/*dynamicDepthMinFrameDurations*/ null,
/*dynamicDepthStallDurations*/ null,
+ /*heicconfiguration*/ null,
+ /*heicminduration*/ null,
+ /*heicstallduration*/ null,
/*highSpeedVideoConfigurations*/ null,
/*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]);
}
@@ -1230,6 +1242,12 @@ public class CameraMetadataNative implements Parcelable {
CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
StreamConfigurationDuration[] dynamicDepthStallDurations = getBase(
CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
+ StreamConfiguration[] heicConfigurations = getBase(
+ CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ StreamConfigurationDuration[] heicMinFrameDurations = getBase(
+ CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS);
+ StreamConfigurationDuration[] heicStallDurations = getBase(
+ CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS);
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
ReprocessFormatsMap inputOutputFormatsMap = getBase(
@@ -1239,7 +1257,9 @@ public class CameraMetadataNative implements Parcelable {
configurations, minFrameDurations, stallDurations,
depthConfigurations, depthMinFrameDurations, depthStallDurations,
dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
- dynamicDepthStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap,
+ dynamicDepthStallDurations, heicConfigurations,
+ heicMinFrameDurations, heicStallDurations,
+ highSpeedVideoConfigurations, inputOutputFormatsMap,
listHighResolution);
}
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
index a22e008a65fd..996f9978a612 100644
--- a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -79,6 +79,22 @@ public final class StreamConfigurationMap {
* @param configurations a non-{@code null} array of {@link StreamConfiguration}
* @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
* @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
+ * @param depthMinFrameDurations a non-{@code null} array of depth
+ * {@link StreamConfigurationDuration}
+ * @param depthStallDurations a non-{@code null} array of depth
+ * {@link StreamConfigurationDuration}
+ * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfiguration}
+ * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfigurationDuration}
+ * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfigurationDuration}
+ * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
+ * @param heicMinFrameDurations a non-{@code null} array of heic
+ * {@link StreamConfigurationDuration}
+ * @param heicStallDurations a non-{@code null} array of heic
+ * {@link StreamConfigurationDuration}
* @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
* camera device does not support high speed video recording
* @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
@@ -98,14 +114,19 @@ public final class StreamConfigurationMap {
StreamConfiguration[] dynamicDepthConfigurations,
StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
StreamConfigurationDuration[] dynamicDepthStallDurations,
+ StreamConfiguration[] heicConfigurations,
+ StreamConfigurationDuration[] heicMinFrameDurations,
+ StreamConfigurationDuration[] heicStallDurations,
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
ReprocessFormatsMap inputOutputFormatsMap,
boolean listHighResolution) {
this(configurations, minFrameDurations, stallDurations,
depthConfigurations, depthMinFrameDurations, depthStallDurations,
dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
- dynamicDepthStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap,
- listHighResolution, /*enforceImplementationDefined*/ true);
+ dynamicDepthStallDurations,
+ heicConfigurations, heicMinFrameDurations, heicStallDurations,
+ highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
+ /*enforceImplementationDefined*/ true);
}
/**
@@ -117,6 +138,22 @@ public final class StreamConfigurationMap {
* @param configurations a non-{@code null} array of {@link StreamConfiguration}
* @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
* @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
+ * @param depthMinFrameDurations a non-{@code null} array of depth
+ * {@link StreamConfigurationDuration}
+ * @param depthStallDurations a non-{@code null} array of depth
+ * {@link StreamConfigurationDuration}
+ * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfiguration}
+ * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfigurationDuration}
+ * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
+ * {@link StreamConfigurationDuration}
+ * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
+ * @param heicMinFrameDurations a non-{@code null} array of heic
+ * {@link StreamConfigurationDuration}
+ * @param heicStallDurations a non-{@code null} array of heic
+ * {@link StreamConfigurationDuration}
* @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
* camera device does not support high speed video recording
* @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
@@ -138,14 +175,23 @@ public final class StreamConfigurationMap {
StreamConfiguration[] dynamicDepthConfigurations,
StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
StreamConfigurationDuration[] dynamicDepthStallDurations,
+ StreamConfiguration[] heicConfigurations,
+ StreamConfigurationDuration[] heicMinFrameDurations,
+ StreamConfigurationDuration[] heicStallDurations,
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
ReprocessFormatsMap inputOutputFormatsMap,
boolean listHighResolution,
boolean enforceImplementationDefined) {
+ if (configurations == null &&
+ depthConfigurations == null &&
+ heicConfigurations == null) {
+ throw new NullPointerException("At least one of color/depth/heic configurations " +
+ "must not be null");
+ }
+
if (configurations == null) {
// If no color configurations exist, ensure depth ones do
- checkArrayElementsNotNull(depthConfigurations, "depthConfigurations");
mConfigurations = new StreamConfiguration[0];
mMinFrameDurations = new StreamConfigurationDuration[0];
mStallDurations = new StreamConfigurationDuration[0];
@@ -183,6 +229,19 @@ public final class StreamConfigurationMap {
"dynamicDepthStallDurations");
}
+ if (heicConfigurations == null) {
+ mHeicConfigurations = new StreamConfiguration[0];
+ mHeicMinFrameDurations = new StreamConfigurationDuration[0];
+ mHeicStallDurations = new StreamConfigurationDuration[0];
+ } else {
+ mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
+ "heicConfigurations");
+ mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
+ "heicMinFrameDurations");
+ mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
+ "heicStallDurations");
+ }
+
if (highSpeedVideoConfigurations == null) {
mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
} else {
@@ -235,6 +294,17 @@ public final class StreamConfigurationMap {
mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
}
+ // For each heic format, track how many sizes there are available to configure
+ for (StreamConfiguration config : mHeicConfigurations) {
+ if (!config.isOutput()) {
+ // Ignoring input heic configs
+ continue;
+ }
+
+ mHeicOutputFormats.put(config.getFormat(),
+ mHeicOutputFormats.get(config.getFormat()) + 1);
+ }
+
if (configurations != null && enforceImplementationDefined &&
mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
throw new AssertionError(
@@ -302,7 +372,16 @@ public final class StreamConfigurationMap {
if (mInputOutputFormatsMap == null) {
return new int[0];
}
- return mInputOutputFormatsMap.getOutputs(inputFormat);
+
+ int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
+ if (mHeicOutputFormats.size() > 0) {
+ // All reprocessing format maps contain JPEG.
+ int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
+ outputsWithHeic[outputs.length] = ImageFormat.HEIC;
+ return outputsWithHeic;
+ } else {
+ return outputs;
+ }
}
/**
@@ -366,6 +445,8 @@ public final class StreamConfigurationMap {
return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
} else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
+ } else if (dataspace == HAL_DATASPACE_HEIF) {
+ return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
} else {
return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
}
@@ -479,6 +560,7 @@ public final class StreamConfigurationMap {
StreamConfiguration[] configs =
surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
+ surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
mConfigurations;
for (StreamConfiguration config : configs) {
if (config.getFormat() == surfaceFormat && config.isOutput()) {
@@ -512,9 +594,10 @@ public final class StreamConfigurationMap {
int dataspace = imageFormatToDataspace(format);
StreamConfiguration[] configs =
- dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
- dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
- mConfigurations;
+ dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
+ dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
+ dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
+ mConfigurations;
for (StreamConfiguration config : configs) {
if ((config.getFormat() == internalFormat) && config.isOutput() &&
config.getSize().equals(size)) {
@@ -1033,6 +1116,9 @@ public final class StreamConfigurationMap {
Arrays.equals(mDynamicDepthMinFrameDurations,
other.mDynamicDepthMinFrameDurations) &&
Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
+ Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
+ Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
+ Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
Arrays.equals(mHighSpeedVideoConfigurations,
other.mHighSpeedVideoConfigurations);
}
@@ -1049,7 +1135,9 @@ public final class StreamConfigurationMap {
mConfigurations, mMinFrameDurations, mStallDurations,
mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
- mDynamicDepthStallDurations, mHighSpeedVideoConfigurations);
+ mDynamicDepthStallDurations, mHeicConfigurations,
+ mHeicMinFrameDurations, mHeicStallDurations,
+ mHighSpeedVideoConfigurations);
}
// Check that the argument is supported by #getOutputFormats or #getInputFormats
@@ -1068,6 +1156,10 @@ public final class StreamConfigurationMap {
if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
}
+ } else if (internalDataspace == HAL_DATASPACE_HEIF) {
+ if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
+ return format;
+ }
} else {
if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
return format;
@@ -1108,8 +1200,9 @@ public final class StreamConfigurationMap {
case HAL_PIXEL_FORMAT_Y16:
return format;
case ImageFormat.JPEG:
+ case ImageFormat.HEIC:
throw new IllegalArgumentException(
- "ImageFormat.JPEG is an unknown internal format");
+ "An unknown internal format: " + format);
default:
return checkArgumentFormat(format);
}
@@ -1267,6 +1360,8 @@ public final class StreamConfigurationMap {
* <ul>
* <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
* <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
+ * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
+ * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
* <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
* </ul>
* </p>
@@ -1292,6 +1387,7 @@ public final class StreamConfigurationMap {
case ImageFormat.JPEG:
case ImageFormat.DEPTH_POINT_CLOUD:
case ImageFormat.DEPTH_JPEG:
+ case ImageFormat.HEIC:
return HAL_PIXEL_FORMAT_BLOB;
case ImageFormat.DEPTH16:
return HAL_PIXEL_FORMAT_Y16;
@@ -1312,6 +1408,7 @@ public final class StreamConfigurationMap {
* <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
* <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
* <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
+ * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
* <li>others => HAL_DATASPACE_UNKNOWN
* </ul>
* </p>
@@ -1343,6 +1440,8 @@ public final class StreamConfigurationMap {
return HAL_DATASPACE_DEPTH;
case ImageFormat.DEPTH_JPEG:
return HAL_DATASPACE_DYNAMIC_DEPTH;
+ case ImageFormat.HEIC:
+ return HAL_DATASPACE_HEIF;
default:
return HAL_DATASPACE_UNKNOWN;
}
@@ -1394,14 +1493,17 @@ public final class StreamConfigurationMap {
!output ? mInputFormats :
dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
+ dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
highRes ? mHighResOutputFormats :
mOutputFormats;
int sizesCount = formatsMap.get(format);
if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH ||
- dataspace == HAL_DATASPACE_DYNAMIC_DEPTH)) && sizesCount == 0) ||
+ dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
+ dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) ||
(output && (dataspace != HAL_DATASPACE_DEPTH &&
- dataspace != HAL_DATASPACE_DYNAMIC_DEPTH) &&
+ dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
+ dataspace != HAL_DATASPACE_HEIF) &&
mAllOutputFormats.get(format) == 0)) {
// Only throw if this is really not supported at all
throw new IllegalArgumentException("format not available");
@@ -1413,10 +1515,12 @@ public final class StreamConfigurationMap {
StreamConfiguration[] configurations =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
(dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
+ (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
mConfigurations;
StreamConfigurationDuration[] minFrameDurations =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
(dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
+ (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
mMinFrameDurations;
for (StreamConfiguration config : configurations) {
@@ -1445,7 +1549,8 @@ public final class StreamConfigurationMap {
}
// Dynamic depth and HEIC streams can have both fast and high res modes.
- if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
+ dataspace == HAL_DATASPACE_HEIF)) {
if (sizeIndex > sizesCount) {
throw new AssertionError(
@@ -1485,6 +1590,9 @@ public final class StreamConfigurationMap {
// Only one publicly dynamic depth format is available.
formats[i++] = ImageFormat.DEPTH_JPEG;
}
+ if (mHeicOutputFormats.size() > 0) {
+ formats[i++] = ImageFormat.HEIC;
+ }
}
if (formats.length != i) {
throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
@@ -1529,10 +1637,14 @@ public final class StreamConfigurationMap {
case DURATION_MIN_FRAME:
return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
(dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
- mDynamicDepthMinFrameDurations : mMinFrameDurations;
+ mDynamicDepthMinFrameDurations :
+ (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
+ mMinFrameDurations;
+
case DURATION_STALL:
return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
(dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
+ (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
mStallDurations;
default:
throw new IllegalArgumentException("duration was invalid");
@@ -1546,6 +1658,7 @@ public final class StreamConfigurationMap {
if (output) {
size += mDepthOutputFormats.size();
size += mDynamicDepthOutputFormats.size();
+ size += mHeicOutputFormats.size();
}
return size;
@@ -1569,6 +1682,7 @@ public final class StreamConfigurationMap {
StreamConfiguration[] configurations =
(dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
(dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
+ (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
mConfigurations;
for (int i = 0; i < configurations.length; i++) {
@@ -1767,6 +1881,8 @@ public final class StreamConfigurationMap {
return "RAW_DEPTH";
case ImageFormat.PRIVATE:
return "PRIVATE";
+ case ImageFormat.HEIC:
+ return "HEIC";
default:
return "UNKNOWN";
}
@@ -1795,7 +1911,7 @@ public final class StreamConfigurationMap {
private static final int HAL_DATASPACE_DEPTH = 0x1000;
private static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
-
+ private static final int HAL_DATASPACE_HEIF = 0x1003;
private static final long DURATION_20FPS_NS = 50000000L;
/**
* @see #getDurations(int, int)
@@ -1815,6 +1931,10 @@ public final class StreamConfigurationMap {
private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
private final StreamConfigurationDuration[] mDynamicDepthStallDurations;
+ private final StreamConfiguration[] mHeicConfigurations;
+ private final StreamConfigurationDuration[] mHeicMinFrameDurations;
+ private final StreamConfigurationDuration[] mHeicStallDurations;
+
private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
private final ReprocessFormatsMap mInputOutputFormatsMap;
@@ -1834,6 +1954,9 @@ public final class StreamConfigurationMap {
private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
/** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
+ /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
+ private final SparseIntArray mHeicOutputFormats = new SparseIntArray();
+
/** High speed video Size -> FPS range count mapping*/
private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
new HashMap<Size, Integer>();
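The getValidOutputFormatsForInput() change above appends ImageFormat.HEIC to the output list of every reprocessable input format whenever the device lists HEIC outputs. A hedged sketch of checking for PRIVATE-to-HEIC reprocessing (reuses the map from the earlier sketch; reprocessing support itself is device-dependent):

    boolean privateInput = false;
    for (int f : map.getInputFormats()) {
        if (f == ImageFormat.PRIVATE) {
            privateInput = true;
            break;
        }
    }
    boolean heicReprocess = false;
    if (privateInput) {
        for (int output : map.getValidOutputFormatsForInput(ImageFormat.PRIVATE)) {
            if (output == ImageFormat.HEIC) {
                heicReprocess = true; // PRIVATE -> HEIC reprocessing is advertised
                break;
            }
        }
    }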
diff --git a/core/jni/android_view_Surface.cpp b/core/jni/android_view_Surface.cpp
index 67a56ae2b52d..464f24901eb1 100644
--- a/core/jni/android_view_Surface.cpp
+++ b/core/jni/android_view_Surface.cpp
@@ -58,6 +58,8 @@
namespace android {
+using ui::Dataspace;
+
static const char* const OutOfResourcesException =
"android/view/Surface$OutOfResourcesException";
@@ -132,6 +134,7 @@ int android_view_Surface_mapPublicFormatToHalFormat(PublicFormat f) {
case PublicFormat::JPEG:
case PublicFormat::DEPTH_POINT_CLOUD:
case PublicFormat::DEPTH_JPEG:
+ case PublicFormat::HEIC:
return HAL_PIXEL_FORMAT_BLOB;
case PublicFormat::DEPTH16:
return HAL_PIXEL_FORMAT_Y16;
@@ -146,32 +149,44 @@ int android_view_Surface_mapPublicFormatToHalFormat(PublicFormat f) {
android_dataspace android_view_Surface_mapPublicFormatToHalDataspace(
PublicFormat f) {
+ Dataspace dataspace;
switch(f) {
case PublicFormat::JPEG:
- return HAL_DATASPACE_V0_JFIF;
+ dataspace = Dataspace::V0_JFIF;
+ break;
case PublicFormat::DEPTH_POINT_CLOUD:
case PublicFormat::DEPTH16:
case PublicFormat::RAW_DEPTH:
- return HAL_DATASPACE_DEPTH;
+ dataspace = Dataspace::DEPTH;
+ break;
case PublicFormat::RAW_SENSOR:
case PublicFormat::RAW_PRIVATE:
case PublicFormat::RAW10:
case PublicFormat::RAW12:
- return HAL_DATASPACE_ARBITRARY;
+ dataspace = Dataspace::ARBITRARY;
+ break;
case PublicFormat::YUV_420_888:
case PublicFormat::NV21:
case PublicFormat::YV12:
- return HAL_DATASPACE_V0_JFIF;
+ dataspace = Dataspace::V0_JFIF;
+ break;
case PublicFormat::DEPTH_JPEG:
- return static_cast<android_dataspace> (HAL_DATASPACE_DYNAMIC_DEPTH);
+ dataspace = Dataspace::DYNAMIC_DEPTH;
+ break;
+ case PublicFormat::HEIC:
+ dataspace = Dataspace::HEIF;
+ break;
default:
// Most formats map to UNKNOWN
- return HAL_DATASPACE_UNKNOWN;
+ dataspace = Dataspace::UNKNOWN;
+ break;
}
+ return static_cast<android_dataspace>(dataspace);
}
PublicFormat android_view_Surface_mapHalFormatDataspaceToPublicFormat(
int format, android_dataspace dataSpace) {
+ Dataspace ds = static_cast<Dataspace>(dataSpace);
switch(format) {
case HAL_PIXEL_FORMAT_RGBA_8888:
case HAL_PIXEL_FORMAT_RGBX_8888:
@@ -187,8 +202,8 @@ PublicFormat android_view_Surface_mapHalFormatDataspaceToPublicFormat(
// Enums overlap in both name and value
return static_cast<PublicFormat>(format);
case HAL_PIXEL_FORMAT_RAW16:
- switch (dataSpace) {
- case HAL_DATASPACE_DEPTH:
+ switch (ds) {
+ case Dataspace::DEPTH:
return PublicFormat::RAW_DEPTH;
default:
return PublicFormat::RAW_SENSOR;
@@ -210,8 +225,8 @@ PublicFormat android_view_Surface_mapHalFormatDataspaceToPublicFormat(
return PublicFormat::PRIVATE;
case HAL_PIXEL_FORMAT_Y16:
// Dataspace-dependent
- switch (dataSpace) {
- case HAL_DATASPACE_DEPTH:
+ switch (ds) {
+ case Dataspace::DEPTH:
return PublicFormat::DEPTH16;
default:
// Assume non-depth Y16 is just Y16.
@@ -220,11 +235,13 @@ PublicFormat android_view_Surface_mapHalFormatDataspaceToPublicFormat(
break;
case HAL_PIXEL_FORMAT_BLOB:
// Dataspace-dependent
- switch (dataSpace) {
- case HAL_DATASPACE_DEPTH:
+ switch (ds) {
+ case Dataspace::DEPTH:
return PublicFormat::DEPTH_POINT_CLOUD;
- case HAL_DATASPACE_V0_JFIF:
+ case Dataspace::V0_JFIF:
return PublicFormat::JPEG;
+ case Dataspace::HEIF:
+ return PublicFormat::HEIC;
default:
if (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
return PublicFormat::DEPTH_JPEG;
diff --git a/core/jni/include/android_runtime/android_view_Surface.h b/core/jni/include/android_runtime/android_view_Surface.h
index 984e942207c1..3f7c00c9ff01 100644
--- a/core/jni/include/android_runtime/android_view_Surface.h
+++ b/core/jni/include/android_runtime/android_view_Surface.h
@@ -55,10 +55,11 @@ enum class PublicFormat {
DEPTH_POINT_CLOUD = 0x101,
RAW_DEPTH = 0x1002, // @hide
YV12 = 0x32315659,
- Y8 = 0x20203859, // @hide
+ Y8 = 0x20203859,
Y16 = 0x20363159, // @hide
DEPTH16 = 0x44363159,
DEPTH_JPEG = 0x69656963,
+ HEIC = 0x48454946,
};
/* Gets the underlying ANativeWindow for a Surface. */
diff --git a/graphics/java/android/graphics/ImageFormat.java b/graphics/java/android/graphics/ImageFormat.java
index 0787d8518fa5..62647741dcfa 100644
--- a/graphics/java/android/graphics/ImageFormat.java
+++ b/graphics/java/android/graphics/ImageFormat.java
@@ -716,6 +716,14 @@ public class ImageFormat {
public static final int PRIVATE = 0x22;
/**
+ * Compressed HEIC format.
+ *
+ * <p>This format defines the HEIC brand of High Efficiency Image File
+ * Format as described in ISO/IEC 23008-12.</p>
+ */
+ public static final int HEIC = 0x48454946;
+
+ /**
* Use this function to retrieve the number of bits per pixel of an
* ImageFormat.
*
@@ -796,6 +804,7 @@ public class ImageFormat {
case RAW_DEPTH:
case Y8:
case DEPTH_JPEG:
+ case HEIC:
return true;
}
diff --git a/media/java/android/media/Image.java b/media/java/android/media/Image.java
index 26b9b8cf85a7..70a343f4de01 100644
--- a/media/java/android/media/Image.java
+++ b/media/java/android/media/Image.java
@@ -155,6 +155,13 @@ public abstract class Image implements AutoCloseable {
* UnSupportedOperationException being thrown.
* </td>
* </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
+ * <td>1</td>
+ * <td>Compressed data, so row and pixel strides are 0. To uncompress, use
+ * {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
+ * </td>
+ * </tr>
* </table>
*
* @see android.graphics.ImageFormat
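The new table row states that a HEIC Image carries a single plane of compressed data with zero strides, decodable by BitmapFactory. A minimal sketch of a reader callback doing exactly that (reuses the heicReader and handler placeholders from the earlier sketches):

    heicReader.setOnImageAvailableListener(reader -> {
        try (Image image = reader.acquireNextImage()) {
            ByteBuffer buffer = image.getPlanes()[0].getBuffer(); // single compressed plane
            byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            // Decode for display; writing `bytes` straight to a .heic file also works.
            Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        }
    }, handler);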
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index 60ef1d93191a..6116429ae561 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -821,6 +821,7 @@ public class ImageReader implements AutoCloseable {
case ImageFormat.DEPTH_POINT_CLOUD:
case ImageFormat.RAW_PRIVATE:
case ImageFormat.DEPTH_JPEG:
+ case ImageFormat.HEIC:
width = ImageReader.this.getWidth();
break;
default:
@@ -838,6 +839,7 @@ public class ImageReader implements AutoCloseable {
case ImageFormat.DEPTH_POINT_CLOUD:
case ImageFormat.RAW_PRIVATE:
case ImageFormat.DEPTH_JPEG:
+ case ImageFormat.HEIC:
height = ImageReader.this.getHeight();
break;
default:
diff --git a/media/java/android/media/ImageUtils.java b/media/java/android/media/ImageUtils.java
index b77a884d3412..d8a0bb334c53 100644
--- a/media/java/android/media/ImageUtils.java
+++ b/media/java/android/media/ImageUtils.java
@@ -36,8 +36,8 @@ class ImageUtils {
* {@link android.graphics.PixelFormat PixelFormat} are supported by
* ImageReader. When reading RGB data from a surface, the formats defined in
* {@link android.graphics.PixelFormat PixelFormat} can be used; when
- * reading YUV, JPEG or raw sensor data (for example, from the camera or video
- * decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
+ * reading YUV, JPEG, HEIC or raw sensor data (for example, from the camera
+ * or video decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
* are used.
*/
public static int getNumPlanesForFormat(int format) {
@@ -64,6 +64,7 @@ class ImageUtils {
case ImageFormat.DEPTH_POINT_CLOUD:
case ImageFormat.RAW_DEPTH:
case ImageFormat.DEPTH_JPEG:
+ case ImageFormat.HEIC:
return 1;
case ImageFormat.PRIVATE:
return 0;
@@ -194,6 +195,7 @@ class ImageUtils {
case ImageFormat.JPEG:
case ImageFormat.DEPTH_POINT_CLOUD:
case ImageFormat.DEPTH_JPEG:
+ case ImageFormat.HEIC:
estimatedBytePerPixel = 0.3;
break;
case ImageFormat.Y8:
@@ -262,6 +264,7 @@ class ImageUtils {
case ImageFormat.RAW10:
case ImageFormat.RAW12:
case ImageFormat.RAW_DEPTH:
+ case ImageFormat.HEIC:
return new Size(image.getWidth(), image.getHeight());
case ImageFormat.PRIVATE:
return new Size(0, 0);
diff --git a/media/jni/android_media_Utils.cpp b/media/jni/android_media_Utils.cpp
index 458d8471dafd..01baadb2f024 100644
--- a/media/jni/android_media_Utils.cpp
+++ b/media/jni/android_media_Utils.cpp
@@ -29,6 +29,9 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
+// Must be in sync with the value in HeicCompositeStream.cpp
+#define CAMERA3_HEIC_BLOB_ID 0x00FE
+
namespace android {
AssetStream::AssetStream(SkStream* stream)
@@ -609,34 +612,35 @@ bool isPossiblyYUV(PixelFormat format) {
}
}
-uint32_t Image_getJpegSize(LockedImage* buffer, bool usingRGBAOverride) {
+uint32_t Image_getBlobSize(LockedImage* buffer, bool usingRGBAOverride) {
ALOGV("%s", __FUNCTION__);
LOG_ALWAYS_FATAL_IF(buffer == NULL, "Input buffer is NULL!!!");
uint32_t size = 0;
uint32_t width = buffer->width;
- uint8_t* jpegBuffer = buffer->data;
+ uint8_t* blobBuffer = buffer->data;
if (usingRGBAOverride) {
width = (buffer->width + buffer->stride * (buffer->height - 1)) * 4;
}
- // First check for JPEG transport header at the end of the buffer
- uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob));
+ // First check for BLOB transport header at the end of the buffer
+ uint8_t* header = blobBuffer + (width - sizeof(struct camera3_jpeg_blob));
struct camera3_jpeg_blob *blob = (struct camera3_jpeg_blob*)(header);
- if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
+ if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID ||
+ blob->jpeg_blob_id == CAMERA3_HEIC_BLOB_ID) {
size = blob->jpeg_size;
- ALOGV("%s: Jpeg size = %d", __FUNCTION__, size);
+ ALOGV("%s: Jpeg/Heic size = %d", __FUNCTION__, size);
}
// failed to find size, default to whole buffer
if (size == 0) {
/*
- * This is a problem because not including the JPEG header
- * means that in certain rare situations a regular JPEG blob
+ * This is a problem because not including the JPEG/BLOB header
+ * means that in certain rare situations a regular JPEG/HEIC blob
* will be mis-identified as having a header, in which case
* we will get a garbage size value.
*/
- ALOGW("%s: No JPEG header detected, defaulting to size=width=%d",
+ ALOGW("%s: No JPEG/HEIC header detected, defaulting to size=width=%d",
__FUNCTION__, width);
size = width;
}
@@ -760,7 +764,7 @@ status_t getLockedImageInfo(LockedImage* buffer, int idx,
pData = buffer->data;
- dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
+ dataSize = Image_getBlobSize(buffer, usingRGBAOverride);
pStride = 0;
rStride = 0;
break;
diff --git a/media/jni/android_media_Utils.h b/media/jni/android_media_Utils.h
index 821c6b25c333..19c1b88f78e8 100644
--- a/media/jni/android_media_Utils.h
+++ b/media/jni/android_media_Utils.h
@@ -119,7 +119,7 @@ bool usingRGBAToJpegOverride(int32_t imageFormat, int32_t containerFormat);
int32_t applyFormatOverrides(int32_t imageFormat, int32_t containerFormat);
-uint32_t Image_getJpegSize(LockedImage* buffer, bool usingRGBAOverride);
+uint32_t Image_getBlobSize(LockedImage* buffer, bool usingRGBAOverride);
bool isFormatOpaque(int format);