| -rw-r--r-- | core/api/current.txt | 2 |
| -rw-r--r-- | core/java/android/hardware/DataSpace.java | 14 |
| -rw-r--r-- | core/java/android/hardware/camera2/CameraCharacteristics.java | 109 |
| -rw-r--r-- | core/java/android/hardware/camera2/CameraDevice.java | 22 |
| -rw-r--r-- | core/java/android/hardware/camera2/impl/CameraMetadataNative.java | 26 |
| -rw-r--r-- | core/java/android/hardware/camera2/params/StreamConfigurationMap.java | 88 |
| -rw-r--r-- | graphics/java/android/graphics/ImageFormat.java | 12 |
| -rw-r--r-- | media/java/android/media/ImageReader.java | 2 |
| -rw-r--r-- | media/java/android/media/ImageUtils.java | 3 |
9 files changed, 274 insertions, 4 deletions
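Taken together, the changes below define `ImageFormat.JPEG_R` and `DataSpace.DATASPACE_JPEG_R` (both 0x1005) plus new `android.jpegr.*` stream-configuration, minimum-frame-duration, and stall-duration keys, all surfaced through the existing `StreamConfigurationMap` accessors. As a rough sketch of how an application could consume this on a build that contains the change, the snippet below checks whether a camera advertises Jpeg/R output, reads the stall duration for the largest listed size, and creates a matching `ImageReader`. The `JpegRSupportCheck` class name, the largest-size heuristic, and the log tag are illustrative assumptions, not part of the patch.

```java
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.util.Log;
import android.util.Size;

public final class JpegRSupportCheck {
    private static final String TAG = "JpegRSupportCheck";

    /**
     * Returns an ImageReader for the largest advertised Jpeg/R size, or null if the
     * camera does not list ImageFormat.JPEG_R among its output formats.
     */
    public static ImageReader createJpegRReader(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return null;
        }

        // getOutputSizes() returns null when the format is not supported at all.
        Size[] sizes = map.getOutputSizes(ImageFormat.JPEG_R);
        if (sizes == null || sizes.length == 0) {
            return null;
        }

        // Pick the largest size; the patch notes Jpeg/R typically supports MAXIMUM resolution.
        Size largest = sizes[0];
        for (Size s : sizes) {
            if ((long) s.getWidth() * s.getHeight()
                    > (long) largest.getWidth() * largest.getHeight()) {
                largest = s;
            }
        }

        // The new android.jpegr.* duration keys feed the same per-format accessors
        // used for JPEG/HEIC, so the stall duration can be queried directly.
        long stallNs = map.getOutputStallDuration(ImageFormat.JPEG_R, largest);
        Log.d(TAG, "Jpeg/R " + largest + " stall duration: " + stallNs + " ns");

        // Jpeg/R is a compressed (BLOB) format, so a single-plane reader is sufficient.
        return ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.JPEG_R, /*maxImages*/ 2);
    }
}
```

Because Jpeg/R is carried in a BLOB buffer, the reader delivers a single-plane compressed image that is consumed the same way as JPEG or HEIC output.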
diff --git a/core/api/current.txt b/core/api/current.txt index addca77b3941..d7e762afa331 100644 --- a/core/api/current.txt +++ b/core/api/current.txt @@ -15048,6 +15048,7 @@ package android.graphics { field public static final int FLEX_RGB_888 = 41; // 0x29 field public static final int HEIC = 1212500294; // 0x48454946 field public static final int JPEG = 256; // 0x100 + field public static final int JPEG_R = 4101; // 0x1005 field public static final int NV16 = 16; // 0x10 field public static final int NV21 = 17; // 0x11 field public static final int PRIVATE = 34; // 0x22 @@ -17229,6 +17230,7 @@ package android.hardware { field public static final int DATASPACE_DYNAMIC_DEPTH = 4098; // 0x1002 field public static final int DATASPACE_HEIF = 4100; // 0x1004 field public static final int DATASPACE_JFIF = 146931712; // 0x8c20000 + field public static final int DATASPACE_JPEG_R = 4101; // 0x1005 field public static final int DATASPACE_SCRGB = 411107328; // 0x18810000 field public static final int DATASPACE_SCRGB_LINEAR = 406913024; // 0x18410000 field public static final int DATASPACE_SRGB = 142671872; // 0x8810000 diff --git a/core/java/android/hardware/DataSpace.java b/core/java/android/hardware/DataSpace.java index 15eae0920e7d..0a145746d303 100644 --- a/core/java/android/hardware/DataSpace.java +++ b/core/java/android/hardware/DataSpace.java @@ -408,6 +408,19 @@ public final class DataSpace { */ public static final int DATASPACE_HEIF = 4100; + /** + * ISO/IEC TBD + * + * JPEG image with embedded recovery map following the Jpeg/R specification. + * + * <p>This value must always remain aligned with the public ImageFormat Jpeg/R definition and is + * valid with formats: + * HAL_PIXEL_FORMAT_BLOB: JPEG image encoded by Jpeg/R encoder according to ISO/IEC TBD. + * The image contains a standard SDR JPEG and a recovery map. Jpeg/R decoders can use the + * map to recover the input image.</p> + */ + public static final int DATASPACE_JPEG_R = 4101; + /** @hide */ @Retention(RetentionPolicy.SOURCE) @IntDef(flag = true, value = { @@ -626,6 +639,7 @@ public final class DataSpace { DATASPACE_DEPTH, DATASPACE_DYNAMIC_DEPTH, DATASPACE_HEIF, + DATASPACE_JPEG_R, DATASPACE_UNKNOWN, DATASPACE_SCRGB_LINEAR, DATASPACE_SRGB, diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java index 6e72b5f291f0..a6f7e945bef8 100644 --- a/core/java/android/hardware/camera2/CameraCharacteristics.java +++ b/core/java/android/hardware/camera2/CameraCharacteristics.java @@ -5562,6 +5562,115 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri public static final Key<Integer> AUTOMOTIVE_LOCATION = new Key<Integer>("android.automotive.location", int.class); + /** + * <p>The available Jpeg/R stream + * configurations that this camera device supports + * (i.e. format, width, height, output/input stream).</p> + * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p> + * <p>If the camera device supports Jpeg/R, it will support the same stream combinations with + * Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported + * by the device are determined by the device's hardware level and capabilities.</p> + * <p>All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+ * Configuring JPEG and Jpeg/R streams at the same time is not supported.</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> + * + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfiguration[]> JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS = + new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.jpegr.availableJpegRStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class); + + /** + * <p>This lists the minimum frame duration for each + * format/size combination for Jpeg/R output formats.</p> + * <p>This should correspond to the frame duration when only that + * stream is active, with all processing (typically in android.*.mode) + * set to either OFF or FAST.</p> + * <p>When multiple streams are used in a request, the minimum frame + * duration will be max(individual stream min durations).</p> + * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and + * android.scaler.availableStallDurations for more details about + * calculating the max frame rate.</p> + * <p><b>Units</b>: (format, width, height, ns) x n</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> + * + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL + * @see CaptureRequest#SENSOR_FRAME_DURATION + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS = + new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.jpegr.availableJpegRMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class); + + /** + * <p>This lists the maximum stall duration for each + * output format/size combination for Jpeg/R streams.</p> + * <p>A stall duration is how much extra time would get added + * to the normal minimum frame duration for a repeating request + * that has streams with non-zero stall.</p> + * <p>This functions similarly to + * android.scaler.availableStallDurations for Jpeg/R + * streams.</p> + * <p>All Jpeg/R output stream formats may have a nonzero stall + * duration.</p> + * <p><b>Units</b>: (format, width, height, ns) x n</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> + * + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS = + new 
Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.jpegr.availableJpegRStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class); + + /** + * <p>The available Jpeg/R stream + * configurations that this camera device supports + * (i.e. format, width, height, output/input stream).</p> + * <p>Refer to android.jpegr.availableJpegRStreamConfigurations for details.</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfiguration[]> JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = + new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.jpegr.availableJpegRStreamConfigurationsMaximumResolution", android.hardware.camera2.params.StreamConfiguration[].class); + + /** + * <p>This lists the minimum frame duration for each + * format/size combination for Jpeg/R output formats for CaptureRequests where + * {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to + * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION }.</p> + * <p>Refer to android.jpegr.availableJpegRMinFrameDurations for details.</p> + * <p><b>Units</b>: (format, width, height, ns) x n</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * + * @see CaptureRequest#SENSOR_PIXEL_MODE + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = + new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.jpegr.availableJpegRMinFrameDurationsMaximumResolution", android.hardware.camera2.params.StreamConfigurationDuration[].class); + + /** + * <p>This lists the maximum stall duration for each + * output format/size combination for Jpeg/R streams for CaptureRequests where + * {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to + * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION }.</p> + * <p>Refer to android.jpegr.availableJpegRStallDurations for details.</p> + * <p><b>Units</b>: (format, width, height, ns) x n</p> + * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p> + * + * @see CaptureRequest#SENSOR_PIXEL_MODE + * @hide + */ + public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION = + new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.jpegr.availableJpegRStallDurationsMaximumResolution", android.hardware.camera2.params.StreamConfigurationDuration[].class); + /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ * End generated code *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/ diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java index 10a7538cf488..bf2e56359785 100644 --- a/core/java/android/hardware/camera2/CameraDevice.java +++ b/core/java/android/hardware/camera2/CameraDevice.java @@ -859,6 +859,28 @@ public abstract class CameraDevice implements AutoCloseable { * format {@link android.graphics.ImageFormat#YUV_420_888} with a 10-bit profile * will cause a capture session initialization failure. 
* </p> + * <p>{@link android.graphics.ImageFormat#JPEG_R} may also be supported if advertised by + * {@link android.hardware.camera2.params.StreamConfigurationMap}. When initializing a capture + * session that includes a Jpeg/R camera output, clients must consider the following items w.r.t. + * the 10-bit mandatory stream combination table: + * + * <ul> + * <li>To generate the compressed Jpeg/R image a single + * {@link android.graphics.ImageFormat#YCBCR_P010} output will be used internally by + * the camera device.</li> + * <li>On camera devices that are able to support concurrent 10 and 8-bit capture requests + * (see {@link android.hardware.camera2.params.DynamicRangeProfiles#getProfileCaptureRequestConstraints}), + * an extra {@link android.graphics.ImageFormat#JPEG} will also + * be configured internally to help speed up the encoding process.</li> + * </ul> + * + * Jpeg/R camera outputs will typically be able to support the MAXIMUM device resolution. + * Clients can also call {@link StreamConfigurationMap#getOutputSizes(int)} for a complete list + * of supported sizes. + * Camera clients that register a Jpeg/R output within a stream combination that doesn't fit + * in the mandatory stream table above can call + * {@link CameraDevice#isSessionConfigurationSupported} to ensure that this particular + * configuration is supported.</p> * * <p>Devices with the STREAM_USE_CASE capability ({@link * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} includes {@link diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java index 012fad52fddf..9a164743bc8d 100644 --- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java +++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java @@ -1351,6 +1351,9 @@ public class CameraMetadataNative implements Parcelable { /*heicconfiguration*/ null, /*heicminduration*/ null, /*heicstallduration*/ null, + /*jpegRconfiguration*/ null, + /*jpegRminduration*/ null, + /*jpegRstallduration*/ null, /*highspeedvideoconfigurations*/ null, /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); break; @@ -1365,6 +1368,9 @@ /*heicconfiguration*/ null, /*heicminduration*/ null, /*heicstallduration*/ null, + /*jpegRconfiguration*/ null, + /*jpegRminduration*/ null, + /*jpegRstallduration*/ null, highSpeedVideoConfigurations, /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); break; @@ -1379,6 +1385,9 @@ /*heicconfiguration*/ null, /*heicminduration*/ null, /*heicstallduration*/ null, + /*jpegRconfiguration*/ null, + /*jpegRminduration*/ null, + /*jpegRstallduration*/ null, /*highSpeedVideoConfigurations*/ null, inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); break; @@ -1393,6 +1402,9 @@ /*heicconfiguration*/ null, /*heicminduration*/ null, /*heicstallduration*/ null, + /*jpegRconfiguration*/ null, + /*jpegRminduration*/ null, + /*jpegRstallduration*/ null, /*highSpeedVideoConfigurations*/ null, /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); } @@ -1546,6 +1558,12 @@ CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); StreamConfigurationDuration[] heicStallDurations = getBase( CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); +
StreamConfiguration[] jpegRConfigurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS); + StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS); + StreamConfigurationDuration[] jpegRStallDurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS); HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); ReprocessFormatsMap inputOutputFormatsMap = getBase( @@ -1557,6 +1575,7 @@ public class CameraMetadataNative implements Parcelable { dynamicDepthConfigurations, dynamicDepthMinFrameDurations, dynamicDepthStallDurations, heicConfigurations, heicMinFrameDurations, heicStallDurations, + jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution); } @@ -1589,6 +1608,12 @@ public class CameraMetadataNative implements Parcelable { CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); StreamConfigurationDuration[] heicStallDurations = getBase( CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION); + StreamConfiguration[] jpegRConfigurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); + StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); + StreamConfigurationDuration[] jpegRStallDurations = getBase( + CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION); HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION); ReprocessFormatsMap inputOutputFormatsMap = getBase( @@ -1601,6 +1626,7 @@ public class CameraMetadataNative implements Parcelable { dynamicDepthConfigurations, dynamicDepthMinFrameDurations, dynamicDepthStallDurations, heicConfigurations, heicMinFrameDurations, heicStallDurations, + jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution, false); } diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java index 5981d279227d..cb678b98a998 100644 --- a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java +++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java @@ -26,6 +26,7 @@ import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.utils.HashCodeHelpers; import android.hardware.camera2.utils.SurfaceUtils; +import android.util.Log; import android.util.Range; import android.util.Size; import android.util.SparseIntArray; @@ -95,6 +96,11 @@ public final class StreamConfigurationMap { * {@link StreamConfigurationDuration} * @param heicStallDurations a non-{@code null} array of heic * {@link StreamConfigurationDuration} + * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration} + * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R + * {@link StreamConfigurationDuration} + * @param jpegRStallDurations a non-{@code null} array of Jpeg/R + * {@link StreamConfigurationDuration} * @param highSpeedVideoConfigurations an array of {@link 
HighSpeedVideoConfiguration}, null if * camera device does not support high speed video recording * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE @@ -117,6 +123,9 @@ public final class StreamConfigurationMap { StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, + StreamConfiguration[] jpegRConfigurations, + StreamConfigurationDuration[] jpegRMinFrameDurations, + StreamConfigurationDuration[] jpegRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution) { @@ -125,6 +134,7 @@ public final class StreamConfigurationMap { dynamicDepthConfigurations, dynamicDepthMinFrameDurations, dynamicDepthStallDurations, heicConfigurations, heicMinFrameDurations, heicStallDurations, + jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution, /*enforceImplementationDefined*/ true); } @@ -154,6 +164,11 @@ public final class StreamConfigurationMap { * {@link StreamConfigurationDuration} * @param heicStallDurations a non-{@code null} array of heic * {@link StreamConfigurationDuration} + * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration} + * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R + * {@link StreamConfigurationDuration} + * @param jpegRStallDurations a non-{@code null} array of Jpeg/R + * {@link StreamConfigurationDuration} * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if * camera device does not support high speed video recording * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE @@ -178,6 +193,9 @@ public final class StreamConfigurationMap { StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, + StreamConfiguration[] jpegRConfigurations, + StreamConfigurationDuration[] jpegRMinFrameDurations, + StreamConfigurationDuration[] jpegRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution, @@ -242,6 +260,20 @@ public final class StreamConfigurationMap { "heicStallDurations"); } + + if (jpegRConfigurations == null) { + mJpegRConfigurations = new StreamConfiguration[0]; + mJpegRMinFrameDurations = new StreamConfigurationDuration[0]; + mJpegRStallDurations = new StreamConfigurationDuration[0]; + } else { + mJpegRConfigurations = checkArrayElementsNotNull(jpegRConfigurations, + "jpegRConfigurations"); + mJpegRMinFrameDurations = checkArrayElementsNotNull(jpegRMinFrameDurations, + "jpegRFrameDurations"); + mJpegRStallDurations = checkArrayElementsNotNull(jpegRStallDurations, + "jpegRStallDurations"); + } + if (highSpeedVideoConfigurations == null) { mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0]; } else { @@ -305,6 +337,17 @@ public final class StreamConfigurationMap { mHeicOutputFormats.get(config.getFormat()) + 1); } + // For each Jpeg/R format, track how many sizes there are available to configure + for (StreamConfiguration config : mJpegRConfigurations) { + if (!config.isOutput()) { + // Ignoring input Jpeg/R configs + continue; + } + + mJpegROutputFormats.put(config.getFormat(), + mJpegROutputFormats.get(config.getFormat()) + 1); + } + if (configurations != null && 
enforceImplementationDefined && mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) { throw new AssertionError( @@ -447,6 +490,8 @@ public final class StreamConfigurationMap { return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0; } else if (dataspace == HAL_DATASPACE_HEIF) { return mHeicOutputFormats.indexOfKey(internalFormat) >= 0; + } else if (dataspace == HAL_DATASPACE_JPEG_R) { + return mJpegROutputFormats.indexOfKey(internalFormat) >= 0; } else { return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0; } @@ -561,6 +606,7 @@ public final class StreamConfigurationMap { surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations : surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations : surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations : + surfaceDataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations : mConfigurations; for (StreamConfiguration config : configs) { if (config.getFormat() == surfaceFormat && config.isOutput()) { @@ -597,6 +643,7 @@ public final class StreamConfigurationMap { dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations : dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations : dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations : + dataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations : mConfigurations; for (StreamConfiguration config : configs) { if ((config.getFormat() == internalFormat) && config.isOutput() && @@ -1120,6 +1167,9 @@ public final class StreamConfigurationMap { Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) && Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) && Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) && + Arrays.equals(mJpegRConfigurations, other.mJpegRConfigurations) && + Arrays.equals(mJpegRMinFrameDurations, other.mJpegRMinFrameDurations) && + Arrays.equals(mJpegRStallDurations, other.mJpegRStallDurations) && Arrays.equals(mHighSpeedVideoConfigurations, other.mHighSpeedVideoConfigurations); } @@ -1138,6 +1188,7 @@ public final class StreamConfigurationMap { mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations, mDynamicDepthStallDurations, mHeicConfigurations, mHeicMinFrameDurations, mHeicStallDurations, + mJpegRConfigurations, mJpegRMinFrameDurations, mJpegRStallDurations, mHighSpeedVideoConfigurations); } @@ -1161,6 +1212,10 @@ public final class StreamConfigurationMap { if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) { return format; } + } else if (internalDataspace == HAL_DATASPACE_JPEG_R) { + if (mJpegROutputFormats.indexOfKey(internalFormat) >= 0) { + return format; + } } else { if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) { return format; @@ -1365,6 +1420,7 @@ public final class StreamConfigurationMap { * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB + * <li>ImageFormat.JPEG_R => HAL_PIXEL_FORMAT_BLOB * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16 * </ul> * </p> @@ -1391,6 +1447,7 @@ public final class StreamConfigurationMap { case ImageFormat.DEPTH_POINT_CLOUD: case ImageFormat.DEPTH_JPEG: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: return HAL_PIXEL_FORMAT_BLOB; case ImageFormat.DEPTH16: return HAL_PIXEL_FORMAT_Y16; @@ -1414,6 +1471,7 @@ public final class StreamConfigurationMap { * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF 
+ * <li>ImageFormat.JPEG_R => HAL_DATASPACE_JPEG_R * <li>others => HAL_DATASPACE_UNKNOWN * </ul> * </p> @@ -1448,6 +1506,8 @@ public final class StreamConfigurationMap { return HAL_DATASPACE_DYNAMIC_DEPTH; case ImageFormat.HEIC: return HAL_DATASPACE_HEIF; + case ImageFormat.JPEG_R: + return HAL_DATASPACE_JPEG_R; default: return HAL_DATASPACE_UNKNOWN; } @@ -1500,14 +1560,15 @@ public final class StreamConfigurationMap { dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats : dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats : dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats : + dataspace == HAL_DATASPACE_JPEG_R ? mJpegROutputFormats : highRes ? mHighResOutputFormats : mOutputFormats; int sizesCount = formatsMap.get(format); - if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH || + if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH || dataspace == HAL_DATASPACE_JPEG_R || dataspace == HAL_DATASPACE_DYNAMIC_DEPTH || dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) || - (output && (dataspace != HAL_DATASPACE_DEPTH && + (output && (dataspace != HAL_DATASPACE_DEPTH && dataspace != HAL_DATASPACE_JPEG_R && dataspace != HAL_DATASPACE_DYNAMIC_DEPTH && dataspace != HAL_DATASPACE_HEIF) && mAllOutputFormats.get(format) == 0)) { @@ -1521,11 +1582,13 @@ public final class StreamConfigurationMap { (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations : (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations : + (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations : mConfigurations; StreamConfigurationDuration[] minFrameDurations = (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations : (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations : + (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations : mMinFrameDurations; for (StreamConfiguration config : configurations) { @@ -1555,7 +1618,7 @@ public final class StreamConfigurationMap { // Dynamic depth streams can have both fast and also high res modes. if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH || - dataspace == HAL_DATASPACE_HEIF)) { + dataspace == HAL_DATASPACE_HEIF) || (dataspace == HAL_DATASPACE_JPEG_R)) { if (sizeIndex > sizesCount) { throw new AssertionError( @@ -1598,6 +1661,9 @@ public final class StreamConfigurationMap { if (mHeicOutputFormats.size() > 0) { formats[i++] = ImageFormat.HEIC; } + if (mJpegROutputFormats.size() > 0) { + formats[i++] = ImageFormat.JPEG_R; + } } if (formats.length != i) { throw new AssertionError("Too few formats " + i + ", expected " + formats.length); @@ -1644,12 +1710,14 @@ public final class StreamConfigurationMap { (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations : (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations : + (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations : mMinFrameDurations; case DURATION_STALL: return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations : (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations : (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations : + (dataspace == HAL_DATASPACE_JPEG_R) ? 
mJpegRStallDurations : mStallDurations; default: throw new IllegalArgumentException("duration was invalid"); @@ -1664,6 +1732,7 @@ public final class StreamConfigurationMap { size += mDepthOutputFormats.size(); size += mDynamicDepthOutputFormats.size(); size += mHeicOutputFormats.size(); + size += mJpegROutputFormats.size(); } return size; @@ -1688,6 +1757,7 @@ public final class StreamConfigurationMap { (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations : (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations : + (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations : mConfigurations; for (int i = 0; i < configurations.length; i++) { @@ -1908,6 +1978,8 @@ public final class StreamConfigurationMap { return "PRIVATE"; case ImageFormat.HEIC: return "HEIC"; + case ImageFormat.JPEG_R: + return "JPEG/R"; default: return "UNKNOWN"; } @@ -1948,6 +2020,10 @@ public final class StreamConfigurationMap { * @hide */ public static final int HAL_DATASPACE_HEIF = 0x1003; + /** + * @hide + */ + public static final int HAL_DATASPACE_JPEG_R = 0x1005; private static final long DURATION_20FPS_NS = 50000000L; /** * @see #getDurations(int, int) @@ -1971,6 +2047,10 @@ public final class StreamConfigurationMap { private final StreamConfigurationDuration[] mHeicMinFrameDurations; private final StreamConfigurationDuration[] mHeicStallDurations; + private final StreamConfiguration[] mJpegRConfigurations; + private final StreamConfigurationDuration[] mJpegRMinFrameDurations; + private final StreamConfigurationDuration[] mJpegRStallDurations; + private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations; private final ReprocessFormatsMap mInputOutputFormatsMap; @@ -1992,6 +2072,8 @@ public final class StreamConfigurationMap { private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray(); /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */ private final SparseIntArray mHeicOutputFormats = new SparseIntArray(); + /** internal format -> num Jpeg/R output sizes mapping, for HAL_DATASPACE_JPEG_R */ + private final SparseIntArray mJpegROutputFormats = new SparseIntArray(); /** High speed video Size -> FPS range count mapping*/ private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap = diff --git a/graphics/java/android/graphics/ImageFormat.java b/graphics/java/android/graphics/ImageFormat.java index 68f29278f282..88373e80240a 100644 --- a/graphics/java/android/graphics/ImageFormat.java +++ b/graphics/java/android/graphics/ImageFormat.java @@ -60,7 +60,8 @@ public class ImageFormat { RAW_DEPTH, RAW_DEPTH10, PRIVATE, - HEIC + HEIC, + JPEG_R }) public @interface Format { } @@ -258,6 +259,14 @@ public class ImageFormat { public static final int DEPTH_JPEG = 0x69656963; /** + * Compressed JPEG format that includes an embedded recovery map. 
+ * + * <p>JPEG compressed main image along with XMP embedded recovery map + * following ISO TBD.</p> + */ + public static final int JPEG_R = 0x1005; + + /** * <p>Multi-plane Android YUV 420 format</p> * * <p>This format is a generic YCbCr format, capable of describing any 4:2:0 @@ -886,6 +895,7 @@ public class ImageFormat { case Y8: case DEPTH_JPEG: case HEIC: + case JPEG_R: return true; } diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java index f223bfd357d6..72aaa3554ddb 100644 --- a/media/java/android/media/ImageReader.java +++ b/media/java/android/media/ImageReader.java @@ -1199,6 +1199,7 @@ public class ImageReader implements AutoCloseable { case ImageFormat.RAW_PRIVATE: case ImageFormat.DEPTH_JPEG: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: width = ImageReader.this.getWidth(); break; default: @@ -1217,6 +1218,7 @@ public class ImageReader implements AutoCloseable { case ImageFormat.RAW_PRIVATE: case ImageFormat.DEPTH_JPEG: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: height = ImageReader.this.getHeight(); break; default: diff --git a/media/java/android/media/ImageUtils.java b/media/java/android/media/ImageUtils.java index 2f1a36cba9d0..8f7019d4e494 100644 --- a/media/java/android/media/ImageUtils.java +++ b/media/java/android/media/ImageUtils.java @@ -68,6 +68,7 @@ class ImageUtils { case ImageFormat.RAW_DEPTH10: case ImageFormat.DEPTH_JPEG: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: return 1; case ImageFormat.PRIVATE: return 0; @@ -231,6 +232,7 @@ class ImageUtils { case ImageFormat.DEPTH_POINT_CLOUD: case ImageFormat.DEPTH_JPEG: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: estimatedBytePerPixel = 0.3; break; case ImageFormat.Y8: @@ -304,6 +306,7 @@ class ImageUtils { case ImageFormat.RAW_DEPTH: case ImageFormat.RAW_DEPTH10: case ImageFormat.HEIC: + case ImageFormat.JPEG_R: return new Size(image.getWidth(), image.getHeight()); case ImageFormat.PRIVATE: return new Size(0, 0); |
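The CameraDevice documentation added in this change recommends calling isSessionConfigurationSupported when a Jpeg/R output is used in a stream combination that is not covered by the mandatory 10-bit table. Below is a minimal sketch of that check, assuming a preview surface plus a Jpeg/R ImageReader created with `ImageFormat.JPEG_R` (for example via the earlier sketch); the `JpegRSessionHelper` class and the two-output combination are hypothetical examples rather than anything mandated by the patch.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.media.ImageReader;
import android.view.Surface;

import java.util.Arrays;
import java.util.concurrent.Executor;

public final class JpegRSessionHelper {
    /**
     * Probes whether a preview + Jpeg/R still-capture combination is supported before
     * actually configuring the session, as suggested by the CameraDevice documentation
     * in this change. The ImageReader is expected to use ImageFormat.JPEG_R.
     */
    public static boolean isPreviewPlusJpegRSupported(CameraDevice device,
            Surface previewSurface, ImageReader jpegRReader, Executor executor,
            CameraCaptureSession.StateCallback callback) throws CameraAccessException {
        OutputConfiguration preview = new OutputConfiguration(previewSurface);
        OutputConfiguration jpegR = new OutputConfiguration(jpegRReader.getSurface());
        SessionConfiguration config = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR,
                Arrays.asList(preview, jpegR),
                executor, callback);
        // Returns false when this particular combination is not supported, without going
        // through a full (and potentially failing) session configuration. Note that some
        // devices may throw UnsupportedOperationException if the query itself is unsupported.
        return device.isSessionConfigurationSupported(config);
    }
}
```

A production client would typically guard this call against UnsupportedOperationException and fall back to the mandatory stream combinations when the query is unavailable.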