-rw-r--r--   api/current.txt                                                 |   5
-rw-r--r--   core/java/android/hardware/camera2/CameraCharacteristics.java  | 188
-rw-r--r--   core/java/android/hardware/camera2/CameraMetadata.java         |  14
-rw-r--r--   core/java/android/hardware/camera2/CaptureRequest.java         |  77
-rw-r--r--   core/java/android/hardware/camera2/CaptureResult.java          |  77
5 files changed, 263 insertions, 98 deletions
diff --git a/api/current.txt b/api/current.txt
index 9ade8295b10e..d7b4ef588105 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -11286,8 +11286,11 @@ package android.hardware.camera2 {
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_MIN_DURATIONS;
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_SIZES;
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
+ field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS;
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_SIZES;
+ field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STALL_DURATIONS;
+ field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_AVAILABLE_TEST_PATTERN_MODES;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BASE_GAIN_FACTOR;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BLACK_LEVEL_PATTERN;
@@ -11480,6 +11483,8 @@ package android.hardware.camera2 {
field public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5; // 0x5
field public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 2; // 0x2
field public static final int REQUEST_AVAILABLE_CAPABILITIES_ZSL = 4; // 0x4
+ field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1; // 0x1
+ field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0; // 0x0
field public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2; // 0x2
field public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3; // 0x3
field public static final int SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256; // 0x100
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index f975dcd8a623..3672de4f8d00 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -284,16 +284,16 @@ public final class CameraCharacteristics extends CameraMetadata {
* <li>The sizes will be sorted by increasing pixel area (width x height).
* If several resolutions have the same area, they will be sorted by increasing width.</li>
* <li>The aspect ratio of the largest thumbnail size will be same as the
- * aspect ratio of largest size in {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES android.scaler.availableJpegSizes}.
+ * aspect ratio of largest JPEG output size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
* The largest size is defined as the size that has the largest pixel area
* in a given size list.</li>
- * <li>Each size in {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES android.scaler.availableJpegSizes} will have at least
+ * <li>Each output JPEG size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} will have at least
* one corresponding size that has the same aspect ratio in availableThumbnailSizes,
* and vice versa.</li>
* <li>All non (0, 0) sizes will have non-zero widths and heights.</li>
* </ul>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
*/
public static final Key<android.hardware.camera2.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
new Key<android.hardware.camera2.Size[]>("android.jpeg.availableThumbnailSizes", android.hardware.camera2.Size[].class);
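[Editor's illustration] As a sketch of how an application might use the aspect-ratio guarantee documented above, the following hypothetical helper picks the largest thumbnail size matching a chosen JPEG output size. It assumes the android.hardware.camera2.Size type used by this key exposes getWidth()/getHeight(), as in the released API:

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.Size;

    public class ThumbnailSizes {
        // Hypothetical helper: returns the largest available thumbnail size whose
        // aspect ratio matches the chosen JPEG output size. The docs above
        // guarantee ascending sort order by area and that at least one
        // aspect-ratio match exists for every JPEG output size.
        static Size chooseThumbnailSize(CameraCharacteristics characteristics,
                int jpegWidth, int jpegHeight) {
            Size[] candidates = characteristics.get(
                    CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
            double jpegAspect = (double) jpegWidth / jpegHeight;
            Size best = null;
            for (Size s : candidates) {
                if (s.getWidth() == 0 || s.getHeight() == 0) {
                    continue; // (0, 0) means "no thumbnail"; skip it here
                }
                double aspect = (double) s.getWidth() / s.getHeight();
                if (Math.abs(aspect - jpegAspect) < 1e-3) {
                    best = s; // ascending sort order keeps the largest match
                }
            }
            return best;
        }
    }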
@@ -707,15 +707,195 @@ public final class CameraCharacteristics extends CameraMetadata {
* </table>
* <p>For ZSL-capable camera devices, using the RAW_OPAQUE format
* as either input or output will never hurt maximum frame rate (i.e.
- * android.scaler.availableStallDurations will not have RAW_OPAQUE).</p>
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} will not have RAW_OPAQUE).</p>
* <p>Attempting to configure an input stream with output streams not
* listed as available in this map is not valid.</p>
* <p>TODO: Add java type mapping for this property.</p>
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
*/
public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);
/**
+ * <p>The available stream configurations that this
+ * camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code>
+ * tuples.</p>
+ * <p>All camera devices will support sensor maximum resolution (defined by
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p>Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}.</p>
+ * <p>The following table describes the minimum required output stream
+ * configurations based on the hardware level
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">Format</th>
+ * <th align="center">Size</th>
+ * <th align="center">Hardware Level</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1920x1080 (1080p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 1080p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1280x720 (720)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 720p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">640x480 (480p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 480p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">320x240 (240p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 240p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG</td>
+ * <td align="center">FULL</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+ * <td align="center">LIMITED</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">IMPLEMENTATION_DEFINED</td>
+ * <td align="center">same as YUV_420_888</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
+ * mandatory stream configurations on a per-capability basis.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+ * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT
+ */
+ public static final Key<int[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
+ new Key<int[]>("android.scaler.availableStreamConfigurations", int[].class);
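[Editor's illustration] A rough sketch of enumerating the output configurations from this key. The stride-4 int[] flattening follows the (format, width, height, input?) tuple description above, but the exact encoding is an assumption:

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import java.util.ArrayList;
    import java.util.List;

    public class StreamConfigs {
        // Sketch: collect the output-direction (format, width, height) entries
        // from the flattened tuple array. The stride-4 layout is an assumption
        // derived from the tuple description in the documentation above.
        static List<int[]> outputConfigurations(CameraCharacteristics characteristics) {
            int[] configs = characteristics.get(
                    CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
            List<int[]> outputs = new ArrayList<int[]>();
            for (int i = 0; i + 3 < configs.length; i += 4) {
                if (configs[i + 3]
                        == CameraMetadata.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                    outputs.add(new int[] { configs[i], configs[i + 1], configs[i + 2] });
                }
            }
            return outputs;
        }
    }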
+
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination.</p>
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} for more details about
+ * calculating the max frame rate.</p>
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ */
+ public static final Key<long[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
+ new Key<long[]>("android.scaler.availableMinFrameDurations", long[].class);
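[Editor's illustration] Applying the max-of-minimums rule above, a minimal sketch. The stride-4 (format, width, height, durationNs) long[] layout is an assumption, and each configured stream is described here by a hypothetical {format, width, height} triple:

    public class MinFrameDurations {
        // Sketch: the request-level minimum frame duration is the maximum of
        // the per-stream minimums, per the rule above. The stride-4 layout of
        // the long[] (format, width, height, durationNs) is an assumption.
        static long minFrameDurationNs(long[] minDurations, long[][] streams) {
            long result = 0;
            for (long[] stream : streams) { // each entry is {format, width, height}
                for (int i = 0; i + 3 < minDurations.length; i += 4) {
                    if (minDurations[i] == stream[0]
                            && minDurations[i + 1] == stream[1]
                            && minDurations[i + 2] == stream[2]) {
                        result = Math.max(result, minDurations[i + 3]);
                    }
                }
            }
            return result;
        }
    }

For instance, a result of 33333333 ns corresponds to a ceiling of roughly 30 fps for that stream combination.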
+
+ /**
+ * <p>This lists the maximum stall duration for each
+ * format/size combination.</p>
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>For example, consider JPEG captures which have the following
+ * characteristics:</p>
+ * <ul>
+ * <li>JPEG streams act like processed YUV streams in requests for which
+ * they are not included; in requests in which they are directly
+ * referenced, they act as JPEG streams. This is because supporting a
+ * JPEG stream requires the underlying YUV data to always be ready for
+ * use by a JPEG encoder, but the encoder will only be used (and impact
+ * frame duration) on requests that actually reference a JPEG stream.</li>
+ * <li>The JPEG processor can run concurrently with the rest of the camera
+ * pipeline, but cannot process more than one capture at a time.</li>
+ * </ul>
+ * <p>In other words, using a repeating YUV request would result
+ * in a steady frame rate (let's say it's 30 FPS). If a single
+ * JPEG request is submitted periodically, the frame rate will stay
+ * at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then
+ * the frame rate will drop from 30 FPS.</p>
+ * <p>In general, submitting a new request that includes a stream with a
+ * non-zero stall time will <em>not</em> cause a frame rate drop unless there
+ * are still outstanding buffers for that stream from previous requests.</p>
+ * <p>Submitting a repeating request with a set of streams (call this
+ * <code>S</code>) results in an effective minimum frame duration equal to the
+ * normal minimum frame duration corresponding to <code>S</code> plus the
+ * maximum stall duration for <code>S</code>.</p>
+ * <p>When requests with and without stalling streams are interleaved, a
+ * request will stall by the maximum of the remaining stall times for each
+ * stall-capable stream that still has outstanding buffers.</p>
+ * <p>This means that a stalling request will not have an exposure start
+ * until the stall has completed.</p>
+ * <p>This should correspond to the stall duration when only that stream is
+ * active, with all processing (typically in android.*.mode) set to FAST
+ * or OFF. Setting any of the processing modes to HIGH_QUALITY
+ * effectively results in an indeterminate stall duration for all
+ * streams in a request (the regular stall calculation rules are
+ * ignored).</p>
+ * <p>The following formats may always have a stall duration:</p>
+ * <ul>
+ * <li>JPEG</li>
+ * <li>RAW16</li>
+ * </ul>
+ * <p>The following formats will never have a stall duration:</p>
+ * <ul>
+ * <li>YUV_420_888</li>
+ * <li>IMPLEMENTATION_DEFINED</li>
+ * </ul>
+ * <p>All other formats may or may not have an allowed stall duration on
+ * a per-capability basis; refer to android.request.availableCapabilities
+ * for more details.</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
+ * calculating the max frame rate (absent stalls).</p>
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ */
+ public static final Key<long[]> SCALER_AVAILABLE_STALL_DURATIONS =
+ new Key<long[]>("android.scaler.availableStallDurations", long[].class);
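[Editor's illustration] A worked example with invented numbers, following the "minimum frame duration plus maximum stall" rule above (the 100 ms JPEG stall value is hypothetical; a real value would come from this key):

    public class StallExample {
        public static void main(String[] args) {
            long minFrameNs = 33333333L;    // ~30 fps, from availableMinFrameDurations
            long jpegStallNs = 100000000L;  // 100 ms; invented value from this key
            long effectiveNs = minFrameNs + jpegStallNs;
            double fps = 1e9 / effectiveNs; // ~7.5 fps instead of ~30 fps
            System.out.println("Effective repeating rate: " + fps + " fps");
        }
    }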
+
+ /**
* <p>Area of raw data which corresponds to only
* active pixels.</p>
* <p>It is smaller or equal to
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 66be290e2157..1ce91f64b135 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -409,6 +409,20 @@ public abstract class CameraMetadata {
public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5;
//
+ // Enumeration values for CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ //
+
+ /**
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ */
+ public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0;
+
+ /**
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ */
+ public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1;
+
+ //
// Enumeration values for CameraCharacteristics#LED_AVAILABLE_LEDS
//
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 9904db7392b0..326205edc22d 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -1141,62 +1141,45 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
* largest requested stream resolution.</li>
* <li>Using more than one output stream in a request does not affect the
* frame duration.</li>
- * <li>JPEG streams act like processed YUV streams in requests for which
- * they are not included; in requests in which they are directly
- * referenced, they act as JPEG streams. This is because supporting a
- * JPEG stream requires the underlying YUV data to always be ready for
- * use by a JPEG encoder, but the encoder will only be used (and impact
- * frame duration) on requests that actually reference a JPEG stream.</li>
- * <li>The JPEG processor can run concurrently to the rest of the camera
- * pipeline, but cannot process more than 1 capture at a time.</li>
+ * <li>Streams of certain formats may need to do additional background
+ * processing before data is consumed/produced by that stream. These
+ * processors can run concurrently with the rest of the camera pipeline, but
+ * cannot process more than one capture at a time.</li>
* </ul>
* <p>The necessary information for the application, given the model above,
- * is provided via the android.scaler.available*MinDurations fields.
+ * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
* These are used to determine the maximum frame rate / minimum frame
* duration that is possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
- * determine the minimum frame duration it can request from the HAL
+ * determine the minimum frame duration it can request from the camera
* device:</p>
* <ol>
- * <li>Given the application's currently configured set of output
- * streams, <code>S</code>, divide them into three sets: streams in a JPEG format
- * <code>SJ</code>, streams in a raw sensor format <code>SR</code>, and the rest ('processed')
- * <code>SP</code>.</li>
- * <li>For each subset of streams, find the largest resolution (by pixel
- * count) in the subset. This gives (at most) three resolutions <code>RJ</code>,
- * <code>RR</code>, and <code>RP</code>.</li>
- * <li>If <code>RJ</code> is greater than <code>RP</code>, set <code>RP</code> equal to <code>RJ</code>. If there is
- * no exact match for <code>RP == RJ</code> (in particular there isn't an available
- * processed resolution at the same size as <code>RJ</code>), then set <code>RP</code> equal
- * to the smallest processed resolution that is larger than <code>RJ</code>. If
- * there are no processed resolutions larger than <code>RJ</code>, then set <code>RJ</code> to
- * the processed resolution closest to <code>RJ</code>.</li>
- * <li>If <code>RP</code> is greater than <code>RR</code>, set <code>RR</code> equal to <code>RP</code>. If there is
- * no exact match for <code>RR == RP</code> (in particular there isn't an available
- * raw resolution at the same size as <code>RP</code>), then set <code>RR</code> equal to
- * or to the smallest raw resolution that is larger than <code>RP</code>. If
- * there are no raw resolutions larger than <code>RP</code>, then set <code>RR</code> to
- * the raw resolution closest to <code>RP</code>.</li>
- * <li>Look up the matching minimum frame durations in the property lists
- * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations},
- * android.scaler.availableRawMinDurations, and
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations}. This gives three
- * minimum frame durations <code>FJ</code>, <code>FR</code>, and <code>FP</code>.</li>
- * <li>If a stream of requests do not use a JPEG stream, then the minimum
- * supported frame duration for each request is <code>max(FR, FP)</code>.</li>
- * <li>If a stream of requests all use the JPEG stream, then the minimum
- * supported frame duration for each request is <code>max(FR, FP, FJ)</code>.</li>
- * <li>If a mix of JPEG-using and non-JPEG-using requests is submitted by
- * the application, then the HAL will have to delay JPEG-using requests
- * whenever the JPEG encoder is still busy processing an older capture.
- * This will happen whenever a JPEG-using request starts capture less
- * than <code>FJ</code> <em>ns</em> after a previous JPEG-using request. The minimum
- * supported frame duration will vary between the values calculated in
- * #6 and #7.</li>
+ * <li>Let the set of currently configured input/output streams
+ * be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by
+ * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+ * its respective size/format). Let this set of frame durations be called
+ * <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, let the streams used in <code>R</code>
+ * be called <code>S_r</code>. The minimum frame duration allowed for <code>R</code> is
+ * then the maximum of the values in <code>F</code> that correspond to the
+ * streams in <code>S_r</code>.</li>
* </ol>
+ * <p>If none of the streams in <code>S_r</code> have a stall time (listed in
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+ * <code>F</code> determines the steady state frame rate that the application will
+ * get if it uses <code>R</code> as a repeating request. Let this special kind
+ * of request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+ * with a single capture of a new request <code>Rstall</code> (which has at least
+ * one in-use stream with a non-zero stall time). As long as <code>Rstall</code> has
+ * the same minimum frame duration, this will not cause a frame rate loss,
+ * provided all buffers from the previous <code>Rstall</code> have already been
+ * delivered.</p>
+ * <p>For more details about stalling, see
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
- * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
*/
public static final Key<Long> SENSOR_FRAME_DURATION =
new Key<Long>("android.sensor.frameDuration", long.class);
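[Editor's illustration] Putting the rules above together, a sketch of choosing a frame duration no smaller than the allowed minimum for the configured streams. This assumes a builder-style set(key, value) API as in the released camera2 API (this pre-release snapshot may instead allow setting keys directly on the request), and it reuses the hypothetical minFrameDurationNs helper sketched earlier under SCALER_AVAILABLE_MIN_FRAME_DURATIONS:

    // Sketch: clamp a desired frame duration to the minimum allowed for the
    // streams in use (rule 3 above), then set it on the request. The builder
    // type and the helper are assumptions, as noted above.
    static void applyFrameDuration(CaptureRequest.Builder requestBuilder,
            long[] minDurations, long[][] configuredStreams) {
        long desiredNs = 33333333L; // ~30 fps target
        long minNs = MinFrameDurations.minFrameDurationNs(
                minDurations, configuredStreams);
        requestBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION,
                Math.max(desiredNs, minNs));
    }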
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 51746ca0139a..f556d71aedcf 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -1375,62 +1375,45 @@ public final class CaptureResult extends CameraMetadata {
* largest requested stream resolution.</li>
* <li>Using more than one output stream in a request does not affect the
* frame duration.</li>
- * <li>JPEG streams act like processed YUV streams in requests for which
- * they are not included; in requests in which they are directly
- * referenced, they act as JPEG streams. This is because supporting a
- * JPEG stream requires the underlying YUV data to always be ready for
- * use by a JPEG encoder, but the encoder will only be used (and impact
- * frame duration) on requests that actually reference a JPEG stream.</li>
- * <li>The JPEG processor can run concurrently to the rest of the camera
- * pipeline, but cannot process more than 1 capture at a time.</li>
+ * <li>Streams of certain formats may need to do additional background
+ * processing before data is consumed/produced by that stream. These
+ * processors can run concurrently with the rest of the camera pipeline, but
+ * cannot process more than one capture at a time.</li>
* </ul>
* <p>The necessary information for the application, given the model above,
- * is provided via the android.scaler.available*MinDurations fields.
+ * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
* These are used to determine the maximum frame rate / minimum frame
* duration that is possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
- * determine the minimum frame duration it can request from the HAL
+ * determine the minimum frame duration it can request from the camera
* device:</p>
* <ol>
- * <li>Given the application's currently configured set of output
- * streams, <code>S</code>, divide them into three sets: streams in a JPEG format
- * <code>SJ</code>, streams in a raw sensor format <code>SR</code>, and the rest ('processed')
- * <code>SP</code>.</li>
- * <li>For each subset of streams, find the largest resolution (by pixel
- * count) in the subset. This gives (at most) three resolutions <code>RJ</code>,
- * <code>RR</code>, and <code>RP</code>.</li>
- * <li>If <code>RJ</code> is greater than <code>RP</code>, set <code>RP</code> equal to <code>RJ</code>. If there is
- * no exact match for <code>RP == RJ</code> (in particular there isn't an available
- * processed resolution at the same size as <code>RJ</code>), then set <code>RP</code> equal
- * to the smallest processed resolution that is larger than <code>RJ</code>. If
- * there are no processed resolutions larger than <code>RJ</code>, then set <code>RJ</code> to
- * the processed resolution closest to <code>RJ</code>.</li>
- * <li>If <code>RP</code> is greater than <code>RR</code>, set <code>RR</code> equal to <code>RP</code>. If there is
- * no exact match for <code>RR == RP</code> (in particular there isn't an available
- * raw resolution at the same size as <code>RP</code>), then set <code>RR</code> equal to
- * or to the smallest raw resolution that is larger than <code>RP</code>. If
- * there are no raw resolutions larger than <code>RP</code>, then set <code>RR</code> to
- * the raw resolution closest to <code>RP</code>.</li>
- * <li>Look up the matching minimum frame durations in the property lists
- * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations},
- * android.scaler.availableRawMinDurations, and
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations}. This gives three
- * minimum frame durations <code>FJ</code>, <code>FR</code>, and <code>FP</code>.</li>
- * <li>If a stream of requests do not use a JPEG stream, then the minimum
- * supported frame duration for each request is <code>max(FR, FP)</code>.</li>
- * <li>If a stream of requests all use the JPEG stream, then the minimum
- * supported frame duration for each request is <code>max(FR, FP, FJ)</code>.</li>
- * <li>If a mix of JPEG-using and non-JPEG-using requests is submitted by
- * the application, then the HAL will have to delay JPEG-using requests
- * whenever the JPEG encoder is still busy processing an older capture.
- * This will happen whenever a JPEG-using request starts capture less
- * than <code>FJ</code> <em>ns</em> after a previous JPEG-using request. The minimum
- * supported frame duration will vary between the values calculated in
- * #6 and #7.</li>
+ * <li>Let the set of currently configured input/output streams
+ * be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by
+ * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+ * its respective size/format). Let this set of frame durations be called
+ * <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, let the streams used in <code>R</code>
+ * be called <code>S_r</code>. The minimum frame duration allowed for <code>R</code> is
+ * then the maximum of the values in <code>F</code> that correspond to the
+ * streams in <code>S_r</code>.</li>
* </ol>
+ * <p>If none of the streams in <code>S_r</code> have a stall time (listed in
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+ * <code>F</code> determines the steady state frame rate that the application will
+ * get if it uses <code>R</code> as a repeating request. Let this special kind
+ * of request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+ * with a single capture of a new request <code>Rstall</code> (which has at least
+ * one in-use stream with a non-zero stall time). As long as <code>Rstall</code> has
+ * the same minimum frame duration, this will not cause a frame rate loss,
+ * provided all buffers from the previous <code>Rstall</code> have already been
+ * delivered.</p>
+ * <p>For more details about stalling, see
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
- * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
*/
public static final Key<Long> SENSOR_FRAME_DURATION =
new Key<Long>("android.sensor.frameDuration", long.class);