/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.hardware.camera2.params; import android.graphics.ImageFormat; import android.graphics.PixelFormat; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.utils.HashCodeHelpers; import android.hardware.camera2.utils.SurfaceUtils; import android.hardware.camera2.legacy.LegacyCameraDevice; import android.hardware.camera2.legacy.LegacyMetadataMapper; import android.view.Surface; import android.util.Range; import android.util.Size; import android.util.SparseIntArray; import java.util.Arrays; import java.util.HashMap; import java.util.Objects; import java.util.Set; import static com.android.internal.util.Preconditions.*; /** * Immutable class to store the available stream * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up * {@link android.view.Surface Surfaces} for creating a * {@link android.hardware.camera2.CameraCaptureSession capture session} with * {@link android.hardware.camera2.CameraDevice#createCaptureSession}. * * *
<p>This is the authoritative list for all output formats (and sizes respectively
 * for that format) that are supported by a camera device.</p>
* <p>This also contains the minimum frame durations and stall durations for each format/size
 * combination that can be used to calculate effective frame rate when submitting multiple
 * captures.</p>
 *
* <p>An instance of this object is available from {@link CameraCharacteristics} using
 * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
 * {@link CameraCharacteristics#get} method.</p>
* *{@code
* CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
* StreamConfigurationMap configs = characteristics.get(
* CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
* }
*
* @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
* @see CameraDevice#createCaptureSession
*/
public final class StreamConfigurationMap {
// Log tag used by this class for debug/error messages.
private static final String TAG = "StreamConfigurationMap";
/**
* Create a new {@link StreamConfigurationMap}.
*
* The array parameters ownership is passed to this object after creation; do not * write to them after this constructor is invoked.
* * @param configurations a non-{@code null} array of {@link StreamConfiguration} * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration} * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration} * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if * camera device does not support high speed video recording * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE * and thus needs a separate list of slow high-resolution output sizes * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations * were {@code null} or any subelements were {@code null} * * @hide */ public StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution) { if (configurations == null) { // If no color configurations exist, ensure depth ones do checkArrayElementsNotNull(depthConfigurations, "depthConfigurations"); mConfigurations = new StreamConfiguration[0]; mMinFrameDurations = new StreamConfigurationDuration[0]; mStallDurations = new StreamConfigurationDuration[0]; } else { mConfigurations = checkArrayElementsNotNull(configurations, "configurations"); mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations"); mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations"); } mListHighResolution = listHighResolution; if (depthConfigurations == null) { mDepthConfigurations = new StreamConfiguration[0]; mDepthMinFrameDurations = new StreamConfigurationDuration[0]; mDepthStallDurations = new 
StreamConfigurationDuration[0]; } else { mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations, "depthConfigurations"); mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations, "depthMinFrameDurations"); mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations, "depthStallDurations"); } if (highSpeedVideoConfigurations == null) { mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0]; } else { mHighSpeedVideoConfigurations = checkArrayElementsNotNull( highSpeedVideoConfigurations, "highSpeedVideoConfigurations"); } // For each format, track how many sizes there are available to configure for (StreamConfiguration config : mConfigurations) { int fmt = config.getFormat(); SparseIntArray map = null; if (config.isOutput()) { mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1); long duration = 0; if (mListHighResolution) { for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) { if (configurationDuration.getFormat() == fmt && configurationDuration.getWidth() == config.getSize().getWidth() && configurationDuration.getHeight() == config.getSize().getHeight()) { duration = configurationDuration.getDuration(); break; } } } map = duration <= DURATION_20FPS_NS ? 
mOutputFormats : mHighResOutputFormats; } else { map = mInputFormats; } map.put(fmt, map.get(fmt) + 1); } // For each depth format, track how many sizes there are available to configure for (StreamConfiguration config : mDepthConfigurations) { if (!config.isOutput()) { // Ignoring input depth configs continue; } mDepthOutputFormats.put(config.getFormat(), mDepthOutputFormats.get(config.getFormat()) + 1); } if (configurations != null && mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) { throw new AssertionError( "At least one stream configuration for IMPLEMENTATION_DEFINED must exist"); } // For each Size/FPS range, track how many FPS range/Size there are available for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) { Size size = config.getSize(); RangeAll image formats returned by this function will be defined in either {@link ImageFormat} * or in {@link PixelFormat} (and there is no possibility of collision).
* <p>Formats listed in this array are guaranteed to return {@code true} if queried with
 * {@link #isOutputSupportedFor(int)}.</p>
* * @return an array of integer format * * @see ImageFormat * @see PixelFormat */ public final int[] getOutputFormats() { return getPublicFormats(/*output*/true); } /** * Get the image {@code format} output formats for a reprocessing input format. * *When submitting a {@link CaptureRequest} with an input Surface of a given format, * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format * listed in the return value of this method. Including any other output Surface as a target * will throw an IllegalArgumentException. If no output format is supported given the input * format, an empty int[] will be returned.
* *All image formats returned by this function will be defined in either {@link ImageFormat} * or in {@link PixelFormat} (and there is no possibility of collision).
* *Formats listed in this array are guaranteed to return true if queried with * {@link #isOutputSupportedFor(int)}.
* * @return an array of integer format * * @see ImageFormat * @see PixelFormat */ public final int[] getValidOutputFormatsForInput(int inputFormat) { if (mInputOutputFormatsMap == null) { return new int[0]; } return mInputOutputFormatsMap.getOutputs(inputFormat); } /** * Get the image {@code format} input formats in this stream configuration. * *All image formats returned by this function will be defined in either {@link ImageFormat} * or in {@link PixelFormat} (and there is no possibility of collision).
* * @return an array of integer format * * @see ImageFormat * @see PixelFormat */ public final int[] getInputFormats() { return getPublicFormats(/*output*/false); } /** * Get the supported input sizes for this input format. * *The format must have come from {@link #getInputFormats}; otherwise * {@code null} is returned.
* * @param format a format from {@link #getInputFormats} * @return a non-empty array of sizes, or {@code null} if the format was not available. */ public Size[] getInputSizes(final int format) { return getPublicFormatSizes(format, /*output*/false, /*highRes*/false); } /** * Determine whether or not output surfaces with a particular user-defined format can be passed * {@link CameraDevice#createCaptureSession createCaptureSession}. * *This method determines that the output {@code format} is supported by the camera device; * each output {@code surface} target may or may not itself support that {@code format}. * Refer to the class which provides the surface for additional documentation.
* *Formats for which this returns {@code true} are guaranteed to exist in the result * returned by {@link #getOutputSizes}.
* * @param format an image format from either {@link ImageFormat} or {@link PixelFormat} * @return * {@code true} iff using a {@code surface} with this {@code format} will be * supported with {@link CameraDevice#createCaptureSession} * * @throws IllegalArgumentException * if the image format was not a defined named constant * from either {@link ImageFormat} or {@link PixelFormat} * * @see ImageFormat * @see PixelFormat * @see CameraDevice#createCaptureSession */ public boolean isOutputSupportedFor(int format) { checkArgumentFormat(format); int internalFormat = imageFormatToInternal(format); int dataspace = imageFormatToDataspace(format); if (dataspace == HAL_DATASPACE_DEPTH) { return mDepthOutputFormats.indexOfKey(internalFormat) >= 0; } else { return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0; } } /** * Determine whether or not output streams can be configured with a particular class * as a consumer. * *The following list is generally usable for outputs: *
Generally speaking this means that creating a {@link Surface} from that class may * provide a producer endpoint that is suitable to be used with * {@link CameraDevice#createCaptureSession}.
* *Since not all of the above classes support output of all format and size combinations, * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.
* * @param klass a non-{@code null} {@link Class} object reference * @return {@code true} if this class is supported as an output, {@code false} otherwise * * @throws NullPointerException if {@code klass} was {@code null} * * @see CameraDevice#createCaptureSession * @see #isOutputSupportedFor(Surface) */ public staticNot all surfaces are usable with the {@link CameraDevice}, and not all configurations * of that {@code surface} are compatible. Some classes that provide the {@code surface} are * compatible with the {@link CameraDevice} in general * (see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the * {@code surface} into a state that will be compatible with the {@link CameraDevice}.
* *Reasons for a {@code surface} being specifically incompatible might be: *
Surfaces from flexible sources will return true even if the exact size of the Surface does
 * not match a camera-supported size, as long as the format (or class) is supported and the
 * camera device supports a size that is equal to or less than 1080p in that format. If such a
 * Surface is used to create a capture session, it will have its size rounded to the nearest
 * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
 * and ImageReader.
* *This is not an exhaustive list; see the particular class's documentation for further * possible reasons of incompatibility.
* * @param surface a non-{@code null} {@link Surface} object reference * @return {@code true} if this is supported, {@code false} otherwise * * @throws NullPointerException if {@code surface} was {@code null} * @throws IllegalArgumentException if the Surface endpoint is no longer valid * * @see CameraDevice#createCaptureSession * @see #isOutputSupportedFor(Class) */ public boolean isOutputSupportedFor(Surface surface) { checkNotNull(surface, "surface must not be null"); Size surfaceSize = SurfaceUtils.getSurfaceSize(surface); int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface); int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface); // See if consumer is flexible. boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface); // Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482 if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 && surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) { surfaceFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; } StreamConfiguration[] configs = surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations; for (StreamConfiguration config : configs) { if (config.getFormat() == surfaceFormat && config.isOutput()) { // Matching format, either need exact size match, or a flexible consumer // and a size no bigger than MAX_DIMEN_FOR_ROUNDING if (config.getSize().equals(surfaceSize)) { return true; } else if (isFlexible && (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) { return true; } } } return false; } /** * Get a list of sizes compatible with {@code klass} to use as an output. * *Some of the supported classes may support additional formats beyond * {@link ImageFormat#PRIVATE}; this function only returns * sizes for {@link ImageFormat#PRIVATE}. 
For example, {@link android.media.ImageReader} * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader} * class.
* *If a well-defined format such as {@code NV21} is required, use * {@link #getOutputSizes(int)} instead.
* <p>The {@code klass} should be a supported output; i.e. querying
 * {@link #isOutputSupportedFor(Class)} should return {@code true}.</p>
* * @param klass * a non-{@code null} {@link Class} object reference * @return * an array of supported sizes for {@link ImageFormat#PRIVATE} format, * or {@code null} iff the {@code klass} is not a supported output. * * * @throws NullPointerException if {@code klass} was {@code null} * * @see #isOutputSupportedFor(Class) */ publicThe {@code format} should be a supported format (one of the formats returned by * {@link #getOutputFormats}).
* * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices * that support the * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE} * capability to get a list of high-resolution output sizes that cannot operate at the preferred * 20fps rate. This means that for some supported formats, this method will return an empty * list, if all the supported resolutions operate at below 20fps. For devices that do not * support the BURST_CAPTURE capability, all output resolutions are listed through this method. * * @param format an image format from {@link ImageFormat} or {@link PixelFormat} * @return * an array of supported sizes, * or {@code null} if the {@code format} is not a supported output * * @see ImageFormat * @see PixelFormat * @see #getOutputFormats */ public Size[] getOutputSizes(int format) { return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false); } /** * Get a list of supported high speed video recording sizes. ** When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will * list the supported high speed video size configurations. All the sizes listed will be a * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.) *
* <p> To enable high speed video recording, application must create a constrained high speed
 * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
 * a CaptureRequest list created by
 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
 * to this session. The application must select the video size from this method and
 * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
 * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
 * generate the high speed request list. For example, if the application intends to do high
 * speed recording, it can select the maximum size reported by this method to create high speed
 * capture session. Note that for the use case of multiple output streams, application must
 * select one unique size from this method to use (e.g., preview and recording streams must have
 * the same size). Otherwise, the high speed session creation will fail. Once the size is
 * selected, application can get the supported FPS ranges by
 * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
 * request lists via
 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
 * </p>
 *
* * @return an array of supported high speed video recording sizes * @see #getHighSpeedVideoFpsRangesFor(Size) * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO * @see CameraDevice#createConstrainedHighSpeedCaptureSession * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList */ public Size[] getHighSpeedVideoSizes() { Set* See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording. *
** The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method * must not be used to setup capture requests that are submitted to unconstrained capture * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}. *
** See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges. *
* * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()} * @return an array of supported high speed video recording FPS ranges The upper bound of * returned ranges is guaranteed to be greater than or equal to 120. * @throws IllegalArgumentException if input size does not exist in the return value of * getHighSpeedVideoSizes * @see #getHighSpeedVideoSizes() * @see #getHighSpeedVideoFpsRanges() */ public Range* When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will * list the supported high speed video FPS range configurations. Application can then use * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range. *
* <p> To enable high speed video recording, application must create a constrained high speed
 * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
 * a CaptureRequest list created by
 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
 * to this session. The application must select the video size from this method and
 * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
 * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
 * generate the high speed request list. For example, if the application intends to do high
 * speed recording, it can select one FPS range reported by this method, query the video sizes
 * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
 * sizes to create a high speed capture session. Note that for the use case of multiple output
 * streams, application must select one unique size from this method to use (e.g., preview and
 * recording streams must have the same size). Otherwise, the high speed session creation will
 * fail. Once the high speed capture session is created, the application can set the FPS range
 * in the recording request lists via
 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
 * </p>
 *
** The FPS ranges reported by this method will have below characteristics: *
See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.
* * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()} * @return An array of video sizes to create high speed capture sessions for high speed streaming * use cases. * * @throws IllegalArgumentException if input FPS range does not exist in the return value of * getHighSpeedVideoFpsRanges * @see #getHighSpeedVideoFpsRanges() */ public Size[] getHighSpeedVideoSizesFor(RangeThis includes all output sizes that cannot meet the 20 fps frame rate requirements for the * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE} * capability. This does not include the stall duration, so for example, a JPEG or RAW16 output * resolution with a large stall duration but a minimum frame duration that's above 20 fps will * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are * still guaranteed to operate at a rate of at least 10 fps, not including stall duration.
* *For a device that does not support the BURST_CAPTURE capability, this list will be * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not * guaranteed to meet >= 20 fps rate requirements. For a device that does support the * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20 * fps requirement.
* * @return an array of supported slower high-resolution sizes, or {@code null} if the * BURST_CAPTURE capability is not supported */ public Size[] getHighResolutionOutputSizes(int format) { if (!mListHighResolution) return null; return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true); } /** * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration} * for the format/size combination (in nanoseconds). * *{@code format} should be one of the ones returned by {@link #getOutputFormats()}.
*{@code size} should be one of the ones returned by * {@link #getOutputSizes(int)}.
* *This should correspond to the frame duration when only that stream is active, with all * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}. *
* *When multiple streams are used in a request, the minimum frame duration will be * {@code max(individual stream min durations)}.
* *For devices that do not support manual sensor control * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}), * this function may return 0.
* * * * @param format an image format from {@link ImageFormat} or {@link PixelFormat} * @param size an output-compatible size * @return a minimum frame duration {@code >} 0 in nanoseconds, or * 0 if the minimum frame duration is not available. * * @throws IllegalArgumentException if {@code format} or {@code size} was not supported * @throws NullPointerException if {@code size} was {@code null} * * @see CaptureRequest#SENSOR_FRAME_DURATION * @see #getOutputStallDuration(int, Size) * @see ImageFormat * @see PixelFormat */ public long getOutputMinFrameDuration(int format, Size size) { checkNotNull(size, "size must not be null"); checkArgumentFormatSupported(format, /*output*/true); return getInternalFormatDuration(imageFormatToInternal(format), imageFormatToDataspace(format), size, DURATION_MIN_FRAME); } /** * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration} * for the class/size combination (in nanoseconds). * *This assumes a the {@code klass} is set up to use {@link ImageFormat#PRIVATE}. * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.
* *{@code klass} should be one of the ones which is supported by * {@link #isOutputSupportedFor(Class)}.
* *{@code size} should be one of the ones returned by * {@link #getOutputSizes(int)}.
* *This should correspond to the frame duration when only that stream is active, with all * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}. *
* *When multiple streams are used in a request, the minimum frame duration will be * {@code max(individual stream min durations)}.
* *For devices that do not support manual sensor control * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}), * this function may return 0.
* * * * @param klass * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a * non-empty array returned by {@link #getOutputSizes(Class)} * @param size an output-compatible size * @return a minimum frame duration {@code >} 0 in nanoseconds, or * 0 if the minimum frame duration is not available. * * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported * @throws NullPointerException if {@code size} or {@code klass} was {@code null} * * @see CaptureRequest#SENSOR_FRAME_DURATION * @see ImageFormat * @see PixelFormat */ public{@code format} should be one of the ones returned by {@link #getOutputFormats()}.
*{@code size} should be one of the ones returned by * {@link #getOutputSizes(int)}.
* ** A stall duration is how much extra time would get added to the normal minimum frame duration * for a repeating request that has streams with non-zero stall. * *
For example, consider JPEG captures which have the following characteristics: * *
In other words, using a repeating YUV request would result in a steady frame rate * (let's say it's 30 FPS). If a single JPEG request is submitted periodically, * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from * 30 FPS.
* *In general, submitting a new request with a non-0 stall time stream will not cause a * frame rate drop unless there are still outstanding buffers for that stream from previous * requests.
* *Submitting a repeating request with streams (call this {@code S}) is the same as setting * the minimum frame duration from the normal minimum frame duration corresponding to {@code S}, * added with the maximum stall duration for {@code S}.
* *If interleaving requests with and without a stall duration, a request will stall by the * maximum of the remaining times for each can-stall stream with outstanding buffers.
* *This means that a stalling request will not have an exposure start until the stall has * completed.
* *This should correspond to the stall duration when only that stream is active, with all * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}. * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an * indeterminate stall duration for all streams in a request (the regular stall calculation * rules are ignored).
* *The following formats may always have a stall duration: *
The following formats will never have a stall duration: *
* All other formats may or may not have an allowed stall duration on a per-capability basis; * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * android.request.availableCapabilities} for more details.
* * *See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} * for more information about calculating the max frame rate (absent stalls).
* * @param format an image format from {@link ImageFormat} or {@link PixelFormat} * @param size an output-compatible size * @return a stall duration {@code >=} 0 in nanoseconds * * @throws IllegalArgumentException if {@code format} or {@code size} was not supported * @throws NullPointerException if {@code size} was {@code null} * * @see CaptureRequest#SENSOR_FRAME_DURATION * @see ImageFormat * @see PixelFormat */ public long getOutputStallDuration(int format, Size size) { checkArgumentFormatSupported(format, /*output*/true); return getInternalFormatDuration(imageFormatToInternal(format), imageFormatToDataspace(format), size, DURATION_STALL); } /** * Get the stall duration for the class/size combination (in nanoseconds). * *This assumes a the {@code klass} is set up to use {@link ImageFormat#PRIVATE}. * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.
* *{@code klass} should be one of the ones with a non-empty array returned by * {@link #getOutputSizes(Class)}.
* *{@code size} should be one of the ones returned by * {@link #getOutputSizes(Class)}.
* *See {@link #getOutputStallDuration(int, Size)} for a definition of a * stall duration.
* * @param klass * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a * non-empty array returned by {@link #getOutputSizes(Class)} * @param size an output-compatible size * @return a minimum frame duration {@code >=} 0 in nanoseconds * * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported * @throws NullPointerException if {@code size} or {@code klass} was {@code null} * * @see CaptureRequest#SENSOR_FRAME_DURATION * @see ImageFormat * @see PixelFormat */ publicTwo vectors are only equal if and only if each of the respective elements is equal.
* * @return {@code true} if the objects were equal, {@code false} otherwise */ @Override public boolean equals(final Object obj) { if (obj == null) { return false; } if (this == obj) { return true; } if (obj instanceof StreamConfigurationMap) { final StreamConfigurationMap other = (StreamConfigurationMap) obj; // XX: do we care about order? return Arrays.equals(mConfigurations, other.mConfigurations) && Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) && Arrays.equals(mStallDurations, other.mStallDurations) && Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) && Arrays.equals(mHighSpeedVideoConfigurations, other.mHighSpeedVideoConfigurations); } return false; } /** * {@inheritDoc} */ @Override public int hashCode() { // XX: do we care about order? return HashCodeHelpers.hashCodeGeneric( mConfigurations, mMinFrameDurations, mStallDurations, mDepthConfigurations, mHighSpeedVideoConfigurations); } // Check that the argument is supported by #getOutputFormats or #getInputFormats private int checkArgumentFormatSupported(int format, boolean output) { checkArgumentFormat(format); int internalFormat = imageFormatToInternal(format); int internalDataspace = imageFormatToDataspace(format); if (output) { if (internalDataspace == HAL_DATASPACE_DEPTH) { if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) { return format; } } else { if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) { return format; } } } else { if (mInputFormats.indexOfKey(internalFormat) >= 0) { return format; } } throw new IllegalArgumentException(String.format( "format %x is not supported by this stream configuration map", format)); } /** * Ensures that the format is either user-defined or implementation defined. * *If a format has a different internal representation than the public representation, * passing in the public representation here will fail.
* *For example if trying to use {@link ImageFormat#JPEG}: * it has a different public representation than the internal representation * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.
* *Any invalid/undefined formats will raise an exception.
* * @param format image format
 * @return the format
 *
 * @throws IllegalArgumentException if the format was invalid
 */
static int checkArgumentFormatInternal(int format) {
    // Known-good internal (graphics.h) formats pass through untouched.
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
            || format == HAL_PIXEL_FORMAT_BLOB
            || format == HAL_PIXEL_FORMAT_RAW_OPAQUE
            || format == HAL_PIXEL_FORMAT_Y16) {
        return format;
    }
    // JPEG is a public constant; internally JPEG data is carried as HAL_PIXEL_FORMAT_BLOB.
    if (format == ImageFormat.JPEG) {
        throw new IllegalArgumentException(
                "ImageFormat.JPEG is an unknown internal format");
    }
    // Anything else must at least be a valid public format.
    return checkArgumentFormat(format);
}

/**
 * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
 *
 * <p>If a format has a different public representation than the internal representation,
 * passing in the internal representation here will fail.</p>
* *For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}: * it has a different internal representation than the public representation * {@link ImageFormat#JPEG}, this check will fail.
* *Any invalid/undefined formats will raise an exception, including implementation-defined. *
* *Note that {@code @hide} and deprecated formats will not pass this check.
* * @param format image format * @return the format * * @throws IllegalArgumentException if the format was not user-defined */ static int checkArgumentFormat(int format) { if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) { throw new IllegalArgumentException(String.format( "format 0x%x was not defined in either ImageFormat or PixelFormat", format)); } return format; } /** * Convert an internal format compatible with {@code graphics.h} into public-visible * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH. * *In particular these formats are converted: *
Passing in a format which has no public equivalent will fail; * as will passing in a public format which has a different internal format equivalent. * See {@link #checkArgumentFormat} for more details about a legal public format.
* *All other formats are returned as-is, no further invalid check is performed.
* *This function is the dual of {@link #imageFormatToInternal} for dataspaces other than * HAL_DATASPACE_DEPTH.
* * @param format image format from {@link ImageFormat} or {@link PixelFormat} * @return the converted image formats * * @throws IllegalArgumentException * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or * {@link ImageFormat#JPEG} * * @see ImageFormat * @see PixelFormat * @see #checkArgumentFormat */ static int imageFormatToPublic(int format) { switch (format) { case HAL_PIXEL_FORMAT_BLOB: return ImageFormat.JPEG; case ImageFormat.JPEG: throw new IllegalArgumentException( "ImageFormat.JPEG is an unknown internal format"); default: return format; } } /** * Convert an internal format compatible with {@code graphics.h} into public-visible * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH. * *In particular these formats are converted: *
Passing in an implementation-defined format which has no public equivalent will fail; * as will passing in a public format which has a different internal format equivalent. * See {@link #checkArgumentFormat} for more details about a legal public format.
* *All other formats are returned as-is, no further invalid check is performed.
* *This function is the dual of {@link #imageFormatToInternal} for formats associated with * HAL_DATASPACE_DEPTH.
* * @param format image format from {@link ImageFormat} or {@link PixelFormat} * @return the converted image formats * * @throws IllegalArgumentException * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or * {@link ImageFormat#JPEG} * * @see ImageFormat * @see PixelFormat * @see #checkArgumentFormat */ static int depthFormatToPublic(int format) { switch (format) { case HAL_PIXEL_FORMAT_BLOB: return ImageFormat.DEPTH_POINT_CLOUD; case HAL_PIXEL_FORMAT_Y16: return ImageFormat.DEPTH16; case ImageFormat.JPEG: throw new IllegalArgumentException( "ImageFormat.JPEG is an unknown internal format"); case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: throw new IllegalArgumentException( "IMPLEMENTATION_DEFINED must not leak to public API"); default: throw new IllegalArgumentException( "Unknown DATASPACE_DEPTH format " + format); } } /** * Convert image formats from internal to public formats (in-place). * * @param formats an array of image formats * @return {@code formats} * * @see #imageFormatToPublic */ static int[] imageFormatToPublic(int[] formats) { if (formats == null) { return null; } for (int i = 0; i < formats.length; ++i) { formats[i] = imageFormatToPublic(formats[i]); } return formats; } /** * Convert a public format compatible with {@code ImageFormat} to an internal format * from {@code graphics.h}. * *In particular these formats are converted: *
Passing in an internal format which has a different public format equivalent will fail. * See {@link #checkArgumentFormat} for more details about a legal public format.
* *All other formats are returned as-is, no invalid check is performed.
* *This function is the dual of {@link #imageFormatToPublic}.
* * @param format public image format from {@link ImageFormat} or {@link PixelFormat} * @return the converted image formats * * @see ImageFormat * @see PixelFormat * * @throws IllegalArgumentException * if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} */ static int imageFormatToInternal(int format) { switch (format) { case ImageFormat.JPEG: case ImageFormat.DEPTH_POINT_CLOUD: return HAL_PIXEL_FORMAT_BLOB; case ImageFormat.DEPTH16: return HAL_PIXEL_FORMAT_Y16; default: return format; } } /** * Convert a public format compatible with {@code ImageFormat} to an internal dataspace * from {@code graphics.h}. * *In particular these formats are converted: *
Passing in an implementation-defined format here will fail (it's not a public format); * as will passing in an internal format which has a different public format equivalent. * See {@link #checkArgumentFormat} for more details about a legal public format.
* *All other formats are returned as-is, no invalid check is performed.
* *This function is the dual of {@link #imageFormatToPublic}.
* * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
 * @return the converted dataspace (HAL_DATASPACE_UNKNOWN for formats with no
 *          special dataspace mapping)
 *
 * @see ImageFormat
 * @see PixelFormat
 */
static int imageFormatToDataspace(int format) {
    switch (format) {
        case ImageFormat.JPEG:
            return HAL_DATASPACE_V0_JFIF;
        case ImageFormat.DEPTH_POINT_CLOUD:
        case ImageFormat.DEPTH16:
            // Both depth formats live in the DEPTH dataspace.
            return HAL_DATASPACE_DEPTH;
        default:
            // No dataspace information is carried for other formats.
            return HAL_DATASPACE_UNKNOWN;
    }
}

/**
 * Convert image formats from public to internal formats (in-place).
 *
 * @param formats an array of image formats
 * @return {@code formats}
 *
 * @see #imageFormatToInternal
 *
 * @hide
 */
public static int[] imageFormatToInternal(int[] formats) {
    if (formats == null) {
        return null;
    }
    // In-place: each element is overwritten with its internal equivalent.
    for (int i = 0; i < formats.length; ++i) {
        formats[i] = imageFormatToInternal(formats[i]);
    }
    return formats;
}

/**
 * Look up the supported sizes for a public {@code format}.
 *
 * @return the sizes, or {@code null} if the format is not supported at all
 *          (unsupported formats do NOT throw here, unlike the internal lookup)
 */
private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
    try {
        checkArgumentFormatSupported(format, output);
    } catch (IllegalArgumentException e) {
        // Deliberate best-effort: map "unsupported" to null rather than propagating.
        return null;
    }

    int internalFormat = imageFormatToInternal(format);
    int dataspace = imageFormatToDataspace(format);

    return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
}

/**
 * Look up the sizes for an internal {@code format}/{@code dataspace} combination.
 *
 * <p>When the device lists high-resolution outputs ({@code mListHighResolution}),
 * non-depth output sizes are split into "fast" (min frame duration <= 20fps threshold)
 * and "slow high-res" buckets; {@code highRes} selects which bucket is returned.</p>
 *
 * @throws IllegalArgumentException if the format is not available at all
 * @throws AssertionError if the configuration list yields fewer sizes than the
 *          per-format count table promised (internal consistency check)
 */
private Size[] getInternalFormatSizes(int format, int dataspace,
        boolean output, boolean highRes) {
    // All depth formats are non-high-res.
    if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
        return new Size[0];
    }

    // Pick the format -> size-count table for this direction/dataspace/bucket.
    SparseIntArray formatsMap =
            !output ? mInputFormats :
            dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
            highRes ? mHighResOutputFormats :
            mOutputFormats;

    int sizesCount = formatsMap.get(format);
    if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0)
            || (output && dataspace != HAL_DATASPACE_DEPTH
                    && mAllOutputFormats.get(format) == 0)) {
        // Only throw if this is really not supported at all
        // (a zero count in one bucket is fine when the other bucket has sizes).
        throw new IllegalArgumentException("format not available");
    }

    Size[] sizes = new Size[sizesCount];
    int sizeIndex = 0;

    StreamConfiguration[] configurations =
            (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
    StreamConfigurationDuration[] minFrameDurations =
            (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : mMinFrameDurations;

    for (StreamConfiguration config : configurations) {
        int fmt = config.getFormat();
        if (fmt == format && config.isOutput() == output) {
            if (output && mListHighResolution) {
                // Filter slow high-res output formats; include for
                // highRes, remove for !highRes
                long duration = 0;
                for (int i = 0; i < minFrameDurations.length; i++) {
                    StreamConfigurationDuration d = minFrameDurations[i];
                    if (d.getFormat() == fmt &&
                            d.getWidth() == config.getSize().getWidth() &&
                            d.getHeight() == config.getSize().getHeight()) {
                        duration = d.getDuration();
                        break;
                    }
                }
                // Depth sizes are never bucketed; other formats must match the
                // requested bucket (slow == duration above the 20fps threshold).
                if (dataspace != HAL_DATASPACE_DEPTH &&
                        highRes != (duration > DURATION_20FPS_NS)) {
                    continue;
                }
            }
            sizes[sizeIndex++] = config.getSize();
        }
    }

    if (sizeIndex != sizesCount) {
        throw new AssertionError(
                "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
    }

    return sizes;
}

/** Get the list of publicly visible output formats; does not include IMPL_DEFINED */
private int[] getPublicFormats(boolean output) {
    int[] formats = new int[getPublicFormatCount(output)];

    int i = 0;

    SparseIntArray map = getFormatsMap(output);
    for (int j = 0; j < map.size(); j++) {
        int format = map.keyAt(j);
        formats[i++] = imageFormatToPublic(format);
    }
    if (output) {
        // Depth outputs are tracked separately and converted with depth-specific names.
        for (int j = 0; j < mDepthOutputFormats.size(); j++) {
            formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
        }
    }
    if (formats.length != i) {
        throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
    }

    return formats;
}

/** Get the format -> size count map for either output or input formats */
private SparseIntArray getFormatsMap(boolean output) {
    return output ? mAllOutputFormats : mInputFormats;
}

/**
 * Look up the duration (min-frame or stall, per {@code duration}) for an internal
 * format/dataspace/size combination.
 *
 * @return the duration in nanoseconds, or {@code 0} when no entry exists
 *          (no extra stall / unsupported)
 * @throws IllegalArgumentException if the size is not supported for this format
 */
private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
    // assume format is already checked, since its internal
    if (!isSupportedInternalConfiguration(format, dataspace, size)) {
        throw new IllegalArgumentException("size was not supported");
    }

    StreamConfigurationDuration[] durations = getDurations(duration, dataspace);

    for (StreamConfigurationDuration configurationDuration : durations) {
        if (configurationDuration.getFormat() == format &&
                configurationDuration.getWidth() == size.getWidth() &&
                configurationDuration.getHeight() == size.getHeight()) {
            return configurationDuration.getDuration();
        }
    }
    // Default duration is '0' (unsupported/no extra stall)
    return 0;
}

/**
 * Get the durations array for the kind of duration
 *
 * @see #DURATION_MIN_FRAME
 * @see #DURATION_STALL
 *
 * @throws IllegalArgumentException if {@code duration} is neither kind
 */
private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
    switch (duration) {
        case DURATION_MIN_FRAME:
            return (dataspace == HAL_DATASPACE_DEPTH) ?
                    mDepthMinFrameDurations : mMinFrameDurations;
        case DURATION_STALL:
            return (dataspace == HAL_DATASPACE_DEPTH) ?
                    mDepthStallDurations : mStallDurations;
        default:
            throw new IllegalArgumentException("duration was invalid");
    }
}

/** Count the number of publicly-visible output formats */
private int getPublicFormatCount(boolean output) {
    SparseIntArray formatsMap = getFormatsMap(output);
    int size = formatsMap.size();
    if (output) {
        // Depth output formats are tracked in a separate map; add them in.
        size += mDepthOutputFormats.size();
    }

    return size;
}

private static{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d, * format:%s(%d)], ...
[w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput( * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations( * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.
* *{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ... * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output * configuration's width, height, format, minimal frame duration in nanoseconds, and stall * duration in nanoseconds.
* *{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and * format.
* *{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)], * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]} * represents an input format and its valid output formats.</li>
* *{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d], * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output * configuration's width, height, minimal frame rate, and maximal frame rate.
* * @return string representation of {@link StreamConfigurationMap}
 */
@Override
public String toString() {
    // NOTE(review): the javadoc above advertises a "StreamConfigurationMap(" prefix,
    // but the actual prefix appended here is "StreamConfiguration(" — doc/code mismatch.
    StringBuilder sb = new StringBuilder("StreamConfiguration(");
    appendOutputsString(sb);
    sb.append(", ");
    appendHighResOutputsString(sb);
    sb.append(", ");
    appendInputsString(sb);
    sb.append(", ");
    appendValidOutputFormatsForInputString(sb);
    sb.append(", ");
    appendHighSpeedVideoConfigurationsString(sb);
    sb.append(")");

    return sb.toString();
}

/** Append the "Outputs(...)" section: one [w,h,format,min_duration,stall] entry per size. */
private void appendOutputsString(StringBuilder sb) {
    sb.append("Outputs(");
    int[] formats = getOutputFormats();
    for (int format : formats) {
        Size[] sizes = getOutputSizes(format);
        for (Size size : sizes) {
            long minFrameDuration = getOutputMinFrameDuration(format, size);
            long stallDuration = getOutputStallDuration(format, size);
            sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
                    "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
                    format, minFrameDuration, stallDuration));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}

/** Append the "HighResolutionOutputs(...)" section; formats with no high-res sizes skip. */
private void appendHighResOutputsString(StringBuilder sb) {
    sb.append("HighResolutionOutputs(");
    int[] formats = getOutputFormats();
    for (int format : formats) {
        Size[] sizes = getHighResolutionOutputSizes(format);
        if (sizes == null) continue;
        for (Size size : sizes) {
            long minFrameDuration = getOutputMinFrameDuration(format, size);
            long stallDuration = getOutputStallDuration(format, size);
            sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
                    "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
                    format, minFrameDuration, stallDuration));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}

/** Append the "Inputs(...)" section: one [w,h,format] entry per input size. */
private void appendInputsString(StringBuilder sb) {
    sb.append("Inputs(");
    int[] formats = getInputFormats();
    for (int format : formats) {
        Size[] sizes = getInputSizes(format);
        for (Size size : sizes) {
            sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
                    size.getHeight(), formatToString(format), format));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}

/** Append the "ValidOutputFormatsForInput(...)" section: output formats per input format. */
private void appendValidOutputFormatsForInputString(StringBuilder sb) {
    sb.append("ValidOutputFormatsForInput(");
    int[] inputFormats = getInputFormats();
    for (int inputFormat : inputFormats) {
        sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
        int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
        for (int i = 0; i < outputFormats.length; i++) {
            sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
                    outputFormats[i]));
            if (i < outputFormats.length - 1) {
                sb.append(", ");
            }
        }
        sb.append("], ");
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}

// NOTE(review): this definition continues past the visible chunk; tail kept verbatim.
private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
    sb.append("HighSpeedVideoConfigurations(");
    Size[] sizes = getHighSpeedVideoSizes();
    for (Size size : sizes) {
        Range