/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.hardware.camera2; import android.annotation.NonNull; import android.annotation.Nullable; import android.hardware.camera2.impl.CameraMetadataNative; import android.hardware.camera2.impl.CaptureResultExtras; import android.hardware.camera2.impl.PublicKey; import android.hardware.camera2.impl.SyntheticKey; import android.hardware.camera2.utils.TypeReference; import android.util.Log; import android.util.Rational; import java.util.List; /** *
The subset of the results of a single image capture from the image sensor.
* *Contains a subset of the final configuration for the capture hardware (sensor, lens, * flash), the processing pipeline, the control algorithms, and the output * buffers.
* *CaptureResults are produced by a {@link CameraDevice} after processing a * {@link CaptureRequest}. All properties listed for capture requests can also * be queried on the capture result, to determine the final values used for * capture. The result also includes additional metadata about the state of the * camera device during the capture.
* *Not all properties returned by {@link CameraCharacteristics#getAvailableCaptureResultKeys()} * are necessarily available. Some results are {@link CaptureResult partial} and will * not have every key set. Only {@link TotalCaptureResult total} results are guaranteed to have * every key available that was enabled by the request.
* *{@link CaptureResult} objects are immutable.
* */ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { /** * A {@code Key} is used to do capture result field lookups with {@link CaptureResult#get}. * *For example, to get the timestamp corresponding to the exposure of the first row:
* <pre><code>
* long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
* </code></pre>
To enumerate over all possible keys for {@link CaptureResult}, see * {@link CameraCharacteristics#getAvailableCaptureResultKeys}.
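* For illustration, a minimal sketch of walking every available result key and logging the
* values that are present (the {@code characteristics}, {@code captureResult} and {@code TAG}
* names are assumptions, not part of this class):
* <pre>{@code
* List<CaptureResult.Key<?>> keys = characteristics.getAvailableCaptureResultKeys();
* for (CaptureResult.Key<?> key : keys) {
*     Object value = captureResult.get(key);
*     if (value != null) {
*         Log.d(TAG, key.getName() + " = " + value);
*     }
* }
* }</pre>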
* * @see CaptureResult#get * @see CameraCharacteristics#getAvailableCaptureResultKeys */ public final static class Key<T> { /** * Return a camelCase, period-separated name formatted like * {@code "root.section[.subsections].name"}. * *Built-in keys exposed by the Android SDK are always prefixed with {@code "android."}; * keys that are device/platform-specific are prefixed with {@code "com."}.
* *For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device * specific key might look like {@code "com.google.nexus.data.private"}.
* * @return String representation of the key name */ @NonNull public String getName() { return mKey.getName(); } /** * Return vendor tag id. * * @hide */ public long getVendorId() { return mKey.getVendorId(); } /** * {@inheritDoc} */ @Override public final int hashCode() { return mKey.hashCode(); } /** * {@inheritDoc} */ @SuppressWarnings("unchecked") @Override public final boolean equals(Object o) { return o instanceof Key && ((Key<T>) o).mKey.equals(mKey); } /** * Return this {@link Key} as a string representation. * *{@code "CaptureResult.Key(%s)"}, where {@code %s} represents * the name of this key as returned by {@link #getName}.
* * @return string representation of {@link Key} */ @NonNull @Override public String toString() { return String.format("CaptureResult.Key(%s)", mKey.getName()); } /** * Visible for CameraMetadataNative implementation only; do not use. * * TODO: Make this private or remove it altogether. * * @hide */ public CameraMetadataNative.Key<T> getNativeKey() { return mKey; } /** * Takes ownership of the passed-in properties object. * *For internal use only
* @hide */ public CaptureResult(CameraMetadataNative results, CaptureRequest parent, CaptureResultExtras extras) { if (results == null) { throw new IllegalArgumentException("results was null"); } if (parent == null) { throw new IllegalArgumentException("parent was null"); } if (extras == null) { throw new IllegalArgumentException("extras was null"); } mResults = CameraMetadataNative.move(results); if (mResults.isEmpty()) { throw new AssertionError("Results must not be empty"); } setNativeInstance(mResults); mRequest = parent; mSequenceId = extras.getRequestId(); mFrameNumber = extras.getFrameNumber(); } /** * Returns a copy of the underlying {@link CameraMetadataNative}. * @hide */ public CameraMetadataNative getNativeCopy() { return new CameraMetadataNative(mResults); } /** * Creates a request-less result. * *For testing only.
* @hide */ public CaptureResult(CameraMetadataNative results, int sequenceId) { if (results == null) { throw new IllegalArgumentException("results was null"); } mResults = CameraMetadataNative.move(results); if (mResults.isEmpty()) { throw new AssertionError("Results must not be empty"); } setNativeInstance(mResults); mRequest = null; mSequenceId = sequenceId; mFrameNumber = -1; } /** * Get a capture result field value. * *The field definitions can be found in {@link CaptureResult}.
* *Querying the value for the same key more than once will return a value * which is equal to the previous queried value.
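* For example, a minimal sketch of reading one field and handling the case where the
* device did not report it ({@code result} is assumed to be a CaptureResult delivered
* to a capture callback, and {@code TAG} is an assumed logging tag):
* <pre>{@code
* Long exposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
* if (exposureTimeNs != null) {
*     // Exposure time actually used for this frame, in nanoseconds.
*     Log.d(TAG, "exposure = " + exposureTimeNs + " ns");
* }
* }</pre>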
* * @throws IllegalArgumentException if the key was not valid * * @param key The result field to read. * @return The value of that key, or {@code null} if the field is not set. */ @Nullable public <T> T get(Key<T> key) { return mResults.get(key); } /** * Dumps the native metadata contents to logcat. * *Visibility for testing/debugging only. The results will not * include any synthesized keys, as they are invisible to the native layer.
* * @hide */ public void dumpToLog() { mResults.dumpToLog(); } /** * {@inheritDoc} */ @Override @NonNull public List<Key<?>> getKeys() { return super.getKeys(); } /** * Get the request associated with this result. * *Whenever a request has been fully or partially captured, with * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted} or * {@link CameraCaptureSession.CaptureCallback#onCaptureProgressed}, the {@code result}'s * {@code getRequest()} will return that {@code request}.
* *For example,
* <pre><code>
* cameraDevice.capture(someRequest, new CaptureCallback() {
*     {@literal @}Override
*     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
*         assert (myResult.getRequest().equals(myRequest));
*     }
* }, null);
* </code></pre>
Whenever a request has been processed, regardless of failure or success, * it gets a unique frame number assigned to its future result/failure.
* *For the same type of request (capturing from the camera device or reprocessing), this * value monotonically increments, starting with 0, for every new result or failure and the * scope is the lifetime of the {@link CameraDevice}. Between different types of requests, * the frame number may not monotonically increment. For example, the frame number of a newer * reprocess result may be smaller than the frame number of an older result of capturing new * images from the camera device, but the frame number of a newer reprocess result will never be * smaller than the frame number of an older reprocess result.
* * @return The frame number * * @see CameraDevice#createCaptureRequest * @see CameraDevice#createReprocessCaptureRequest */ public long getFrameNumber() { return mFrameNumber; } /** * The sequence ID for this failure that was returned by the * {@link CameraCaptureSession#capture} family of functions. * *The sequence ID is a unique monotonically increasing value starting from 0, * incremented every time a new group of requests is submitted to the CameraDevice.
* * @return int The ID for the sequence of requests that this capture result is a part of * * @see CameraDevice.CaptureCallback#onCaptureSequenceCompleted * @see CameraDevice.CaptureCallback#onCaptureSequenceAborted */ public int getSequenceId() { return mSequenceId; } /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ * The key entries below this point are generated from metadata * definitions in /system/media/camera/docs. Do not modify by hand or * modify the comment blocks at the start or end. *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/ /** *The mode control selects how the image data is converted from the * sensor's native color into linear sRGB color.
*When auto-white balance (AWB) is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this * control is overridden by the AWB routine. When AWB is disabled, the * application controls how the color mapping is performed.
*We define the expected processing pipeline below. For consistency * across devices, this is always the case with TRANSFORM_MATRIX.
*When either FULL or HIGH_QUALITY is used, the camera device may * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the * camera device (in the results) and be roughly correct.
*Switching to TRANSFORM_MATRIX and using the data provided from * FAST or HIGH_QUALITY will yield a picture with the same white point * as what was produced by the camera device in the earlier frame.
*The expected processing pipeline is as follows:
* *The white balance is encoded by two values, a 4-channel white-balance * gain vector (applied in the Bayer domain), and a 3x3 color transform * matrix (applied after demosaic).
*The 4-channel white-balance gains are defined as:
*{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
*
* where {@code G_even} is the gain for green pixels on even rows of the
* output, and {@code G_odd} is the gain for green pixels on the odd rows.
* These may be identical for a given camera device implementation; if
* the camera device does not support a separate gain for even/odd green
* channels, it will use the {@code G_even} value, and write {@code G_odd} equal to
* {@code G_even} in the output result metadata.
The matrices for color transforms are defined as a 9-entry vector:
*{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
*
* which define a transform from input sensor colors, {@code P_in = [ r g b ]},
* to output linear sRGB, {@code P_out = [ r' g' b' ]}, with colors as follows:
*
* r' = I0r + I1g + I2b
* g' = I3r + I4g + I5b
* b' = I6r + I7g + I8b
*
* Both the input and output value ranges must match. Overflow/underflow * values are clipped to fit within the range.
*Possible values: *
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
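* As an illustration only, a sketch of a request that applies manual white balance through
* TRANSFORM_MATRIX (an identity transform and placeholder gains; {@code requestBuilder} and
* the classes from android.hardware.camera2.params are assumed to be available, and real
* values would normally be copied from an earlier result produced while AWB was running):
* <pre>{@code
* requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
* requestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
*         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
* requestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
*         new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));  // [ R G_even G_odd B ], placeholder values
* requestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
*         new ColorSpaceTransform(new int[] {
*                 1, 1, 0, 1, 0, 1,   // identity matrix, as numerator/denominator pairs
*                 0, 1, 1, 1, 0, 1,
*                 0, 1, 0, 1, 1, 1 }));
* }</pre>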
* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX * @see #COLOR_CORRECTION_MODE_FAST * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY */ @PublicKey public static final KeyA color transform matrix to use to transform * from sensor RGB color space to output linear sRGB color space.
*This matrix is either set by the camera device when the request * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or * directly by the application in the request when the * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.
*In the latter case, the camera device may round the matrix to account
* for precision issues; the final rounded matrix should be reported back
* in this matrix result metadata. The transform should keep the magnitude
* of the output color values within {@code [0, 1.0]} (assuming input color
* values are within the normalized range {@code [0, 1.0]}), or clipping may occur.
The valid range of each matrix element varies on different devices, but * values within [-1.5, 3.0] are guaranteed not to be clipped.
*Units: Unitless scale factors
*Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#COLOR_CORRECTION_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ @PublicKey public static final KeyGains applying to Bayer raw color channels for * white-balance.
*These per-channel gains are either set by the camera device * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not * TRANSFORM_MATRIX, or directly by the application in the * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is * TRANSFORM_MATRIX.
*The gains in the result metadata are the gains actually * applied by the camera device to the current frame.
*The valid range of gains varies on different devices, but gains * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given * device allows gains below 1.0, this is usually not recommended because * this can create color artifacts.
*Units: Unitless gain factors
*Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#COLOR_CORRECTION_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ @PublicKey public static final KeyMode of operation for the chromatic aberration correction algorithm.
*Chromatic (color) aberration is caused by the fact that different wavelengths of light * can not focus on the same point after exiting from the lens. This metadata defines * the high level control of chromatic aberration correction algorithm, which aims to * minimize the chromatic artifacts that may occur along the object boundaries in an * image.
*FAST/HIGH_QUALITY both mean that camera device determined aberration * correction will be applied. HIGH_QUALITY mode indicates that the camera device will * use the highest-quality aberration correction algorithms, even if it slows down * capture rate. FAST means the camera device will not slow down capture rate when * applying aberration correction.
*LEGACY devices will always be in FAST mode.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}
This key is available on all devices.
* * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES * @see #COLOR_CORRECTION_ABERRATION_MODE_OFF * @see #COLOR_CORRECTION_ABERRATION_MODE_FAST * @see #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY */ @PublicKey public static final KeyThe desired setting for the camera device's auto-exposure * algorithm's antibanding compensation.
*Some kinds of lighting fixtures, such as some fluorescent * lights, flicker at the rate of the power supply frequency * (60Hz or 50Hz, depending on country). While this is * typically not noticeable to a person, it can be visible to * a camera device. If a camera sets its exposure time to the * wrong value, the flicker may become visible in the * viewfinder as flicker or in a final captured image, as a * set of variable-brightness bands across the image.
*Therefore, the auto-exposure routines of camera devices * include antibanding routines that ensure that the chosen * exposure value will not cause such banding. The choice of * exposure time depends on the rate of flicker, which the * camera device can detect automatically, or the expected * rate can be selected by the application using this * control.
*A given camera device may not support all of the possible * options for the antibanding mode. The * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains * the available modes for a given camera device.
*AUTO mode is the default if it is available on a given * camera device. When AUTO mode is not available, the default will be either 50HZ or 60HZ, and both 50HZ * and 60HZ will be available.
*If manual exposure control is enabled (by setting * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF), * then this setting has no effect, and the application must * ensure it selects exposure times that do not cause banding * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist * the application in this.
*Possible values: *
Available values for this device:
{@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}
*This key is available on all devices.
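* A minimal sketch of preferring the AUTO antibanding mode when the device advertises it,
* otherwise falling back to 50HZ ({@code characteristics} and {@code requestBuilder} are
* assumed to already exist):
* <pre>{@code
* int[] modes = characteristics.get(
*         CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
* int antibanding = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ;
* for (int mode : modes) {
*     if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
*         antibanding = mode;
*         break;
*     }
* }
* requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, antibanding);
* }</pre>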
* * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureResult#STATISTICS_SCENE_FLICKER * @see #CONTROL_AE_ANTIBANDING_MODE_OFF * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO */ @PublicKey public static final KeyAdjustment to auto-exposure (AE) target image * brightness.
*The adjustment is measured as a count of steps, with the * step size defined by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} and the * allowed range by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}.
*For example, if the exposure value (EV) step is 0.333, '6'
* will mean an exposure compensation of +2 EV; -3 will mean an
* exposure compensation of -1 EV. One EV represents a doubling
* of image brightness. Note that this control will only be
* effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} {@code != OFF}. This control
* will take effect even when {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} {@code == true}.
*
*In the event of the exposure compensation value being changed, the camera device
* may take several frames to reach the newly requested exposure target. During that
* time, the {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} field will be in the SEARCHING
* state. Once the new exposure target is reached, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} will
* change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
* FLASH_REQUIRED (if the scene is too dark for still capture).
*Units: Compensation steps
*Range of valid values:
* {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}
This key is available on all devices.
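* A minimal sketch of the step arithmetic above, converting a desired EV adjustment into
* compensation steps and clamping it to the supported range ({@code characteristics} and
* {@code requestBuilder} are assumed to already exist):
* <pre>{@code
* Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
* Range<Integer> range = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
* double desiredEv = 1.0;  // brighten by one stop
* int steps = range.clamp((int) Math.round(desiredEv / step.doubleValue()));
* requestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
* }</pre>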
* * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP * @see CaptureRequest#CONTROL_AE_LOCK * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureResult#CONTROL_AE_STATE */ @PublicKey public static final KeyWhether auto-exposure (AE) is currently locked to its latest * calculated values.
*When set to {@code true} (ON), the AE algorithm is locked to its latest parameters,
* and will not change exposure settings until the lock is set to {@code false} (OFF).
Note that even when AE is locked, the flash may be fired if * the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH / * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
*When {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} is changed, even if the AE lock * is ON, the camera device will still adjust its exposure value.
*If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) * when AE is already locked, the camera device will not change the exposure time * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}) * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed. * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
*When an AE precapture sequence is triggered, AE unlock will not be able to unlock
* the AE if AE is locked by the camera device internally during the precapture metering
* sequence. In other words, submitting requests with AE unlock has no effect for an
* ongoing precapture metering sequence. Otherwise, the precapture metering sequence
* will never succeed in a sequence of preview requests where AE lock is always set
* to {@code false}.
Since the camera device has a pipeline of in-flight requests, the settings that
* get locked do not necessarily correspond to the settings that were present in the
* latest capture result received from the camera device, since additional captures
* and AE updates may have occurred even before the result was sent out. If an
* application is switching between automatic and manual control and wishes to eliminate
* any flicker during the switch, the following procedure is recommended:
*
* 1. Starting in auto-AE mode:
* 2. Lock AE
* 3. Wait for the first result to be output that has the AE locked
* 4. Copy exposure settings from that result into a request, set the request to manual AE
* 5. Submit the capture request, proceed to run manual AE as desired.
*See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.
*This key is available on all devices.
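* A minimal sketch of the lock-then-copy approach described above, inside a capture
* callback ({@code result}, {@code manualBuilder} and {@code session} are assumptions):
* <pre>{@code
* if (Boolean.TRUE.equals(result.get(CaptureResult.CONTROL_AE_LOCK))) {
*     Long exposure = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
*     Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
*     if (exposure != null && sensitivity != null) {
*         manualBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
*         manualBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposure);
*         manualBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
*         session.setRepeatingRequest(manualBuilder.build(), null, null);
*     }
* }
* }</pre>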
* * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureResult#CONTROL_AE_STATE * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_SENSITIVITY */ @PublicKey public static final KeyThe desired mode for the camera device's * auto-exposure routine.
*This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is * AUTO.
*When set to any of the ON modes, the camera device's * auto-exposure routine is enabled, overriding the * application's selected exposure time, sensor sensitivity, * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes * is selected, the camera device's flash unit controls are * also overridden.
*The FLASH modes are only available if the camera device
* has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is {@code true}).
If flash TORCH mode is desired, this field must be set to * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.
*When set to any of the ON modes, the values chosen by the * camera device auto-exposure routine for the overridden * fields for a given capture will be available in its * CaptureResult.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}
This key is available on all devices.
* * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#FLASH_INFO_AVAILABLE * @see CaptureRequest#FLASH_MODE * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION * @see CaptureRequest#SENSOR_SENSITIVITY * @see #CONTROL_AE_MODE_OFF * @see #CONTROL_AE_MODE_ON * @see #CONTROL_AE_MODE_ON_AUTO_FLASH * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE */ @PublicKey public static final KeyList of metering areas to use for auto-exposure adjustment.
*Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe} is 0. * Otherwise will always be present.
*The maximum number of regions supported by the device is determined by the value * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe}.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the * bottom-right pixel in the active pixel array.
*The weight must be within {@code [0, 1000]}, and represents a weight
* for every pixel in the area. This means that a large metering area
* with the same weight as a smaller area will have more effect in
* the metering result. Metering areas can partially overlap and the
* camera device will add the weights in the overlap region.
The weights are relative to weights of other exposure metering regions, so if only one * region is used, all non-zero weights will have the same effect. A region with 0 * weight is ignored.
*If all regions have 0 weight, then no specific metering area needs to be used by the * camera device.
*If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in * capture result metadata, the camera device will ignore the sections outside the crop * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
*Units: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
*Range of valid values:
* Coordinates must be between {@code [(0,0), (width, height))} of
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
Optional - This value may be {@code null} on some devices.
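* A minimal sketch of metering on the center quarter of the active array, using the
* coordinate system described above ({@code characteristics} and {@code requestBuilder}
* are assumed; MeteringRectangle is in android.hardware.camera2.params):
* <pre>{@code
* Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
* int regionWidth = active.width() / 2;
* int regionHeight = active.height() / 2;
* MeteringRectangle center = new MeteringRectangle(
*         (active.width() - regionWidth) / 2, (active.height() - regionHeight) / 2,
*         regionWidth, regionHeight, MeteringRectangle.METERING_WEIGHT_MAX);
* requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { center });
* }</pre>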
* * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AE * @see CaptureRequest#SCALER_CROP_REGION * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyRange over which the auto-exposure routine can * adjust the capture frame rate to maintain good * exposure.
*Only constrains auto-exposure (AE) algorithm, not * manual control of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} and * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}.
*Units: Frames per second (FPS)
*Range of valid values:
* Any of the entries in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}
This key is available on all devices.
* * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION */ @PublicKey public static final KeyWhether the camera device will trigger a precapture * metering sequence when it processes this request.
*This entry is normally set to IDLE, or is not * included at all in the request settings. When included and * set to START, the camera device will trigger the auto-exposure (AE) * precapture metering sequence.
*When set to CANCEL, the camera device will cancel any active * precapture metering trigger, and return to its initial AE state. * If a precapture metering sequence is already completed, and the camera * device has implicitly locked the AE for subsequent still capture, the * CANCEL trigger will unlock the AE and return to its initial AE state.
*The precapture sequence should be triggered before starting a * high-quality still capture for final metering decisions to * be made, and for firing pre-capture flash pulses to estimate * scene brightness and required final capture flash power, when * the flash is enabled.
*Normally, this entry should be set to START for only a * single request, and the application should wait until the * sequence completes before starting a new one.
*When a precapture metering sequence is finished, the camera device
* may lock the auto-exposure routine internally to be able to accurately expose the
* subsequent still capture image ({@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} {@code == STILL_CAPTURE}).
* For this case, the AE may not resume normal scan if no subsequent still capture is
* submitted. To ensure that the AE routine restarts normal scan, the application should
* submit a request with {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} {@code == true}, followed by a request
* with {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} {@code == false}, if the application decides not to submit a
* still capture request after the precapture sequence completes. Alternatively, on
* API level 23 or newer devices, CANCEL can be used to unlock the AE that the camera
* device has locked internally if the application doesn't submit a still capture request
* after the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
* be used on devices with earlier API levels.
The exact effect of auto-exposure (AE) precapture trigger * depends on the current AE mode and state; see * {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition * details.
*On LEGACY-level devices, the precapture trigger is not supported; * capturing a high-resolution JPEG image will automatically trigger a * precapture sequence before the high-resolution capture, including * potentially firing a pre-capture flash.
*Using the precapture trigger and the auto-focus trigger {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} * simultaneously is allowed. However, since these triggers often require cooperation between * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a * focus sweep), the camera device may delay acting on a later trigger until the previous * trigger has been fully handled. This may lead to longer intervals between the trigger and * changes to {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} indicating the start of the precapture sequence, for * example.
*If both the precapture and the auto-focus trigger are activated on the same request, then * the camera device will complete them in the optimal order for that device.
*Possible values: *
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
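* A minimal sketch of the single-request trigger pattern described above
* ({@code previewBuilder}, {@code session}, {@code callback} and {@code handler} are assumptions):
* <pre>{@code
* // Fire the precapture trigger on one request only...
* previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
*         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
* session.capture(previewBuilder.build(), callback, handler);
* // ...then return the trigger to IDLE so later requests do not restart the sequence,
* // and issue the still capture once CONTROL_AE_STATE leaves PRECAPTURE.
* previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
*         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
* }</pre>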
* * @see CaptureRequest#CONTROL_AE_LOCK * @see CaptureResult#CONTROL_AE_STATE * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START * @see #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL */ @PublicKey public static final KeyCurrent state of the auto-exposure (AE) algorithm.
*Switching between or enabling AE modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}) always
* resets the AE state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
* or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if {@link CaptureRequest#CONTROL_MODE android.control.mode} {@code == USE_SCENE_MODE}, resets all
* the algorithm states to INACTIVE.
The camera device can do several state transitions between two results, if it is * allowed by the state transition table. For example: INACTIVE may never actually be * seen in a result.
*The state in the result is the state for this image (in sync with this image): if * AE state becomes CONVERGED, then the image data associated with this result should * be good to use.
*Below are state transition tables for different AE modes.
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | ** | INACTIVE | *Camera device auto exposure algorithm is disabled | *
When {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is AE_MODE_ON_*:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *Camera device initiates AE scan | *SEARCHING | *Values changing | *
INACTIVE | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON | *LOCKED | *Values locked | *
SEARCHING | *Camera device finishes AE scan | *CONVERGED | *Good values, not changing | *
SEARCHING | *Camera device finishes AE scan | *FLASH_REQUIRED | *Converged but too dark w/o flash | *
SEARCHING | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON | *LOCKED | *Values locked | *
CONVERGED | *Camera device initiates AE scan | *SEARCHING | *Values changing | *
CONVERGED | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON | *LOCKED | *Values locked | *
FLASH_REQUIRED | *Camera device initiates AE scan | *SEARCHING | *Values changing | *
FLASH_REQUIRED | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON | *LOCKED | *Values locked | *
LOCKED | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF | *SEARCHING | *Values not good after unlock | *
LOCKED | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF | *CONVERGED | *Values good after unlock | *
LOCKED | *{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF | *FLASH_REQUIRED | *Exposure good, but too dark | *
PRECAPTURE | *Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF | *CONVERGED | *Ready for high-quality capture | *
PRECAPTURE | *Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON | *LOCKED | *Ready for high-quality capture | *
LOCKED | *aeLock is ON and aePrecaptureTrigger is START | *LOCKED | *Precapture trigger is ignored when AE is already locked | *
LOCKED | *aeLock is ON and aePrecaptureTrigger is CANCEL | *LOCKED | *Precapture trigger is ignored when AE is already locked | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START | *PRECAPTURE | *Start AE precapture metering sequence | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL | *INACTIVE | *Currently active precapture metering sequence is canceled | *
For the above table, the camera device may skip reporting any state changes that happen * without application intervention (i.e. mode switch, trigger, locking). Any state that * can be skipped in that manner is called a transient state.
*For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions * listed in above table, it is also legal for the camera device to skip one or more * transient states between two results. See below table for examples:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *Camera device finished AE scan | *CONVERGED | *Values are already good, transient states are skipped by camera device. | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done | *FLASH_REQUIRED | *Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device. | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done | *CONVERGED | *Converged after a precapture sequence, transient states are skipped by camera device. | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged | *FLASH_REQUIRED | *Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device. | *
Any state (excluding LOCKED) | *{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged | *CONVERGED | *Converged after a precapture sequence is canceled, transient states are skipped by camera device. | *
CONVERGED | *Camera device finished AE scan | *FLASH_REQUIRED | *Converged but too dark w/o flash after a new scan, transient states are skipped by camera device. | *
FLASH_REQUIRED | *Camera device finished AE scan | *CONVERGED | *Converged after a new scan, transient states are skipped by camera device. | *
Possible values: *
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
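* For example, a sketch of gating a still capture on AE convergence inside
* {@code onCaptureCompleted} (the {@code captureStillPicture()} helper is hypothetical):
* <pre>{@code
* Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
* if (aeState == null
*         || aeState == CameraMetadata.CONTROL_AE_STATE_CONVERGED
*         || aeState == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED) {
*     captureStillPicture();
* }
* }</pre>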
* * @see CaptureRequest#CONTROL_AE_LOCK * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_SCENE_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #CONTROL_AE_STATE_INACTIVE * @see #CONTROL_AE_STATE_SEARCHING * @see #CONTROL_AE_STATE_CONVERGED * @see #CONTROL_AE_STATE_LOCKED * @see #CONTROL_AE_STATE_FLASH_REQUIRED * @see #CONTROL_AE_STATE_PRECAPTURE */ @PublicKey public static final KeyWhether auto-focus (AF) is currently enabled, and what * mode it is set to.
*Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
* (i.e. {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} {@code > 0}). Also note that
* when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, the behavior of AF is device
* dependent. It is recommended to lock AF by using {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} before
* setting {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} to OFF, or set AF mode to OFF when AE is OFF.
If the lens is controlled by the camera device auto-focus algorithm, * the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState} * in result metadata.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}
This key is available on all devices.
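* A minimal sketch of preferring CONTINUOUS_PICTURE for a preview and falling back to the
* first advertised mode when it is not listed ({@code characteristics} and
* {@code previewBuilder} are assumed to already exist):
* <pre>{@code
* int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
* int afMode = afModes[0];
* for (int mode : afModes) {
*     if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
*         afMode = mode;
*         break;
*     }
* }
* previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, afMode);
* }</pre>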
* * @see CaptureRequest#CONTROL_AE_MODE * @see CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES * @see CaptureResult#CONTROL_AF_STATE * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE * @see #CONTROL_AF_MODE_OFF * @see #CONTROL_AF_MODE_AUTO * @see #CONTROL_AF_MODE_MACRO * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE * @see #CONTROL_AF_MODE_EDOF */ @PublicKey public static final KeyList of metering areas to use for auto-focus.
*Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf} is 0. * Otherwise will always be present.
*The maximum number of focus areas supported by the device is determined by the value * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf}.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the * bottom-right pixel in the active pixel array.
*The weight must be within {@code [0, 1000]}, and represents a weight
* for every pixel in the area. This means that a large metering area
* with the same weight as a smaller area will have more effect in
* the metering result. Metering areas can partially overlap and the
* camera device will add the weights in the overlap region.
The weights are relative to weights of other metering regions, so if only one region * is used, all non-zero weights will have the same effect. A region with 0 weight is * ignored.
*If all regions have 0 weight, then no specific metering area needs to be used by the * camera device.
*If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in * capture result metadata, the camera device will ignore the sections outside the crop * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
*Units: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
*Range of valid values:
* Coordinates must be between {@code [(0,0), (width, height))} of
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AF * @see CaptureRequest#SCALER_CROP_REGION * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyWhether the camera device will trigger autofocus for this request.
*This entry is normally set to IDLE, or is not * included at all in the request settings.
*When included and set to START, the camera device will trigger the * autofocus algorithm. If autofocus is disabled, this trigger has no effect.
*When set to CANCEL, the camera device will cancel any active trigger, * and return to its initial AF state.
*Generally, applications should set this entry to START or CANCEL for only a * single capture, and then return it to IDLE (or not set at all). Specifying * START for multiple captures in a row means restarting the AF operation over * and over again.
*See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.
*Using the autofocus trigger and the precapture trigger {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} * simultaneously is allowed. However, since these triggers often require cooperation between * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a * focus sweep), the camera device may delay acting on a later trigger until the previous * trigger has been fully handled. This may lead to longer intervals between the trigger and * changes to {@link CaptureResult#CONTROL_AF_STATE android.control.afState}, for example.
*Possible values: *
This key is available on all devices.
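* A minimal sketch of issuing a one-shot trigger and returning to IDLE afterwards
* ({@code previewBuilder}, {@code session}, {@code callback} and {@code handler} are assumptions):
* <pre>{@code
* previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
* session.capture(previewBuilder.build(), callback, handler);
* // Later requests should carry CONTROL_AF_TRIGGER_IDLE (or omit the key) so the
* // scan is not restarted on every frame.
* previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
* }</pre>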
* * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureResult#CONTROL_AF_STATE * @see #CONTROL_AF_TRIGGER_IDLE * @see #CONTROL_AF_TRIGGER_START * @see #CONTROL_AF_TRIGGER_CANCEL */ @PublicKey public static final KeyCurrent state of auto-focus (AF) algorithm.
*Switching between or enabling AF modes ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) always
* resets the AF state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
* or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if {@link CaptureRequest#CONTROL_MODE android.control.mode} {@code == USE_SCENE_MODE}, resets all
* the algorithm states to INACTIVE.
The camera device can do several state transitions between two results, if it is * allowed by the state transition table. For example: INACTIVE may never actually be * seen in a result.
*The state in the result is the state for this image (in sync with this image): if * AF state becomes FOCUSED, then the image data associated with this result should * be sharp.
*Below are state transition tables for different AF modes.
*When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_OFF or AF_MODE_EDOF:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | ** | INACTIVE | *Never changes | *
When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_AUTO or AF_MODE_MACRO:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *AF_TRIGGER | *ACTIVE_SCAN | *Start AF sweep, Lens now moving | *
ACTIVE_SCAN | *AF sweep done | *FOCUSED_LOCKED | *Focused, Lens now locked | *
ACTIVE_SCAN | *AF sweep done | *NOT_FOCUSED_LOCKED | *Not focused, Lens now locked | *
ACTIVE_SCAN | *AF_CANCEL | *INACTIVE | *Cancel/reset AF, Lens now locked | *
FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Cancel/reset AF | *
FOCUSED_LOCKED | *AF_TRIGGER | *ACTIVE_SCAN | *Start new sweep, Lens now moving | *
NOT_FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Cancel/reset AF | *
NOT_FOCUSED_LOCKED | *AF_TRIGGER | *ACTIVE_SCAN | *Start new sweep, Lens now moving | *
Any state | *Mode change | *INACTIVE | ** |
For the above table, the camera device may skip reporting any state changes that happen * without application intervention (i.e. mode switch, trigger, locking). Any state that * can be skipped in that manner is called a transient state.
*For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the * state transitions listed in above table, it is also legal for the camera device to skip * one or more transient states between two results. See below table for examples:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *AF_TRIGGER | *FOCUSED_LOCKED | *Focus is already good or good after a scan, lens is now locked. | *
INACTIVE | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *Focus failed after a scan, lens is now locked. | *
FOCUSED_LOCKED | *AF_TRIGGER | *FOCUSED_LOCKED | *Focus is already good or good after a scan, lens is now locked. | *
NOT_FOCUSED_LOCKED | *AF_TRIGGER | *FOCUSED_LOCKED | *Focus is good after a scan, lens is now locked. | *
When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_VIDEO:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
INACTIVE | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *AF state query, Lens now locked | *
PASSIVE_SCAN | *Camera device completes current scan | *PASSIVE_FOCUSED | *End AF scan, Lens now locked | *
PASSIVE_SCAN | *Camera device fails current scan | *PASSIVE_UNFOCUSED | *End AF scan, Lens now locked | *
PASSIVE_SCAN | *AF_TRIGGER | *FOCUSED_LOCKED | *Immediate transition, if focus is good. Lens now locked | *
PASSIVE_SCAN | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *Immediate transition, if focus is bad. Lens now locked | *
PASSIVE_SCAN | *AF_CANCEL | *INACTIVE | *Reset lens position, Lens now locked | *
PASSIVE_FOCUSED | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
PASSIVE_UNFOCUSED | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
PASSIVE_FOCUSED | *AF_TRIGGER | *FOCUSED_LOCKED | *Immediate transition, lens now locked | *
PASSIVE_UNFOCUSED | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *Immediate transition, lens now locked | *
FOCUSED_LOCKED | *AF_TRIGGER | *FOCUSED_LOCKED | *No effect | *
FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Restart AF scan | *
NOT_FOCUSED_LOCKED | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *No effect | *
NOT_FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Restart AF scan | *
When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_PICTURE:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
INACTIVE | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
INACTIVE | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *AF state query, Lens now locked | *
PASSIVE_SCAN | *Camera device completes current scan | *PASSIVE_FOCUSED | *End AF scan, Lens now locked | *
PASSIVE_SCAN | *Camera device fails current scan | *PASSIVE_UNFOCUSED | *End AF scan, Lens now locked | *
PASSIVE_SCAN | *AF_TRIGGER | *FOCUSED_LOCKED | *Eventual transition once the focus is good. Lens now locked | *
PASSIVE_SCAN | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *Eventual transition if cannot find focus. Lens now locked | *
PASSIVE_SCAN | *AF_CANCEL | *INACTIVE | *Reset lens position, Lens now locked | *
PASSIVE_FOCUSED | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
PASSIVE_UNFOCUSED | *Camera device initiates new scan | *PASSIVE_SCAN | *Start AF scan, Lens now moving | *
PASSIVE_FOCUSED | *AF_TRIGGER | *FOCUSED_LOCKED | *Immediate trans. Lens now locked | *
PASSIVE_UNFOCUSED | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *Immediate trans. Lens now locked | *
FOCUSED_LOCKED | *AF_TRIGGER | *FOCUSED_LOCKED | *No effect | *
FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Restart AF scan | *
NOT_FOCUSED_LOCKED | *AF_TRIGGER | *NOT_FOCUSED_LOCKED | *No effect | *
NOT_FOCUSED_LOCKED | *AF_CANCEL | *INACTIVE | *Restart AF scan | *
When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the * camera device. When a trigger is included in a mode switch request, the trigger * will be evaluated in the context of the new mode in the request. * See below table for examples:
*State | *Transition Cause | *New State | *Notes | *
---|---|---|---|
any state | *CAF-->AUTO mode switch | *INACTIVE | *Mode switch without trigger, initial state must be INACTIVE | *
any state | *CAF-->AUTO mode switch with AF_TRIGGER | *trigger-reachable states from INACTIVE | *Mode switch with trigger, INACTIVE is skipped | *
any state | *AUTO-->CAF mode switch | *passively reachable states from INACTIVE | *Mode switch without trigger, passive transient state is skipped | *
Possible values: *
This key is available on all devices.
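* For example, a sketch of reacting to the locked states after an AF trigger, inside
* {@code onCaptureCompleted} (the {@code runStillCapture()} helper is hypothetical):
* <pre>{@code
* Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
* if (afState != null
*         && (afState == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED
*                 || afState == CameraMetadata.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)) {
*     runStillCapture();  // lens is locked; proceed whether or not focus succeeded
* }
* }</pre>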
* * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_SCENE_MODE * @see #CONTROL_AF_STATE_INACTIVE * @see #CONTROL_AF_STATE_PASSIVE_SCAN * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED * @see #CONTROL_AF_STATE_ACTIVE_SCAN * @see #CONTROL_AF_STATE_FOCUSED_LOCKED * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED */ @PublicKey public static final KeyWhether auto-white balance (AWB) is currently locked to its * latest calculated values.
*When set to {@code true} (ON), the AWB algorithm is locked to its latest parameters,
* and will not change color balance settings until the lock is set to {@code false} (OFF).
Since the camera device has a pipeline of in-flight requests, the settings that
* get locked do not necessarily correspond to the settings that were present in the
* latest capture result received from the camera device, since additional captures
* and AWB updates may have occurred even before the result was sent out. If an
* application is switching between automatic and manual control and wishes to eliminate
* any flicker during the switch, the following procedure is recommended:
*
* 1. Starting in auto-AWB mode:
* 2. Lock AWB
* 3. Wait for the first result to be output that has the AWB locked
* 4. Copy AWB settings from that result into a request, set the request to manual AWB
* 5. Submit the capture request, proceed to run manual AWB as desired.
*Note that AWB lock is only meaningful when * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes, * AWB is already fixed to a specific setting.
*Some LEGACY devices may not support ON; the value is then overridden to OFF.
*This key is available on all devices.
* * @see CaptureRequest#CONTROL_AWB_MODE */ @PublicKey public static final KeyWhether auto-white balance (AWB) is currently setting the color * transform fields, and what its illumination target * is.
*This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.
*When set to the ON mode, the camera device's auto-white balance * routine is enabled, overriding the application's selected * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}. Note that when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} * is OFF, the behavior of AWB is device dependent. It is recommended to * also set AWB mode to OFF or lock AWB by using {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} before * setting AE mode to OFF.
*When set to the OFF mode, the camera device's auto-white balance * routine is disabled. The application manually controls the white * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.
*When set to any other modes, the camera device's auto-white * balance routine is disabled. The camera device uses each * particular illumination target for white balance * adjustment. The application's values for * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES android.control.awbAvailableModes}
This key is available on all devices.
* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_MODE * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AE_MODE * @see CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#CONTROL_MODE * @see #CONTROL_AWB_MODE_OFF * @see #CONTROL_AWB_MODE_AUTO * @see #CONTROL_AWB_MODE_INCANDESCENT * @see #CONTROL_AWB_MODE_FLUORESCENT * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT * @see #CONTROL_AWB_MODE_DAYLIGHT * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT * @see #CONTROL_AWB_MODE_TWILIGHT * @see #CONTROL_AWB_MODE_SHADE */ @PublicKey public static final KeyList of metering areas to use for auto-white-balance illuminant * estimation.
*Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb} is 0. * Otherwise will always be present.
*The maximum number of regions supported by the device is determined by the value * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb}.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the * bottom-right pixel in the active pixel array.
*The weight must range from 0 to 1000, and represents a weight * for every pixel in the area. This means that a large metering area * with the same weight as a smaller area will have more effect in * the metering result. Metering areas can partially overlap and the * camera device will add the weights in the overlap region.
*The weights are relative to weights of other white balance metering regions, so if * only one region is used, all non-zero weights will have the same effect. A region with * 0 weight is ignored.
*If all regions have 0 weight, then no specific metering area needs to be used by the * camera device.
*If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in * capture result metadata, the camera device will ignore the sections outside the crop * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
*Units: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
*Range of valid values:
* Coordinates must be between {@code [(0,0), (width, height))} of
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AWB * @see CaptureRequest#SCALER_CROP_REGION * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyInformation to the camera device 3A (auto-exposure, * auto-focus, auto-white balance) routines about the purpose * of this capture, to help the camera device to decide optimal 3A * strategy.
*This control (except for MANUAL) is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF and any 3A routine is active.
ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are * always supported.
*Possible values: *
This key is available on all devices.
* * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see #CONTROL_CAPTURE_INTENT_CUSTOM * @see #CONTROL_CAPTURE_INTENT_PREVIEW * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG * @see #CONTROL_CAPTURE_INTENT_MANUAL */ @PublicKey public static final KeyCurrent state of auto-white balance (AWB) algorithm.
*Switching between or enabling AWB modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}) always resets the AWB state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode}, or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if {@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE, resets all the algorithm states to INACTIVE.
The camera device can do several state transitions between two results, if it is * allowed by the state transition table. So INACTIVE may never actually be seen in * a result.
*The state in the result is the state for this image (in sync with this image): if * AWB state becomes CONVERGED, then the image data associated with this result should * be good to use.
*Below are state transition tables for different AWB modes.
*When {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != AWB_MODE_AUTO
:
State | Transition Cause | New State | Notes
---|---|---|---
INACTIVE | | INACTIVE | Camera device auto white balance algorithm is disabled
When {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is AWB_MODE_AUTO:
State | Transition Cause | New State | Notes
---|---|---|---
INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing
INACTIVE | {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON | LOCKED | Values locked
SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing
SEARCHING | {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON | LOCKED | Values locked
CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing
CONVERGED | {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON | LOCKED | Values locked
LOCKED | {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF | SEARCHING | Values not good after unlock
For the above table, the camera device may skip reporting any state changes that happen * without application intervention (i.e. mode switch, trigger, locking). Any state that * can be skipped in that manner is called a transient state.
*For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions * listed in above table, it is also legal for the camera device to skip one or more * transient states between two results. See below table for examples:
State | Transition Cause | New State | Notes
---|---|---|---
INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device.
LOCKED | {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device.
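As an illustrative (not normative) sketch of consuming this state machine, an application might wait for AWB to converge before taking a still capture; the session and request wiring are assumed to exist elsewhere:

 // Assumption: used inside a CameraCaptureSession.CaptureCallback registered for the
 // repeating preview request.
 @Override
 public void onCaptureCompleted(CameraCaptureSession session,
         CaptureRequest request, TotalCaptureResult result) {
     Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
     // The state may be null on devices that do not report it.
     if (awbState != null && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
         // White balance estimates are stable for this frame; safe to capture a still.
     }
 }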
Possible values: *
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_SCENE_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #CONTROL_AWB_STATE_INACTIVE * @see #CONTROL_AWB_STATE_SEARCHING * @see #CONTROL_AWB_STATE_CONVERGED * @see #CONTROL_AWB_STATE_LOCKED */ @PublicKey public static final KeyA special color effect to apply.
*When this mode is set, a color effect will be applied * to images produced by the camera device. The interpretation * and implementation of these color effects is left to the * implementor of the camera device, and should not be * depended on to be consistent (or present) across all * devices.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS android.control.availableEffects}
This key is available on all devices.
* * @see CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS * @see #CONTROL_EFFECT_MODE_OFF * @see #CONTROL_EFFECT_MODE_MONO * @see #CONTROL_EFFECT_MODE_NEGATIVE * @see #CONTROL_EFFECT_MODE_SOLARIZE * @see #CONTROL_EFFECT_MODE_SEPIA * @see #CONTROL_EFFECT_MODE_POSTERIZE * @see #CONTROL_EFFECT_MODE_WHITEBOARD * @see #CONTROL_EFFECT_MODE_BLACKBOARD * @see #CONTROL_EFFECT_MODE_AQUA */ @PublicKey public static final KeyOverall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control * routines.
*This is a top-level 3A control switch. When set to OFF, all 3A control * by the camera device is disabled. The application must set the fields for * capture parameters itself.
*When set to AUTO, the individual algorithm controls in * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.
*When set to USE_SCENE_MODE, the individual controls in * android.control.* are mostly disabled, and the camera device implements * one of the scene mode settings (such as ACTION, SUNSET, or PARTY) * as it wishes. The camera device scene mode 3A settings are provided by * {@link android.hardware.camera2.CaptureResult capture results}.
*When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference is that this frame will not be used by the camera device's background 3A statistics update, as if this frame were never captured. This mode can be used in scenarios where the application doesn't want a 3A manual control capture to affect the subsequent auto 3A capture results.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AVAILABLE_MODES android.control.availableModes}
This key is available on all devices.
* * @see CaptureRequest#CONTROL_AF_MODE * @see CameraCharacteristics#CONTROL_AVAILABLE_MODES * @see #CONTROL_MODE_OFF * @see #CONTROL_MODE_AUTO * @see #CONTROL_MODE_USE_SCENE_MODE * @see #CONTROL_MODE_OFF_KEEP_STATE */ @PublicKey public static final KeyControl for which scene mode is currently active.
*Scene modes are custom camera modes optimized for a certain set of conditions and * capture settings.
*This is the mode that is active when {@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE. Aside from FACE_PRIORITY, these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.
The interpretation and implementation of these scene modes is left * to the implementor of the camera device. Their behavior will not be * consistent across all devices, and any given device may only implement * a subset of these modes.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}
This key is available on all devices.
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AF_MODE * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#CONTROL_MODE * @see #CONTROL_SCENE_MODE_DISABLED * @see #CONTROL_SCENE_MODE_FACE_PRIORITY * @see #CONTROL_SCENE_MODE_ACTION * @see #CONTROL_SCENE_MODE_PORTRAIT * @see #CONTROL_SCENE_MODE_LANDSCAPE * @see #CONTROL_SCENE_MODE_NIGHT * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT * @see #CONTROL_SCENE_MODE_THEATRE * @see #CONTROL_SCENE_MODE_BEACH * @see #CONTROL_SCENE_MODE_SNOW * @see #CONTROL_SCENE_MODE_SUNSET * @see #CONTROL_SCENE_MODE_STEADYPHOTO * @see #CONTROL_SCENE_MODE_FIREWORKS * @see #CONTROL_SCENE_MODE_SPORTS * @see #CONTROL_SCENE_MODE_PARTY * @see #CONTROL_SCENE_MODE_CANDLELIGHT * @see #CONTROL_SCENE_MODE_BARCODE * @see #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO * @see #CONTROL_SCENE_MODE_HDR */ @PublicKey public static final KeyWhether video stabilization is * active.
*Video stabilization automatically warps images from * the camera in order to stabilize motion between consecutive frames.
*If enabled, video stabilization can modify the * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream stabilized.
*Switching between different video stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the video stabilization modes in the first several capture results may still be "OFF", and they will become "ON" when the initialization is done.
*In addition, not all recording sizes or frame rates may be supported for * stabilization by a device that reports stabilization support. It is guaranteed * that an output targeting a MediaRecorder or MediaCodec will be stabilized if * the recording resolution is less than or equal to 1920 x 1080 (width less than * or equal to 1920, height less than or equal to 1080), and the recording * frame rate is less than or equal to 30fps. At other sizes, the CaptureResult * {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode} field will return * OFF if the recording output is not stabilized, or if there are no output * Surface types that can be stabilized.
*If a camera device supports both this mode and OIS * ({@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}), turning both modes on may * produce undesirable interaction, so it is recommended not to enable * both at the same time.
*Possible values: *
This key is available on all devices.
* * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE * @see CaptureRequest#SCALER_CROP_REGION * @see #CONTROL_VIDEO_STABILIZATION_MODE_OFF * @see #CONTROL_VIDEO_STABILIZATION_MODE_ON */ @PublicKey public static final KeyThe amount of additional sensitivity boost applied to output images * after RAW sensor data is captured.
*Some camera devices support additional digital sensitivity boosting in the camera processing pipeline after the sensor RAW image is captured. Such a boost will be applied to YUV/JPEG format output images but will have no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
*This key will be null for devices that do not support any RAW format outputs. For devices that do support RAW format outputs, this key will always be present, and if a device does not support post RAW sensitivity boost, it will list 100 in this key.
If the camera device cannot apply the exact boost requested, it will reduce the * boost to the nearest supported value. * The final boost value used will be available in the output capture result.
*For devices that support post RAW sensitivity boost, the YUV/JPEG output images of such a device will have a total sensitivity of {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} / 100. The sensitivity of RAW format images will always be {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}.
This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.
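As a hedged example of the arithmetic above, the effective YUV/JPEG sensitivity can be derived from the two result keys (the boost key may be null on devices without RAW support):

 // Assumption: 'result' is a CaptureResult from a completed capture.
 Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
 Integer boost = result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
 if (sensitivity != null) {
     int effectiveBoost = (boost != null) ? boost : 100; // 100 means no extra boost
     int yuvJpegSensitivity = sensitivity * effectiveBoost / 100;
     // RAW outputs keep the plain sensor sensitivity, i.e. 'sensitivity'.
 }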
*Units: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}
*Range of valid values:
* {@link CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE android.control.postRawSensitivityBoostRange}
Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST * @see CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE * @see CaptureRequest#SENSOR_SENSITIVITY */ @PublicKey public static final KeyAllow camera device to enable zero-shutter-lag mode for requests with * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE.
*If enableZsl is true
, the camera device may enable zero-shutter-lag mode for requests with
* STILL_CAPTURE capture intent. The camera device may use images captured in the past to
* produce output images for a zero-shutter-lag request. The result metadata including the
* {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} reflects the source frames used to produce output images.
* Therefore, the contents of the output images and the result metadata may be out of order
* compared to previous regular requests. enableZsl does not affect requests with other
* capture intents.
For example, when requests are submitted in the following order:
Request A: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is PREVIEW
Request B: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is STILL_CAPTURE
*The output images for request B may have contents captured before the output images for * request A, and the result metadata for request B may be older than the result metadata for * request A.
*Note that when enableZsl is true
, it is not guaranteed to get output images captured in
* the past for requests with STILL_CAPTURE capture intent.
For applications targeting SDK versions O and newer, the value of enableZsl in TEMPLATE_STILL_CAPTURE template may be true. The value in other templates is always false if present.
For applications targeting SDK versions older than O, the value of enableZsl in all capture templates is always false if present.
For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
*Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CaptureResult#SENSOR_TIMESTAMP */ @PublicKey public static final KeyOperation mode for edge * enhancement.
*Edge enhancement improves sharpness and details in the captured image. OFF means * no enhancement will be applied by the camera device.
*FAST/HIGH_QUALITY both mean camera device determined enhancement * will be applied. HIGH_QUALITY mode indicates that the * camera device will use the highest-quality enhancement algorithms, * even if it slows down capture rate. FAST means the camera device will * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if * edge enhancement will slow down capture rate. Every output stream will have a similar * amount of enhancement applied.
*ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular * buffer of high-resolution images during preview and reprocess image(s) from that buffer * into a final capture when triggered by the user. In this mode, the camera device applies * edge enhancement to low-resolution streams (below maximum recording resolution) to * maximize preview quality, but does not apply edge enhancement to high-resolution streams, * since those will be reprocessed later if necessary.
*For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. * The camera device may adjust its internal edge enhancement parameters for best * image quality based on the {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}, if it is set.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR * @see #EDGE_MODE_OFF * @see #EDGE_MODE_FAST * @see #EDGE_MODE_HIGH_QUALITY * @see #EDGE_MODE_ZERO_SHUTTER_LAG */ @PublicKey public static final KeyThe desired mode for the camera device's flash control.
*This control is only effective when a flash unit is available ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true).
When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF. * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
*When set to OFF, the camera device will not fire flash for this capture.
*When set to SINGLE, the camera device will fire flash regardless of the camera * device's auto-exposure routine's result. When used in still capture case, this * control should be used along with auto-exposure (AE) precapture metering sequence * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}), otherwise, the image may be incorrectly exposed.
*When set to TORCH, the flash will be on continuously. This mode can be used * for use cases such as preview, auto-focus assist, still capture, or video recording.
*The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.
*Possible values: *
This key is available on all devices.
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CameraCharacteristics#FLASH_INFO_AVAILABLE * @see CaptureResult#FLASH_STATE * @see #FLASH_MODE_OFF * @see #FLASH_MODE_SINGLE * @see #FLASH_MODE_TORCH */ @PublicKey public static final KeyCurrent state of the flash * unit.
*When the camera device doesn't have a flash unit (i.e. {@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == false), this state will always be UNAVAILABLE. Other states indicate the current flash status.
In certain conditions, this will be available on LEGACY devices:
{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == ON_ALWAYS_FLASH will always return FIRED.
{@link CaptureRequest#FLASH_MODE android.flash.mode} == TORCH will always return FIRED.
In all other conditions the state will not be available on LEGACY devices (i.e. it will be null).
Possible values: *
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CameraCharacteristics#FLASH_INFO_AVAILABLE * @see CaptureRequest#FLASH_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #FLASH_STATE_UNAVAILABLE * @see #FLASH_STATE_CHARGING * @see #FLASH_STATE_READY * @see #FLASH_STATE_FIRED * @see #FLASH_STATE_PARTIAL */ @PublicKey public static final KeyOperational mode for hot pixel correction.
*Hot pixel correction interpolates out, or otherwise removes, pixels that do not accurately measure the incoming light (i.e. pixels that are stuck at an arbitrary value or are oversensitive).
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES android.hotPixel.availableHotPixelModes}
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES * @see #HOT_PIXEL_MODE_OFF * @see #HOT_PIXEL_MODE_FAST * @see #HOT_PIXEL_MODE_HIGH_QUALITY */ @PublicKey public static final KeyA location object to use when generating image GPS metadata.
*Setting a location object in a request will include the GPS coordinates of the location * into any JPEG images captured based on the request. These coordinates can then be * viewed by anyone who receives the JPEG image.
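For example (a sketch only; the latitude/longitude values are placeholders and 'builder' is an assumed CaptureRequest.Builder for a JPEG capture):

 // Assumption: coordinates come from a real location fix in a production app.
 Location location = new Location(LocationManager.GPS_PROVIDER);
 location.setLatitude(37.4220);    // placeholder coordinates
 location.setLongitude(-122.0841);
 location.setTime(System.currentTimeMillis());
 builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);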
*This key is available on all devices.
*/ @PublicKey @SyntheticKey public static final KeyGPS coordinates to include in output JPEG * EXIF.
*Range of valid values:
* (-180 - 180], [-90,90], [-inf, inf]
This key is available on all devices.
* @hide */ public static final Key32 characters describing GPS algorithm to * include in EXIF.
*Units: UTF-8 null-terminated string
*This key is available on all devices.
* @hide */ public static final KeyTime GPS fix was made to include in * EXIF.
*Units: UTC in seconds since January 1, 1970
*This key is available on all devices.
* @hide */ public static final KeyThe orientation for a JPEG image.
*The clockwise rotation angle in degrees, relative to the orientation of the camera, that the JPEG picture needs to be rotated by, to be viewed upright.
*Camera devices may either encode this value into the JPEG EXIF header, or * rotate the image data to match this orientation. When the image data is rotated, * the thumbnail data will also be rotated.
*Note that this orientation is relative to the orientation of the camera sensor, given * by {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}.
*To translate from the device orientation given by the Android sensor APIs, the following * sample code may be used:
*private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
* if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
* int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
*
* // Round device orientation to a multiple of 90
* deviceOrientation = (deviceOrientation + 45) / 90 * 90;
*
* // Reverse device orientation for front-facing cameras
* boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
* if (facingFront) deviceOrientation = -deviceOrientation;
*
* // Calculate desired JPEG orientation relative to camera orientation to make
* // the image upright relative to the device orientation
* int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
*
* return jpegOrientation;
* }
*
* Units: Degrees in multiples of 90
*Range of valid values:
* 0, 90, 180, 270
This key is available on all devices.
* * @see CameraCharacteristics#SENSOR_ORIENTATION */ @PublicKey public static final KeyCompression quality of the final JPEG * image.
*The typical usage range is 85-95.
*Range of valid values:
* 1-100; larger is higher quality
This key is available on all devices.
*/ @PublicKey public static final KeyCompression quality of JPEG * thumbnail.
*Range of valid values:
* 1-100; larger is higher quality
This key is available on all devices.
*/ @PublicKey public static final KeyResolution of embedded JPEG thumbnail.
*When set to (0, 0), the JPEG EXIF will not contain a thumbnail, but the captured JPEG will still be a valid image.
*For best results, when issuing a request for a JPEG image, the thumbnail size selected * should have the same aspect ratio as the main JPEG output.
*If the thumbnail image aspect ratio differs from the JPEG primary image aspect ratio, the camera device creates the thumbnail by cropping it from the primary image. For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to generate the thumbnail image. The thumbnail image will always have a smaller Field Of View (FOV) than the primary image when aspect ratios differ.
*When an {@link CaptureRequest#JPEG_ORIENTATION android.jpeg.orientation} of non-zero degree is requested, * the camera device will handle thumbnail rotation in one of the following ways:
*Range of valid values:
* {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}
This key is available on all devices.
* * @see CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES * @see CaptureRequest#JPEG_ORIENTATION */ @PublicKey public static final KeyThe desired lens aperture size, as a ratio of lens focal length to the * effective aperture diameter.
*Setting this value is only supported on the camera devices that have a variable * aperture lens.
*When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} * to achieve manual exposure control.
*The requested aperture value may take several frames to reach the * requested value; the camera device will report the current (intermediate) * aperture size in capture result metadata while the aperture is changing. * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.
*When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of * the ON modes, this will be overridden by the camera device * auto-exposure algorithm, the overridden values are then provided * back to the user in the corresponding result.
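A hedged sketch of combining these manual controls; the numeric values are placeholders and must come from the ranges the device advertises, and 'builder' is an assumed CaptureRequest.Builder on a device with MANUAL_SENSOR capability and a variable aperture:

 builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
 builder.set(CaptureRequest.LENS_APERTURE, 2.0f);               // f/2.0, from availableApertures
 builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L); // 10 ms, in nanoseconds
 builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);           // ISO 400, within sensitivityRange
 builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30 fps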
*Units: The f-number (f/N)
*Range of valid values:
* {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES * @see CaptureResult#LENS_STATE * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION * @see CaptureRequest#SENSOR_SENSITIVITY */ @PublicKey public static final KeyThe desired setting for the lens neutral density filter(s).
*This control will not be supported on most camera devices.
*Lens filters are typically used to lower the amount of light the sensor is exposed to (measured in steps of EV). As used here, an EV step is the standard logarithmic representation, which is non-negative, and inversely proportional to the amount of light hitting the sensor. For example, setting this to 0 would result in no reduction of the incoming light, and setting this to 2 would mean that the filter is set to reduce incoming light by two stops (allowing 1/4 of the prior amount of light to the sensor).
*It may take several frames before the lens filter density changes * to the requested value. While the filter density is still changing, * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.
*Units: Exposure Value (EV)
*Range of valid values:
* {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES * @see CaptureResult#LENS_STATE */ @PublicKey public static final KeyThe desired lens focal length; used for optical zoom.
*This setting controls the physical focal length of the camera * device's lens. Changing the focal length changes the field of * view of the camera device, and is usually used for optical zoom.
*Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this * setting won't be applied instantaneously, and it may take several * frames before the lens can change to the requested focal length. * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will * be set to MOVING.
*Optical zoom will not be supported on most devices.
*Units: Millimeters
*Range of valid values:
* {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths}
This key is available on all devices.
* * @see CaptureRequest#LENS_APERTURE * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS * @see CaptureResult#LENS_STATE */ @PublicKey public static final KeyDesired distance to plane of sharpest focus, * measured from frontmost surface of the lens.
*Should be zero for fixed-focus cameras
*Units: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details
*Range of valid values:
* >= 0
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION */ @PublicKey public static final KeyThe range of scene distances that are in * sharp focus (depth of field).
*If variable focus is not supported, the camera device can still report a fixed depth of field range.
*Units: A pair of focus distances in diopters: (near, * far); see {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details.
*Range of valid values:
* >=0
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION */ @PublicKey public static final KeySets whether the camera device uses optical image stabilization (OIS) * when capturing images.
*OIS is used to compensate for motion blur due to small * movements of the camera during capture. Unlike digital image * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS * makes use of mechanical elements to stabilize the camera * sensor, and thus allows for longer exposure times before * camera shake becomes apparent.
*Switching between different optical stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the optical stabilization modes in the first several capture results may still be "OFF", and they will become "ON" when the initialization is done.
*If a camera device supports both OIS and digital image stabilization * ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), turning both modes on may produce undesirable * interaction, so it is recommended not to enable both at the same time.
*Not all devices will support OIS; see * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for * available controls.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization}
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF * @see #LENS_OPTICAL_STABILIZATION_MODE_ON */ @PublicKey public static final KeyCurrent lens status.
*For lens parameters {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance}, * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, when changes are requested, * they may take several frames to reach the requested values. This state indicates * the current status of the lens parameters.
*When the state is STATIONARY, the lens parameters are not changing. This could be either because the parameters are all fixed, or because the lens has had enough time to reach the most recently-requested values. If all of these lens parameters are fixed for a camera device (for example, a fixed-focus lens where {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} == 0, which means the {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} parameter will always be 0), then this state will always be STATIONARY.
*When the state is MOVING, it indicates that at least one of the lens parameters * is changing.
*Possible values: *
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#LENS_APERTURE * @see CaptureRequest#LENS_FILTER_DENSITY * @see CaptureRequest#LENS_FOCAL_LENGTH * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE * @see #LENS_STATE_STATIONARY * @see #LENS_STATE_MOVING */ @PublicKey public static final KeyThe orientation of the camera relative to the sensor * coordinate system.
*The four coefficients that describe the quaternion * rotation from the Android sensor coordinate system to a * camera-aligned coordinate system where the X-axis is * aligned with the long side of the image sensor, the Y-axis * is aligned with the short side of the image sensor, and * the Z-axis is aligned with the optical axis of the sensor.
*To convert from the quaternion coefficients (x,y,z,w) to the axis of rotation (a_x, a_y, a_z) and rotation amount theta, the following formulas can be used:
 theta = 2 * acos(w)
 a_x = x / sin(theta/2)
 a_y = y / sin(theta/2)
 a_z = z / sin(theta/2)
* To create a 3x3 rotation matrix that applies the rotation * defined by this quaternion, the following matrix can be * used:
*R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
* 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
* 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
*
This matrix can then be used to apply the rotation to a column vector point with p' = Rp, where p is in the device sensor coordinate system, and p' is in the camera-oriented coordinate system.
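As an illustration of the formulas above, a small helper (a sketch only; the quaternion array is assumed to come from CameraCharacteristics.LENS_POSE_ROTATION, ordered x, y, z, w) that expands the quaternion into the 3x3 rotation matrix given here:

 // Assumption: 'q' is the float[4] from LENS_POSE_ROTATION, ordered (x, y, z, w).
 static float[] quaternionToRotationMatrix(float[] q) {
     float x = q[0], y = q[1], z = q[2], w = q[3];
     return new float[] {
         1 - 2*y*y - 2*z*z,  2*x*y - 2*z*w,      2*x*z + 2*y*w,
         2*x*y + 2*z*w,      1 - 2*x*x - 2*z*z,  2*y*z - 2*x*w,
         2*x*z - 2*y*w,      2*y*z + 2*x*w,      1 - 2*x*x - 2*y*y
     }; // row-major 3x3 matrix R, matching the layout shown above
 }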
Units: * Quaternion coefficients
*Optional - This value may be {@code null} on some devices.
*/ @PublicKey public static final KeyPosition of the camera optical center.
*The position of the camera device's lens optical center, as a three-dimensional vector (x,y,z), relative to the optical center of the largest camera device facing in the same direction as this camera, in the {@link android.hardware.SensorEvent Android sensor coordinate axes}. Note that only the axis definitions are shared with the sensor coordinate system, but not the origin.
If this device is the largest or only camera device with a given facing, then this position will be (0, 0, 0); a camera device with a lens optical center located 3 cm from the main sensor along the +X axis (to the right from the user's perspective) will report (0.03, 0, 0).
To transform pixel coordinates between two cameras facing the same direction, first the source camera {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} must be corrected for. Then the source camera {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} needs to be applied, followed by the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the source camera, the translation of the source camera relative to the destination camera, the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the destination camera, and finally the inverse of {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} of the destination camera. This obtains a radial-distortion-free coordinate in the destination camera pixel coordinates.
*To compare this against a real image from the destination * camera, the destination camera image then needs to be * corrected for radial distortion before comparison or * sampling.
*Units: Meters
*Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_RADIAL_DISTORTION */ @PublicKey public static final KeyThe parameters for this camera device's intrinsic * calibration.
*The five calibration parameters that describe the * transform from camera-centric 3D coordinates to sensor * pixel coordinates:
*[f_x, f_y, c_x, c_y, s]
*
Where f_x and f_y are the horizontal and vertical focal lengths, [c_x, c_y] is the position of the optical axis, and s is a skew parameter for the sensor plane not being aligned with the lens plane.
These are typically used within a transformation matrix K:
 K = [ f_x,   s, c_x,
         0, f_y, c_y,
         0,   0,   1 ]
which can then be combined with the camera pose rotation R and translation t ({@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}, respectively) to calculate the complete transform from world coordinates to pixel coordinates:
 P = [ K 0   * [ R t
       0 1 ]     0 1 ]
and with p_w being a point in the world coordinate system and p_s being a point in the camera active pixel array coordinate system, and with the mapping including the homogeneous division by z:
 p_h = (x_h, y_h, z_h) = P p_w
 p_s = p_h / z_h
so [x_s, y_s] are the pixel coordinates of the world point, z_s = 1, and w_s is a measurement of disparity (depth) in pixel coordinates.
Note that the coordinate system for this transform is the {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} system, where (0,0) is the top-left of the preCorrectionActiveArraySize rectangle. Once the pose and intrinsic calibration transforms have been applied to a world point, then the {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} transform needs to be applied, and the result adjusted to be in the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} coordinate system (where (0, 0) is the top-left of the activeArraySize rectangle), to determine the final pixel coordinate of the world point for processed (non-RAW) output buffers.
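To make the use of K concrete, a hedged sketch that projects a point given in camera-centric 3D coordinates onto pixel coordinates; the calibration array is assumed to come from CameraCharacteristics.LENS_INTRINSIC_CALIBRATION, ordered [f_x, f_y, c_x, c_y, s]:

 // Assumption: 'calib' is the float[5] from LENS_INTRINSIC_CALIBRATION and (x, y, z)
 // is a point in camera-centric 3D coordinates (pose rotation/translation already applied).
 static float[] projectToPixel(float[] calib, float x, float y, float z) {
     float fx = calib[0], fy = calib[1], cx = calib[2], cy = calib[3], s = calib[4];
     // Apply K and the homogeneous division by z.
     float u = (fx * x + s * y) / z + cx;
     float v = (fy * y) / z + cy;
     // Pixel coordinates in the preCorrectionActiveArraySize system
     // (radial distortion not yet applied).
     return new float[] { u, v };
 }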
Units: * Pixels in the * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} * coordinate system.
*Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#LENS_RADIAL_DISTORTION * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyThe correction coefficients to correct for this camera device's * radial and tangential lens distortion.
*Four radial distortion coefficients [kappa_0, kappa_1, kappa_2, kappa_3] and two tangential distortion coefficients [kappa_4, kappa_5] that can be used to correct the lens's geometric distortion with the mapping equations:
x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
* kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
* y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
* kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
*
Here, [x_c, y_c] are the coordinates to sample in the input image that correspond to the pixel values in the corrected image at the coordinate [x_i, y_i]:
correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
*
The pixel coordinates are defined in a normalized coordinate system related to the {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} calibration fields. Both [x_i, y_i] and [x_c, y_c] have (0,0) at the lens optical center [c_x, c_y]. The maximum magnitudes of both x and y coordinates are normalized to be 1 at the edge further from the optical center, so the range for both dimensions is -1 <= x <= 1.
Finally, r represents the radial distance from the optical center, r^2 = x_i^2 + y_i^2, and its magnitude is therefore no larger than |r| <= sqrt(2).
The distortion model used is the Brown-Conrady model.
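A minimal sketch of the mapping above; the coefficients are assumed to come from CameraCharacteristics.LENS_RADIAL_DISTORTION as [kappa_0 ... kappa_5], and the input point is assumed to already be in the normalized, optical-center-relative coordinates described here:

 // Assumption: 'k' is the float[6] distortion array and (xi, yi) is a normalized
 // coordinate with (0,0) at the optical center and |x|,|y| <= 1.
 static float[] distort(float[] k, float xi, float yi) {
     float r2 = xi * xi + yi * yi;
     float radial = k[0] + k[1] * r2 + k[2] * r2 * r2 + k[3] * r2 * r2 * r2;
     float xc = xi * radial + k[4] * (2 * xi * yi) + k[5] * (r2 + 2 * xi * xi);
     float yc = yi * radial + k[5] * (2 * xi * yi) + k[4] * (r2 + 2 * yi * yi);
     return new float[] { xc, yc }; // where to sample in the input image
 }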
*Units: * Unitless coefficients.
*Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION */ @PublicKey public static final KeyMode of operation for the noise reduction algorithm.
*The noise reduction algorithm attempts to improve image quality by removing * excessive noise added by the capture process, especially in dark conditions.
*OFF means no noise reduction will be applied by the camera device, for both raw and * YUV domain.
*MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF. This mode is optional and may not be supported by all devices. The application should check {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} before using it.
*FAST/HIGH_QUALITY both mean camera device determined noise filtering * will be applied. HIGH_QUALITY mode indicates that the camera device * will use the highest-quality noise filtering algorithms, * even if it slows down capture rate. FAST means the camera device will not * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate. * Every output stream will have a similar amount of enhancement applied.
*ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular * buffer of high-resolution images during preview and reprocess image(s) from that buffer * into a final capture when triggered by the user. In this mode, the camera device applies * noise reduction to low-resolution streams (below maximum recording resolution) to maximize * preview quality, but does not apply noise reduction to high-resolution streams, since * those will be reprocessed later if necessary.
*For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device * may adjust the noise reduction parameters for best image quality based on the * {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor} if it is set.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR * @see #NOISE_REDUCTION_MODE_OFF * @see #NOISE_REDUCTION_MODE_FAST * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY * @see #NOISE_REDUCTION_MODE_MINIMAL * @see #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG */ @PublicKey public static final KeyWhether a result given to the framework is the * final one for the capture, or only a partial that contains a * subset of the full set of dynamic metadata * values.
*The entries in the result metadata buffers for a * single capture may not overlap, except for this entry. The * FINAL buffers must retain FIFO ordering relative to the * requests that generate them, so the FINAL buffer for frame 3 must * always be sent to the framework after the FINAL buffer for frame 2, and * before the FINAL buffer for frame 4. PARTIAL buffers may be returned * in any order relative to other frames, but all PARTIAL buffers for a given * capture must arrive before the FINAL buffer for that capture. This entry may * only be used by the camera device if quirks.usePartialResult is set to 1.
*Range of valid values:
* Optional. Default value is FINAL.
Optional - This value may be {@code null} on some devices.
* @deprecated * @hide */ @Deprecated public static final KeyA frame counter set by the framework. This value monotonically * increases with every new result (that is, each new result has a unique * frameCount value).
*Reset on release()
*Units: count of frames
*Range of valid values:
* > 0
Optional - This value may be {@code null} on some devices.
* @deprecated * @hide */ @Deprecated public static final KeyAn application-specified ID for the current * request. Must be maintained unchanged in output * frame
*Units: arbitrary integer assigned by application
*Range of valid values:
* Any int
Optional - This value may be {@code null} on some devices.
* @hide */ public static final KeySpecifies the number of pipeline stages the frame went * through from when it was exposed to when the final completed result * was available to the framework.
*Depending on what settings are used in the request, and * what streams are configured, the data may undergo less processing, * and some pipeline stages skipped.
*See {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} for more details.
*Range of valid values:
* <= {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth}
This key is available on all devices.
* * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH */ @PublicKey public static final KeyThe desired region of the sensor to read out for this capture.
*This control can be used to implement digital zoom.
*The crop region coordinate system is based off {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with (0, 0) being the top-left corner of the sensor active array.
Output streams use this rectangle to produce their output, * cropping to a smaller region if necessary to maintain the * stream's aspect ratio, then scaling the sensor input to * match the output's configured resolution.
*The crop region is applied after the RAW to other color * space (e.g. YUV) conversion. Since raw streams * (e.g. RAW16) don't have the conversion stage, they are not * croppable. The crop region will be ignored by raw streams.
*For non-raw streams, any additional per-stream cropping will * be done to maximize the final pixel area of the stream.
*For example, if the crop region is set to a 4:3 aspect * ratio, then 4:3 streams will use the exact crop * region. 16:9 streams will further crop vertically * (letterbox).
*Conversely, if the crop region is set to a 16:9, then 4:3 * outputs will crop horizontally (pillarbox), and 16:9 * streams will match exactly. These additional crops will * be centered within the crop region.
*The width and height of the crop region cannot be set to be smaller than floor( activeArraySize.width / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} ) and floor( activeArraySize.height / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} ), respectively.
The camera device may adjust the crop region to account * for rounding and other hardware requirements; the final * crop region used will be included in the output capture * result.
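For instance, a hedged sketch of a centered crop region for a given digital zoom factor; 'characteristics' and 'builder' are assumed variables, and the zoom factor must not exceed android.scaler.availableMaxDigitalZoom:

 // Assumption: 'characteristics' is this camera's CameraCharacteristics and
 // 'builder' is a CaptureRequest.Builder; zoomFactor >= 1.
 Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
 float zoomFactor = 2.0f; // placeholder
 int cropW = Math.round(active.width() / zoomFactor);
 int cropH = Math.round(active.height() / zoomFactor);
 int left = (active.width() - cropW) / 2;  // (0,0) is the top-left of the active array
 int top = (active.height() - cropH) / 2;
 builder.set(CaptureRequest.SCALER_CROP_REGION, new Rect(left, top, left + cropW, top + cropH));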
*Units: Pixel coordinates relative to * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
*This key is available on all devices.
* * @see CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyDuration each pixel is exposed to * light.
*If the sensor can't expose this exact duration, it will shorten the * duration exposed to the nearest possible value (rather than expose longer). * The final exposure time used will be available in the output capture result.
*This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.
*Units: Nanoseconds
*Range of valid values:
* {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE */ @PublicKey public static final KeyDuration from start of frame exposure to * start of next frame exposure.
*The maximum frame rate that can be supported by a camera subsystem is * a function of many factors:
*Since these factors can vary greatly between different ISPs and * sensors, the camera abstraction tries to represent the bandwidth * restrictions with as simple a model as possible.
*The model presented has the following characteristics:
*The necessary information for the application, given the model above, * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }. * These are used to determine the maximum frame rate / minimum frame * duration that is possible for a given stream configuration.
*Specifically, the application can use the following rules to * determine the minimum frame duration it can request from the camera * device:
1. Let the set of currently configured input/output streams be called S.
2. Find the minimum frame durations for each stream in S, by looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } (with its respective size/format). Let this set of frame durations be called F.
3. For any given request R, the minimum frame duration allowed for R is the maximum out of all values in F. Let the streams used in R be called S_r.
If none of the streams in S_r have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } using its respective size/format), then the frame duration in F determines the steady state frame rate that the application will get if it uses R as a repeating request. Let this special kind of request be called Rsimple.
A repeating request Rsimple can be occasionally interleaved by a single capture of a new request Rstall (which has at least one in-use stream with a non-0 stall time) and if Rstall has the same minimum frame duration this will not cause a frame rate loss if all buffers from the previous Rstall have already been delivered.
For more details about stalling, see * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.
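A sketch of rules 2 and 3 above for a two-stream configuration; the sizes and formats below are placeholders, and 'characteristics' is an assumed CameraCharacteristics instance:

 // Assumption: the session is configured with one YUV preview stream and one JPEG still stream.
 StreamConfigurationMap map =
         characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
 Size previewSize = new Size(1920, 1080); // placeholder sizes
 Size jpegSize = new Size(4032, 3024);
 long previewMin = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, previewSize);
 long jpegMin = map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
 // The minimum frame duration for a request using both streams is the maximum over F.
 long minFrameDuration = Math.max(previewMin, jpegMin);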
*This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.
*Units: Nanoseconds
*Range of valid values:
* See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}, {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration is capped to max(duration, exposureTime + overhead).
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION */ @PublicKey public static final KeyThe amount of gain applied to sensor data * before processing.
*The sensitivity is the standard ISO sensitivity value, * as defined in ISO 12232:2006.
*The sensitivity must be within {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}, and if it is less than {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}, the camera device is guaranteed to use only analog amplification for applying the gain.
*If the camera device cannot apply the exact sensitivity * requested, it will reduce the gain to the nearest supported * value. The final sensitivity used will be available in the * output capture result.
*This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.
*Units: ISO arithmetic units
*Range of valid values:
* {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY */ @PublicKey public static final KeyTime at start of exposure of first * row of the image sensor active array, in nanoseconds.
*The timestamps are also included in all image * buffers produced for the same capture, and will be identical * on all the outputs.
 * When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} == UNKNOWN,
 * the timestamps measure time since an unspecified starting point,
 * and are monotonically increasing. They can be compared with the
 * timestamps for other captures from the same camera device, but are
 * not guaranteed to be comparable to any other time source.
 *
 * When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} == REALTIME, the
 * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can
 * be compared to other timestamps from other subsystems that
 * are using that base.
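 *
 * For example, a minimal sketch of comparing the capture timestamp against the system
 * clock when the timestamp source is REALTIME (assuming {@code characteristics} and
 * {@code result} are obtained elsewhere by the application):
 *
 * Integer source = characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
 * Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
 * if (source != null && timestamp != null
 *         && source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
 *     long ageNs = android.os.SystemClock.elapsedRealtimeNanos() - timestamp;
 * }
 *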
For reprocessing, the timestamp will match the start of exposure of * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the * timestamp} in the TotalCaptureResult that was used to create the * reprocess capture request.
*Units: Nanoseconds
*Range of valid values:
* > 0
This key is available on all devices.
* * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE */ @PublicKey public static final KeyThe estimated camera neutral color in the native sensor colorspace at * the time of capture.
*This value gives the neutral color point encoded as an RGB value in the * native sensor color space. The neutral color point indicates the * currently estimated white point of the scene illumination. It can be * used to interpolate between the provided color transforms when * processing raw sensor data.
*The order of the values is R, G, B; where R is in the lowest index.
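 *
 * For example, a minimal sketch of reading the neutral color point (assuming
 * {@code result} is a received {@link CaptureResult}):
 *
 * Rational[] neutral = result.get(CaptureResult.SENSOR_NEUTRAL_COLOR_POINT);
 * if (neutral != null) {
 *     float r = neutral[0].floatValue(); // index 0 = R, 1 = G, 2 = B
 * }
 *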
*Optional - This value may be {@code null} on some devices.
*/ @PublicKey public static final KeyNoise model coefficients for each CFA mosaic channel.
*This key contains two noise model coefficients for each CFA channel * corresponding to the sensor amplification (S) and sensor readout * noise (O). These are given as pairs of coefficients for each channel * in the same order as channels listed for the CFA layout key * (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}). This is * represented as an array of Pair<Double, Double>, where * the first member of the Pair at index n is the S coefficient and the * second member is the O coefficient for the nth color channel in the CFA.
*These coefficients are used in a two parameter noise model to describe * the amount of noise present in the image for each CFA channel. The * noise model used here is:
*N(x) = sqrt(Sx + O)
 *Where x represents the recorded signal of a CFA channel normalized to
 * the range [0, 1], and S and O are the noise model coefficients for
 * that channel.
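 *
 * For example, a minimal sketch of evaluating the noise model for the first CFA channel
 * (assuming {@code result} is a received {@link CaptureResult} and {@code x} is a signal
 * value normalized to [0, 1]):
 *
 * Pair&lt;Double, Double&gt;[] profile = result.get(CaptureResult.SENSOR_NOISE_PROFILE);
 * if (profile != null) {
 *     double s = profile[0].first;  // sensor amplification coefficient S
 *     double o = profile[0].second; // sensor readout noise coefficient O
 *     double noise = Math.sqrt(s * x + o);
 * }
 *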
*A more detailed description of the noise model can be found in the * Adobe DNG specification for the NoiseProfile tag.
*Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ @PublicKey public static final KeyThe worst-case divergence between Bayer green channels.
*This value is an estimate of the worst case split between the * Bayer green channels in the red and blue rows in the sensor color * filter array.
*The green split is calculated as follows:
*R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))
The ratio R is the green split divergence reported for this property, * which represents how much the green channels differ in the mosaic * pattern. This value is typically used to determine the treatment of * the green mosaic channels when demosaicing.
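 *
 * For example, a minimal sketch of reading the reported divergence (assuming
 * {@code result} is a received {@link CaptureResult}):
 *
 * Float greenSplit = result.get(CaptureResult.SENSOR_GREEN_SPLIT);
 *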
*The green split value can be roughly interpreted as follows:
*Range of valid values:
>= 0
*Optional - This value may be {@code null} on some devices.
*/ @PublicKey public static final KeyA pixel [R, G_even, G_odd, B]
that supplies the test pattern
* when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.
Each color channel is treated as an unsigned 32-bit integer. * The camera device then uses the most significant X bits * that correspond to how many bits are in its Bayer raw sensor * output.
*For example, a sensor with RAW10 Bayer output would use the * 10 most significant bits from each color channel.
*Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ @PublicKey public static final KeyWhen enabled, the sensor sends a test pattern instead of * doing a real exposure from the camera.
*When a test pattern is enabled, all manual sensor controls specified * by android.sensor.* will be ignored. All other controls should * work as normal.
 *For example, if manual flash is enabled, flash firing should still
 * occur (and the test pattern should remain unmodified, since the flash
 * would not actually affect it).
*Defaults to OFF.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES * @see #SENSOR_TEST_PATTERN_MODE_OFF * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY * @see #SENSOR_TEST_PATTERN_MODE_PN9 * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1 */ @PublicKey public static final KeyDuration between the start of first row exposure * and the start of last row exposure.
*This is the exposure time skew between the first and last * row exposure start times. The first row and the last row are * the first and last rows inside of the * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.
*For typical camera sensors that use rolling shutters, this is also equivalent * to the frame readout time.
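 *
 * For example, a minimal sketch of reading the skew together with the exposure time
 * (assuming {@code result} is a received {@link CaptureResult}):
 *
 * Long skewNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW);
 * Long exposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
 *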
*Units: Nanoseconds
*Range of valid values:
* >= 0 and <
* {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE */ @PublicKey public static final KeyA per-frame dynamic black level offset for each of the color filter * arrangement (CFA) mosaic channels.
*Camera sensor black levels may vary dramatically for different * capture settings (e.g. {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). The fixed black * level reported by {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may be too * inaccurate to represent the actual value on a per-frame basis. The * camera device internal pipeline relies on reliable black level values * to process the raw images appropriately. To get the best image * quality, the camera device may choose to estimate the per frame black * level values either based on optically shielded black regions * ({@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}) or its internal model.
*This key reports the camera device estimated per-frame zero light * value for each of the CFA mosaic channels in the camera sensor. The * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may only represent a coarse * approximation of the actual black level values. This value is the * black level used in camera device internal image processing pipeline * and generally more accurate than the fixed black level values. * However, since they are estimated values by the camera device, they * may not be as accurate as the black level values calculated from the * optical black pixels reported by {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}.
*The values are given in the same order as channels listed for the CFA * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the * nth value given corresponds to the black level offset for the nth * color channel listed in the CFA.
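 *
 * For example, a minimal sketch of reading the per-frame black and white levels for RAW
 * processing (assuming {@code result} is a received {@link CaptureResult}):
 *
 * float[] blackLevels = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
 * Integer whiteLevel = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
 *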
*This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is * available or the camera device advertises this key via * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.
*Range of valid values:
* >= 0 for each.
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS * @see CaptureRequest#SENSOR_SENSITIVITY */ @PublicKey public static final KeyMaximum raw value output by sensor for this frame.
*Since the {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may change for different * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}), the white * level will change accordingly. This key is similar to * {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}, but specifies the camera device * estimated white level for each frame.
*This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is * available or the camera device advertises this key via * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys }.
*Range of valid values:
* >= 0
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS * @see CaptureRequest#SENSOR_SENSITIVITY */ @PublicKey public static final KeyQuality of lens shading correction applied * to the image data.
 * When set to OFF mode, no lens shading correction will be applied by the
 * camera device, and an identity lens shading map data will be provided
 * if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON. For example, for a lens
 * shading map with size of [ 4, 3 ],
 * the output {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap} for this case will be an identity
 * map shown below:
 *
 * [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
*
* When set to other modes, lens shading correction will be applied by the camera * device. Applications can request lens shading map data by setting * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide lens * shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap}; the returned shading map * data will be the one applied by the camera device for this capture request.
 * The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
 * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
 * AWB are in AUTO modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} != OFF
 * and {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF),
 * to get the best results, it is recommended that applications wait for the AE and AWB
 * to converge before using the returned shading map data.
Possible values: *
Available values for this device:
* {@link CameraCharacteristics#SHADING_AVAILABLE_MODES android.shading.availableModes}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AWB_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SHADING_AVAILABLE_MODES * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE * @see #SHADING_MODE_OFF * @see #SHADING_MODE_FAST * @see #SHADING_MODE_HIGH_QUALITY */ @PublicKey public static final KeyOperating mode for the face detector * unit.
*Whether face detection is enabled, and whether it * should output just the basic fields or the full set of * fields.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}
This key is available on all devices.
* * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES * @see #STATISTICS_FACE_DETECT_MODE_OFF * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE * @see #STATISTICS_FACE_DETECT_MODE_FULL */ @PublicKey public static final KeyList of unique IDs for detected faces.
*Each detected face is given a unique ID that is valid for as long as the face is visible * to the camera device. A face that leaves the field of view and later returns may be * assigned a new ID.
*Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL * This key is available on all devices.
* * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @hide */ public static final KeyList of landmarks for detected * faces.
 * The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
 * (0, 0) being the top-left pixel of the active array.
 *
 * Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL.
 * This key is available on all devices.
* * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @hide */ public static final KeyList of the bounding rectangles for detected * faces.
 * The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
 * (0, 0) being the top-left pixel of the active array.
 *
 * Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.
 * This key is available on all devices.
* * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @hide */ public static final KeyList of the face confidence scores for * detected faces
*Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.
*Range of valid values:
* 1-100
This key is available on all devices.
* * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @hide */ public static final KeyList of the faces detected through camera face detection * in this capture.
 * Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.
 *
 * This key is available on all devices.
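 *
 * For example, a minimal sketch of enumerating the detected faces (assuming
 * {@code result} is a received {@link CaptureResult}):
 *
 * Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
 * if (faces != null) {
 *     for (Face face : faces) {
 *         Rect bounds = face.getBounds(); // active array coordinates
 *         int score = face.getScore();    // 1-100
 *     }
 * }
 *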
* * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE */ @PublicKey @SyntheticKey public static final KeyThe shading map is a low-resolution floating-point map * that lists the coefficients used to correct for vignetting, for each * Bayer color channel.
*The map provided here is the same map that is used by the camera device to * correct both color shading and vignetting for output non-RAW images.
 * When there is no lens shading correction applied to RAW
 * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} == false),
 * this map is the complete lens shading correction
 * map; when there is some lens shading correction applied to
 * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} == true),
 * this map reports the remaining lens shading
 * correction map that needs to be applied to get shading
 * corrected images that match the camera device's output for
 * non-RAW formats.
 *
 * For a complete shading correction map, the least shaded
 * section of the image will have a gain factor of 1; all
 * other sections will have gains above 1.
*When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map * will take into account the colorCorrection settings.
*The shading map is for the entire active pixel array, and is not * affected by the crop region specified in the request. Each shading map * entry is the value of the shading compensation map over a specific * pixel on the sensor. Specifically, with a (N x M) resolution shading * map, and an active pixel array size (W x H), shading map entry * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. * The map is assumed to be bilinearly interpolated between the sample points.
*The channel order is [R, Geven, Godd, B], where Geven is the green * channel for the even rows of a Bayer pattern, and Godd is the odd rows. * The shading map is stored in a fully interleaved format.
*The shading map will generally have on the order of 30-40 rows and columns, * and will be smaller than 64x64.
*As an example, given a very small map defined as:
*width,height = [ 4, 3 ]
* values =
* [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
* 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
* 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
* 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
* 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
*
* The low-resolution scaling map images for each channel are * (displayed using nearest-neighbor interpolation):
 * (Per-channel shading map images omitted.)
*As a visualization only, inverting the full-color map to recover an * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
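 *
 * For example, a minimal sketch of sampling the returned map (assuming {@code result}
 * is a received {@link CaptureResult}):
 *
 * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
 * if (map != null) {
 *     int rows = map.getRowCount();
 *     int cols = map.getColumnCount();
 *     float redGain = map.getGainFactor(RggbChannelVector.RED, 0, 0);
 * }
 *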
* *Range of valid values:
* Each gain factor is >= 1
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#COLOR_CORRECTION_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED */ @PublicKey public static final KeyThe shading map is a low-resolution floating-point map * that lists the coefficients used to correct for vignetting and color shading, * for each Bayer color channel of RAW image data.
*The map provided here is the same map that is used by the camera device to * correct both color shading and vignetting for output non-RAW images.
 * When there is no lens shading correction applied to RAW
 * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} == false),
 * this map is the complete lens shading correction
 * map; when there is some lens shading correction applied to
 * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} == true),
 * this map reports the remaining lens shading
 * correction map that needs to be applied to get shading
 * corrected images that match the camera device's output for
 * non-RAW formats.
 *
 * For a complete shading correction map, the least shaded
 * section of the image will have a gain factor of 1; all
 * other sections will have gains above 1.
*When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map * will take into account the colorCorrection settings.
*The shading map is for the entire active pixel array, and is not * affected by the crop region specified in the request. Each shading map * entry is the value of the shading compensation map over a specific * pixel on the sensor. Specifically, with a (N x M) resolution shading * map, and an active pixel array size (W x H), shading map entry * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. * The map is assumed to be bilinearly interpolated between the sample points.
*The channel order is [R, Geven, Godd, B], where Geven is the green * channel for the even rows of a Bayer pattern, and Godd is the odd rows. * The shading map is stored in a fully interleaved format, and its size * is provided in the camera static metadata by android.lens.info.shadingMapSize.
*The shading map will generally have on the order of 30-40 rows and columns, * and will be smaller than 64x64.
*As an example, given a very small map defined as:
*android.lens.info.shadingMapSize = [ 4, 3 ]
* android.statistics.lensShadingMap =
* [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
* 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
* 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
* 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
* 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
* 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
*
* The low-resolution scaling map images for each channel are * (displayed using nearest-neighbor interpolation):
 * (Per-channel shading map images omitted.)
*As a visualization only, inverting the full-color map to recover an * image of a gray wall (using bicubic interpolation for visual quality) * as captured by the sensor gives:
 * Note that the RAW image data might be subject to lens shading
 * correction not reported on this map. Query
 * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has been subject
 * to lens shading correction. If {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}
 * is TRUE, the RAW image data is subject to partial or full lens shading
 * correction. In the case that full lens shading correction is applied to RAW
 * images, the gain factor map reported in this key will contain all 1.0 gains.
 * In other words, the map reported in this key is the remaining lens shading
 * that needs to be applied on the RAW image to get images without lens shading
 * artifacts. See {@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW android.request.maxNumOutputRaw} for a list of RAW image
 * formats.
*Range of valid values:
* Each gain factor is >= 1
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#COLOR_CORRECTION_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED * @hide */ public static final KeyThe best-fit color channel gains calculated * by the camera device's statistics units for the current output frame.
*This may be different than the gains used for this frame, * since statistics processing on data from a new frame * typically completes after the transform has already been * applied to that frame.
*The 4 channel gains are defined in Bayer domain, * see {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} for details.
*This value should always be calculated by the auto-white balance (AWB) block, * regardless of the android.control.* current values.
*Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @deprecated * @hide */ @Deprecated public static final KeyThe best-fit color transform matrix estimate * calculated by the camera device's statistics units for the current * output frame.
*The camera device will provide the estimate from its * statistics unit on the white balance transforms to use * for the next frame. These are the values the camera device believes * are the best fit for the current output frame. This may * be different than the transform used for this frame, since * statistics processing on data from a new frame typically * completes after the transform has already been applied to * that frame.
*These estimates must be provided for all frames, even if * capture settings and color transforms are set by the application.
*This value should always be calculated by the auto-white balance (AWB) block, * regardless of the android.control.* current values.
*Optional - This value may be {@code null} on some devices.
* @deprecated * @hide */ @Deprecated public static final KeyThe camera device estimated scene illumination lighting * frequency.
*Many light sources, such as most fluorescent lights, flicker at a rate * that depends on the local utility power standards. This flicker must be * accounted for by auto-exposure routines to avoid artifacts in captured images. * The camera device uses this entry to tell the application what the scene * illuminant frequency is.
 * When manual exposure control is enabled
 * ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF
 * or {@link CaptureRequest#CONTROL_MODE android.control.mode} == OFF),
 * the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't perform
 * antibanding, and the application can ensure it selects
 * exposure times that do not cause banding issues by looking
 * into this metadata field. See
 * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} for more details.
 *
 * Reports NONE if there doesn't appear to be flickering illumination.
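 *
 * For example, a minimal sketch of using the reported flicker frequency to choose
 * banding-safe exposure times under manual exposure (assuming {@code result} is a
 * received {@link CaptureResult}):
 *
 * Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
 * if (flicker != null &amp;&amp; flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
 *     // Prefer exposure times that are multiples of 10 ms (1/100 s) to avoid banding.
 * }
 *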
*Possible values: *
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see #STATISTICS_SCENE_FLICKER_NONE * @see #STATISTICS_SCENE_FLICKER_50HZ * @see #STATISTICS_SCENE_FLICKER_60HZ */ @PublicKey public static final KeyOperating mode for hot pixel map generation.
 * If set to true, a hot pixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
 * If set to false, no hot pixel map will be returned.
Range of valid values:
* {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}
Optional - This value may be {@code null} on some devices.
* * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES */ @PublicKey public static final KeyList of (x, y)
coordinates of hot/defective pixels on the sensor.
 *
 * A coordinate (x, y) must lie between (0, 0) and
 * (width - 1, height - 1) (inclusive), which are the top-left and
 * bottom-right of the pixel array, respectively. The width and
 * height dimensions are given in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.
 * This may include hot pixels that lie outside of the active array
 * bounds given by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.
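 *
 * For example, a minimal sketch of requesting and reading the hot pixel map (assuming
 * {@code requestBuilder} and {@code result} are obtained elsewhere by the application):
 *
 * requestBuilder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);
 * // After the capture completes:
 * Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
 *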
 * Range of valid values:
 * n <= number of pixels on the sensor.
 * The (x, y) coordinates must be bounded by
 * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.
Optional - This value may be {@code null} on some devices.
* * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE */ @PublicKey public static final KeyWhether the camera device will output the lens * shading map in output result metadata.
*When set to ON, * android.statistics.lensShadingMap will be provided in * the output result metadata.
*ON is always supported on devices with the RAW capability.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES android.statistics.info.availableLensShadingMapModes}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON */ @PublicKey public static final KeyTonemapping / contrast / gamma curve for the blue * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * CONTRAST_CURVE.
*See android.tonemap.curveRed for more details.
*Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#TONEMAP_MODE * @hide */ public static final KeyTonemapping / contrast / gamma curve for the green * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * CONTRAST_CURVE.
*See android.tonemap.curveRed for more details.
*Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#TONEMAP_MODE * @hide */ public static final KeyTonemapping / contrast / gamma curve for the red * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * CONTRAST_CURVE.
*Each channel's curve is defined by an array of control points:
*android.tonemap.curveRed =
* [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
* 2 <= N <= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}
 * These are sorted in order of increasing P_in; it is
 * required that input values 0.0 and 1.0 are included in the list to
* define a complete mapping. For input values between control points,
* the camera device must linearly interpolate between the control
* points.
Each curve can have an independent number of points, and the number * of points can be less than max (that is, the request doesn't have to * always provide a curve with number of points equivalent to * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).
*A few examples, and their corresponding graphical mappings; these * only specify the red channel and the precision is limited to 4 * digits, for conciseness.
*Linear mapping:
*android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
*
*
* Invert mapping:
*android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
*
*
* Gamma 1/2.2 mapping, with 16 control points:
*android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
* 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
* 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
* 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
*
*
* Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
*android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
* 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
* 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
* 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
*
*
* Range of valid values:
* 0-1 on both input and output coordinates, normalized
* as a floating-point value such that 0 == black and 1 == white.
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS * @see CaptureRequest#TONEMAP_MODE * @hide */ public static final KeyTonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} * is CONTRAST_CURVE.
 *The tonemapCurve consists of three curves for each of the red, green, and blue
 * channels respectively. The following example uses the red channel as an
 * example; the same logic applies to the green and blue channels.
 * Each channel's curve is defined by an array of control points:
*curveRed =
* [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
* 2 <= N <= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}
 * These are sorted in order of increasing P_in; it is always
 * guaranteed that input values 0.0 and 1.0 are included in the list to
* define a complete mapping. For input values between control points,
* the camera device must linearly interpolate between the control
* points.
Each curve can have an independent number of points, and the number * of points can be less than max (that is, the request doesn't have to * always provide a curve with number of points equivalent to * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).
*A few examples, and their corresponding graphical mappings; these * only specify the red channel and the precision is limited to 4 * digits, for conciseness.
*Linear mapping:
*curveRed = [ (0, 0), (1.0, 1.0) ]
*
*
* Invert mapping:
*curveRed = [ (0, 1.0), (1.0, 0) ]
*
*
* Gamma 1/2.2 mapping, with 16 control points:
*curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
* (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
* (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
* (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
*
*
* Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
*curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
* (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
* (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
* (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
*
*
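 *
 * For example, a minimal sketch of reading back the curve actually used for the red
 * channel (assuming {@code result} is a received {@link CaptureResult}):
 *
 * TonemapCurve curve = result.get(CaptureResult.TONEMAP_CURVE);
 * if (curve != null) {
 *     int points = curve.getPointCount(TonemapCurve.CHANNEL_RED);
 *     PointF first = curve.getPoint(TonemapCurve.CHANNEL_RED, 0); // (Pin, Pout)
 * }
 *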
* Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS * @see CaptureRequest#TONEMAP_MODE */ @PublicKey @SyntheticKey public static final KeyHigh-level global contrast/gamma/tonemapping control.
*When switching to an application-defined contrast curve by setting
* {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
 * per-channel with a set of (in, out) points that specify the
* mapping from input high-bit-depth pixel value to the output
* low-bit-depth value. Since the actual pixel ranges of both input
* and output may change depending on the camera pipeline, the values
* are specified by normalized floating-point numbers.
More-complex color mapping operations such as 3D color look-up * tables, selective chroma enhancement, or other non-linear color * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * CONTRAST_CURVE.
*When using either FAST or HIGH_QUALITY, the camera device will * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}. * These values are always available, and as close as possible to the * actually used nonlinear/nonglobal transforms.
*If a request is sent with CONTRAST_CURVE with the camera device's * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be * roughly the same.
*Possible values: *
Available values for this device:
* {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}
Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES * @see CaptureRequest#TONEMAP_CURVE * @see CaptureRequest#TONEMAP_MODE * @see #TONEMAP_MODE_CONTRAST_CURVE * @see #TONEMAP_MODE_FAST * @see #TONEMAP_MODE_HIGH_QUALITY * @see #TONEMAP_MODE_GAMMA_VALUE * @see #TONEMAP_MODE_PRESET_CURVE */ @PublicKey public static final KeyTonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * GAMMA_VALUE
 * The tonemap curve will be defined by the following formula:
 *
 * OUT = pow(IN, 1.0 / gamma)
 *
 * where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0],
 * pow is the power function, and gamma is the gamma value specified by this
 * key.
*The same curve will be applied to all color channels. The camera device * may clip the input gamma value to its supported range. The actual applied * value will be returned in capture result.
*The valid range of gamma value varies on different devices, but values * within [1.0, 5.0] are guaranteed not to be clipped.
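 *
 * For example, a minimal sketch of requesting a gamma curve and reading back the value
 * actually applied (assuming {@code requestBuilder} and {@code result} are obtained
 * elsewhere by the application):
 *
 * requestBuilder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
 * requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
 * // After the capture completes:
 * Float appliedGamma = result.get(CaptureResult.TONEMAP_GAMMA);
 *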
*Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#TONEMAP_MODE */ @PublicKey public static final KeyTonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is * PRESET_CURVE
 * The tonemap curve will be defined by the specified standard.
 *
 * sRGB (approximated by 16 control points): (figure omitted)
 *
 * Rec. 709 (approximated by 16 control points): (figure omitted)
 *
 * Note that the above figures show a 16 control point approximation of the preset
 * curves. Camera devices may apply a different approximation to the curve.
*Possible values: *
Optional - This value may be {@code null} on some devices.
* * @see CaptureRequest#TONEMAP_MODE * @see #TONEMAP_PRESET_CURVE_SRGB * @see #TONEMAP_PRESET_CURVE_REC709 */ @PublicKey public static final KeyThis LED is nominally used to indicate to the user * that the camera is powered on and may be streaming images back to the * Application Processor. In certain rare circumstances, the OS may * disable this when video is processed locally and not transmitted to * any untrusted applications.
*In particular, the LED must always be on when the data could be * transmitted off the device. The LED should always be on whenever * data is stored locally on the device.
*The LED may be off if a trusted application is using the data that * doesn't violate the above rules.
*Optional - This value may be {@code null} on some devices.
* @hide */ public static final KeyWhether black-level compensation is locked * to its current values, or is free to vary.
*Whether the black level offset was locked for this frame. Should be * ON if {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock} was ON in the capture request, unless * a change in other capture settings forced the camera device to * perform a black level reset.
*Optional - This value may be {@code null} on some devices.
*Full capability - * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#BLACK_LEVEL_LOCK * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ @PublicKey public static final KeyThe frame number corresponding to the last request * with which the output result (metadata + buffers) has been fully * synchronized.
*When a request is submitted to the camera device, there is usually a * delay of several frames before the controls get applied. A camera * device may either choose to account for this delay by implementing a * pipeline and carefully submit well-timed atomic control updates, or * it may start streaming control changes that span over several frame * boundaries.
*In the latter case, whenever a request's settings change relative to * the previous submitted request, the full set of changes may take * multiple frame durations to fully take effect. Some settings may * take effect sooner (in less frame durations) than others.
*While a set of control changes are being propagated, this value * will be CONVERGING.
*Once it is fully known that a set of control changes have been * finished propagating, and the resulting updated control settings * have been read back by the camera device, this value will be set * to a non-negative frame number (corresponding to the request to * which the results have synchronized to).
*Older camera device implementations may not have a way to detect * when all camera controls have been applied, and will always set this * value to UNKNOWN.
*FULL capability devices will always have this value set to the * frame number of the request corresponding to this result.
*Further details:
*Pipeline considerations:
*Submitting a request with updated controls relative to the previously * submitted requests may also invalidate the synchronization state * of all the results corresponding to currently in-flight requests.
*In other words, results for this current request and up to * {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} prior requests may have their * android.sync.frameNumber change to CONVERGING.
*Possible values: *
Available values for this device:
 * Either a non-negative value corresponding to a
 * frame_number, or one of the two enums (CONVERGING / UNKNOWN).
This key is available on all devices.
* * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH * @see #SYNC_FRAME_NUMBER_CONVERGING * @see #SYNC_FRAME_NUMBER_UNKNOWN * @hide */ public static final KeyThe amount of exposure time increase factor applied to the original output * frame by the application processing before sending for reprocessing.
*This is optional, and will be supported if the camera device supports YUV_REPROCESSING * capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains YUV_REPROCESSING).
*For some YUV reprocessing use cases, the application may choose to filter the original * output frames to effectively reduce the noise to the same level as a frame that was * captured with longer exposure time. To be more specific, assuming the original captured * images were captured with a sensitivity of S and an exposure time of T, the model in * the camera device is that the amount of noise in the image would be approximately what * would be expected if the original capture parameters had been a sensitivity of * S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather * than S and T respectively. If the captured images were processed by the application * before being sent for reprocessing, then the application may have used image processing * algorithms and/or multi-frame image fusion to reduce the noise in the * application-processed images (input images). By using the effectiveExposureFactor * control, the application can communicate to the camera device the actual noise level * improvement in the application-processed image. With this information, the camera * device can select appropriate noise reduction and edge enhancement parameters to avoid * excessive noise reduction ({@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}) and insufficient edge * enhancement ({@link CaptureRequest#EDGE_MODE android.edge.mode}) being applied to the reprocessed frames.
 *For example, for the multi-frame image fusion use case, the application may fuse
 * multiple output frames together to a final frame for reprocessing. When N images are
 * fused into 1 image for reprocessing, the exposure time increase factor could be up to
 * the square root of N (based on a simple photon shot noise model). The camera device will
 * adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
 * produce the best quality images.
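 *
 * For example, a minimal sketch of setting the factor on a reprocess request after fusing
 * several frames (assuming {@code reprocessBuilder} is a reprocess
 * {@link CaptureRequest.Builder} and {@code numFusedFrames} is the application's own count):
 *
 * reprocessBuilder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
 *         (float) Math.sqrt(numFusedFrames));
 *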
 *This is a relative factor; 1.0 indicates the application hasn't processed the input
 * buffer in a way that affects its effective exposure time.
 * This control is only effective for YUV reprocessing capture requests. For noise
 * reduction reprocessing, it is only effective when {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} != OFF.
 * Similarly, for edge enhancement reprocessing, it is only effective when
 * {@link CaptureRequest#EDGE_MODE android.edge.mode} != OFF.
Units: Relative exposure time increase factor.
*Range of valid values:
* >= 1.0
Optional - This value may be {@code null} on some devices.
*Limited capability - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key
* * @see CaptureRequest#EDGE_MODE * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ @PublicKey public static final Key