/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; import android.annotation.NonNull; import android.annotation.Nullable; import android.util.Log; import android.util.Pair; import android.util.Range; import android.util.Rational; import android.util.Size; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Set; import static android.media.Utils.intersectSortedDistinctRanges; import static android.media.Utils.sortDistinctRanges; /** * Provides information about a given media codec available on the device. You can * iterate through all codecs available by querying {@link MediaCodecList}. For example, * here's how to find an encoder that supports a given MIME type: *
* <pre>
* private static MediaCodecInfo selectCodec(String mimeType) {
*     int numCodecs = MediaCodecList.getCodecCount();
*     for (int i = 0; i &lt; numCodecs; i++) {
*         MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
*
*         if (!codecInfo.isEncoder()) {
*             continue;
*         }
*
*         String[] types = codecInfo.getSupportedTypes();
*         for (int j = 0; j &lt; types.length; j++) {
*             if (types[j].equalsIgnoreCase(mimeType)) {
*                 return codecInfo;
*             }
*         }
*     }
*     return null;
* }</pre>
*/
public final class MediaCodecInfo {
    private boolean mIsEncoder;
    private String mName;
    private Map<String, CodecCapabilities> mCaps;
You can get an instance for a given {@link MediaCodecInfo} object with * {@link MediaCodecInfo#getCapabilitiesForType getCapabilitiesForType()}, passing a MIME type. */ public static final class CodecCapabilities { public CodecCapabilities() { } // CLASSIFICATION private String mMime; private int mMaxSupportedInstances; // LEGACY FIELDS // Enumerates supported profile/level combinations as defined // by the type of encoded data. These combinations impose restrictions // on video resolution, bitrate... and limit the available encoder tools // such as B-frame support, arithmetic coding... public CodecProfileLevel[] profileLevels; // NOTE this array is modifiable by user // from OMX_COLOR_FORMATTYPE /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ public static final int COLOR_FormatMonochrome = 1; /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ public static final int COLOR_Format8bitRGB332 = 2; /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ public static final int COLOR_Format12bitRGB444 = 3; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format16bitARGB4444 = 4; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format16bitARGB1555 = 5; /** * 16 bits per pixel RGB color format, with 5-bit red & blue and 6-bit green component. *
* Using 16-bit little-endian representation, colors stored as Red 15:11, Green 10:5, Blue 4:0. *
* byte byte * <--------- i --------> | <------ i + 1 ------> * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ * | BLUE | GREEN | RED | * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ * 0 4 5 7 0 2 3 7 * bit ** * This format corresponds to {@link android.graphics.PixelFormat#RGB_565} and * {@link android.graphics.ImageFormat#RGB_565}. */ public static final int COLOR_Format16bitRGB565 = 6; /** @deprecated Use {@link #COLOR_Format16bitRGB565}. */ public static final int COLOR_Format16bitBGR565 = 7; /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ public static final int COLOR_Format18bitRGB666 = 8; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format18bitARGB1665 = 9; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format19bitARGB1666 = 10; /** @deprecated Use {@link #COLOR_Format24bitBGR888} or {@link #COLOR_FormatRGBFlexible}. */ public static final int COLOR_Format24bitRGB888 = 11; /** * 24 bits per pixel RGB color format, with 8-bit red, green & blue components. *
* Using 24-bit little-endian representation, colors stored as Red 7:0, Green 15:8, Blue 23:16. *
* byte byte byte * <------ i -----> | <---- i+1 ----> | <---- i+2 -----> * +-----------------+-----------------+-----------------+ * | RED | GREEN | BLUE | * +-----------------+-----------------+-----------------+ ** * This format corresponds to {@link android.graphics.PixelFormat#RGB_888}, and can also be * represented as a flexible format by {@link #COLOR_FormatRGBFlexible}. */ public static final int COLOR_Format24bitBGR888 = 12; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format24bitARGB1887 = 13; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format25bitARGB1888 = 14; /** * @deprecated Use {@link #COLOR_Format32bitABGR8888} Or {@link #COLOR_FormatRGBAFlexible}. */ public static final int COLOR_Format32bitBGRA8888 = 15; /** * @deprecated Use {@link #COLOR_Format32bitABGR8888} Or {@link #COLOR_FormatRGBAFlexible}. */ public static final int COLOR_Format32bitARGB8888 = 16; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV411Planar = 17; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV411PackedPlanar = 18; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV420Planar = 19; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV420PackedPlanar = 20; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV420SemiPlanar = 21; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYUV422Planar = 22; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYUV422PackedPlanar = 23; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYUV422SemiPlanar = 24; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYCbYCr = 25; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYCrYCb = 26; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatCbYCrY = 27; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatCrYCbY = 28; /** @deprecated Use {@link #COLOR_FormatYUV444Flexible}. */ public static final int COLOR_FormatYUV444Interleaved = 29; /** * SMIA 8-bit Bayer format. * Each byte represents the top 8-bits of a 10-bit signal. */ public static final int COLOR_FormatRawBayer8bit = 30; /** * SMIA 10-bit Bayer format. */ public static final int COLOR_FormatRawBayer10bit = 31; /** * SMIA 8-bit compressed Bayer format. * Each byte represents a sample from the 10-bit signal that is compressed into 8-bits * using DPCM/PCM compression, as defined by the SMIA Functional Specification. */ public static final int COLOR_FormatRawBayer8bitcompressed = 32; /** @deprecated Use {@link #COLOR_FormatL8}. */ public static final int COLOR_FormatL2 = 33; /** @deprecated Use {@link #COLOR_FormatL8}. */ public static final int COLOR_FormatL4 = 34; /** * 8 bits per pixel Y color format. *
* Each byte contains a single pixel. * This format corresponds to {@link android.graphics.PixelFormat#L_8}. */ public static final int COLOR_FormatL8 = 35; /** * 16 bits per pixel, little-endian Y color format. *
*
* byte byte * <--------- i --------> | <------ i + 1 ------> * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ * | Y | * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ * 0 7 0 7 * bit **/ public static final int COLOR_FormatL16 = 36; /** @deprecated Use {@link #COLOR_FormatL16}. */ public static final int COLOR_FormatL24 = 37; /** * 32 bits per pixel, little-endian Y color format. *
*
* byte byte byte byte * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 -----> * +-----------------+-----------------+-----------------+-----------------+ * | Y | * +-----------------+-----------------+-----------------+-----------------+ * 0 7 0 7 0 7 0 7 * bit ** * @deprecated Use {@link #COLOR_FormatL16}. */ public static final int COLOR_FormatL32 = 38; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_FormatYUV420PackedSemiPlanar = 39; /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ public static final int COLOR_FormatYUV422PackedSemiPlanar = 40; /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ public static final int COLOR_Format18BitBGR666 = 41; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format24BitARGB6666 = 42; /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ public static final int COLOR_Format24BitABGR6666 = 43; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100; // COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference. // In OMX this is called OMX_COLOR_FormatAndroidOpaque. public static final int COLOR_FormatSurface = 0x7F000789; /** * 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components. *
* Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8, * Blue 23:16, and Alpha 31:24. *
* byte byte byte byte * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 -----> * +-----------------+-----------------+-----------------+-----------------+ * | RED | GREEN | BLUE | ALPHA | * +-----------------+-----------------+-----------------+-----------------+ ** * This corresponds to {@link android.graphics.PixelFormat#RGBA_8888}. */ public static final int COLOR_Format32bitABGR8888 = 0x7F00A000; /** * Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma * components. *
* Chroma planes are subsampled by 2 both horizontally and vertically. * Use this format with {@link Image}. * This format corresponds to {@link android.graphics.ImageFormat#YUV_420_888}, * and can represent the {@link #COLOR_FormatYUV411Planar}, * {@link #COLOR_FormatYUV411PackedPlanar}, {@link #COLOR_FormatYUV420Planar}, * {@link #COLOR_FormatYUV420PackedPlanar}, {@link #COLOR_FormatYUV420SemiPlanar} * and {@link #COLOR_FormatYUV420PackedSemiPlanar} formats. * * @see Image#getFormat */ public static final int COLOR_FormatYUV420Flexible = 0x7F420888; /** * Flexible 16 bits per pixel, subsampled YUV color format with 8-bit chroma and luma * components. *
* Chroma planes are horizontally subsampled by 2. Use this format with {@link Image}. * This format corresponds to {@link android.graphics.ImageFormat#YUV_422_888}, * and can represent the {@link #COLOR_FormatYCbYCr}, {@link #COLOR_FormatYCrYCb}, * {@link #COLOR_FormatCbYCrY}, {@link #COLOR_FormatCrYCbY}, * {@link #COLOR_FormatYUV422Planar}, {@link #COLOR_FormatYUV422PackedPlanar}, * {@link #COLOR_FormatYUV422SemiPlanar} and {@link #COLOR_FormatYUV422PackedSemiPlanar} * formats. * * @see Image#getFormat */ public static final int COLOR_FormatYUV422Flexible = 0x7F422888; /** * Flexible 24 bits per pixel YUV color format with 8-bit chroma and luma * components. *
* Chroma planes are not subsampled. Use this format with {@link Image}. * This format corresponds to {@link android.graphics.ImageFormat#YUV_444_888}, * and can represent the {@link #COLOR_FormatYUV444Interleaved} format. * @see Image#getFormat */ public static final int COLOR_FormatYUV444Flexible = 0x7F444888; /** * Flexible 24 bits per pixel RGB color format with 8-bit red, green and blue * components. *
* Use this format with {@link Image}. This format corresponds to * {@link android.graphics.ImageFormat#FLEX_RGB_888}, and can represent * {@link #COLOR_Format24bitBGR888} and {@link #COLOR_Format24bitRGB888} formats. * @see Image#getFormat. */ public static final int COLOR_FormatRGBFlexible = 0x7F36B888; /** * Flexible 32 bits per pixel RGBA color format with 8-bit red, green, blue, and alpha * components. *
* Use this format with {@link Image}. This format corresponds to * {@link android.graphics.ImageFormat#FLEX_RGBA_8888}, and can represent * {@link #COLOR_Format32bitBGRA8888}, {@link #COLOR_Format32bitABGR8888} and * {@link #COLOR_Format32bitARGB8888} formats. * * @see Image#getFormat */ public static final int COLOR_FormatRGBAFlexible = 0x7F36A888; /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ public static final int COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00; /** * Defined in the OpenMAX IL specs, color format values are drawn from * OMX_COLOR_FORMATTYPE. */ public int[] colorFormats; // NOTE this array is modifiable by user // FEATURES private int mFlagsSupported; private int mFlagsRequired; private int mFlagsVerified; /** * video decoder only: codec supports seamless resolution changes. */ public static final String FEATURE_AdaptivePlayback = "adaptive-playback"; /** * video decoder only: codec supports secure decryption. */ public static final String FEATURE_SecurePlayback = "secure-playback"; /** * video or audio decoder only: codec supports tunneled playback. */ public static final String FEATURE_TunneledPlayback = "tunneled-playback"; /** * video decoder only: codec supports queuing partial frames. */ public static final String FEATURE_PartialFrame = "partial-frame"; /** * video encoder only: codec supports intra refresh. */ public static final String FEATURE_IntraRefresh = "intra-refresh"; /** * Query codec feature capabilities. *
* These are features that the codec supports for use. They include
* optional features that can be turned on, as well as features that
* are always on.
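* <p>
* For example, an application might combine the feature queries along these
* lines (a sketch, assuming {@code caps} is a {@code CodecCapabilities}
* instance obtained via {@link MediaCodecInfo#getCapabilitiesForType
* getCapabilitiesForType()}):
* <pre>
* boolean canAdapt = caps.isFeatureSupported(
*         CodecCapabilities.FEATURE_AdaptivePlayback);
* boolean requiresSecure = caps.isFeatureRequired(
*         CodecCapabilities.FEATURE_SecurePlayback);
* if (canAdapt &amp;&amp; !requiresSecure) {
*     // seamless resolution changes are available and secure decryption
*     // is not forced on the session
* }</pre>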
*/
public final boolean isFeatureSupported(String name) {
    return checkFeature(name, mFlagsSupported);
}

/**
* Query codec feature requirements.
* <p>
* These features are required to be used by the codec, and as such,
* they are always turned on.
*/
public final boolean isFeatureRequired(String name) {
    return checkFeature(name, mFlagsRequired);
}

private static final Feature[] decoderFeatures = {
    new Feature(FEATURE_AdaptivePlayback, (1 << 0), true),
    new Feature(FEATURE_SecurePlayback, (1 << 1), false),
    new Feature(FEATURE_TunneledPlayback, (1 << 2), false),
    new Feature(FEATURE_PartialFrame, (1 << 3), false),
};

private static final Feature[] encoderFeatures = {
    new Feature(FEATURE_IntraRefresh, (1 << 0), false),
};

/** @hide */
public String[] validFeatures() {
    Feature[] features = getValidFeatures();
    String[] res = new String[features.length];
    for (int i = 0; i < res.length; i++) {
        res[i] = features[i].mName;
    }
    return res;
}

private Feature[] getValidFeatures() {
    if (!isEncoder()) {
        return decoderFeatures;
    }
    return encoderFeatures;
}

private boolean checkFeature(String name, int flags) {
    for (Feature feat: getValidFeatures()) {
        if (feat.mName.equals(name)) {
            return (flags & feat.mValue) != 0;
        }
    }
    return false;
}

/** @hide */
public boolean isRegular() {
    // regular codecs only require default features
    for (Feature feat: getValidFeatures()) {
        if (!feat.mDefault && isFeatureRequired(feat.mName)) {
            return false;
        }
    }
    return true;
}

/**
* Query whether codec supports a given {@link MediaFormat}.
*
* Note: On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
* {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
* frame rate}. Use
* {@code format.setString(MediaFormat.KEY_FRAME_RATE, null)}
* to clear any existing frame rate setting in the format.
*
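* For example, an application might clear the frame rate before the query on
* older releases along these lines (a sketch, assuming {@code caps} and
* {@code format} have already been set up):
* <pre>
* if (android.os.Build.VERSION.SDK_INT == android.os.Build.VERSION_CODES.LOLLIPOP) {
*     format.setString(MediaFormat.KEY_FRAME_RATE, null);
* }
* boolean supported = caps.isFormatSupported(format);</pre>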
*
* The following table summarizes the format keys considered by this method.
*
* <table>
*  <tr>
*   <th rowspan=2>OS Version(s)</th>
*   <th colspan=3>{@code MediaFormat} keys considered for</th>
*  </tr><tr>
*   <th>Audio Codecs</th>
*   <th>Video Codecs</th>
*   <th>Encoders</th>
*  </tr><tr>
*   <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
*   <td rowspan=3>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
*       {@link MediaFormat#KEY_SAMPLE_RATE},<br>
*       {@link MediaFormat#KEY_CHANNEL_COUNT}</td>
*   <td>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
*       {@link CodecCapabilities#FEATURE_AdaptivePlayback}<sup>D</sup>,<br>
*       {@link CodecCapabilities#FEATURE_SecurePlayback}<sup>D</sup>,<br>
*       {@link CodecCapabilities#FEATURE_TunneledPlayback}<sup>D</sup>,<br>
*       {@link MediaFormat#KEY_WIDTH},<br>
*       {@link MediaFormat#KEY_HEIGHT},<br>
*       no {@code KEY_FRAME_RATE}</td>
*   <td rowspan=4>{@link MediaFormat#KEY_BITRATE_MODE},<br>
*       {@link MediaFormat#KEY_PROFILE}
*       (and/or {@link MediaFormat#KEY_AAC_PROFILE}<sup>~</sup>),<br>
*       {@link MediaFormat#KEY_COMPLEXITY}
*       (and/or {@link MediaFormat#KEY_FLAC_COMPRESSION_LEVEL}<sup>~</sup>)</td>
*  </tr><tr>
*   <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
*   <td rowspan=2>as above, plus<br>
*       {@link MediaFormat#KEY_FRAME_RATE}</td>
*  </tr><tr>
*   <td>{@link android.os.Build.VERSION_CODES#M}</td>
*  </tr><tr>
*   <td>{@link android.os.Build.VERSION_CODES#N}</td>
*   <td>as above, plus<br>
*       {@link MediaFormat#KEY_PROFILE},<br>
*       {@link MediaFormat#KEY_BIT_RATE}</td>
*   <td>as above, plus<br>
*       {@link MediaFormat#KEY_PROFILE},<br>
*       {@link MediaFormat#KEY_LEVEL}<sup>+</sup>,<br>
*       {@link MediaFormat#KEY_BIT_RATE},<br>
*       {@link CodecCapabilities#FEATURE_IntraRefresh}<sup>E</sup></td>
*  </tr>
* </table>
*
* Notes:
* <ul>
*  <li>*: must be specified; otherwise the method returns {@code false}</li>
*  <li>+: the method does not verify that the format parameters are supported
*      by the specified level</li>
*  <li>D: decoders only</li>
*  <li>E: encoders only</li>
*  <li>~: if both keys are provided, the values must match</li>
* </ul>
* This is a hint for an upper bound. Applications should not expect to successfully * operate more instances than the returned value, but the actual number of * concurrently operable instances may be less as it depends on the available * resources at time of use. */ public int getMaxSupportedInstances() { return mMaxSupportedInstances; } private boolean isAudio() { return mAudioCaps != null; } /** * Returns the audio capabilities or {@code null} if this is not an audio codec. */ public AudioCapabilities getAudioCapabilities() { return mAudioCaps; } private boolean isEncoder() { return mEncoderCaps != null; } /** * Returns the encoding capabilities or {@code null} if this is not an encoder. */ public EncoderCapabilities getEncoderCapabilities() { return mEncoderCaps; } private boolean isVideo() { return mVideoCaps != null; } /** * Returns the video capabilities or {@code null} if this is not a video codec. */ public VideoCapabilities getVideoCapabilities() { return mVideoCaps; } /** @hide */ public CodecCapabilities dup() { return new CodecCapabilities( // clone writable arrays Arrays.copyOf(profileLevels, profileLevels.length), Arrays.copyOf(colorFormats, colorFormats.length), isEncoder(), mFlagsVerified, mDefaultFormat, mCapabilitiesInfo); } /** * Retrieve the codec capabilities for a certain {@code mime type}, {@code * profile} and {@code level}. If the type, or profile-level combination * is not understood by the framework, it returns null. *
In {@link android.os.Build.VERSION_CODES#M}, calling this * method without calling any method of the {@link MediaCodecList} class beforehand * results in a {@link NullPointerException}.
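* <p>
* For example, a caller might probe a profile and level pair along these lines
* (a sketch; the MIME type and combination below are arbitrary):
* <pre>
* CodecCapabilities hevcMain41 = CodecCapabilities.createFromProfileLevel(
*         MediaFormat.MIMETYPE_VIDEO_HEVC,
*         CodecProfileLevel.HEVCProfileMain,
*         CodecProfileLevel.HEVCMainTierLevel41);
* // null means the framework does not understand the combination</pre>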
*/ public static CodecCapabilities createFromProfileLevel( String mime, int profile, int level) { CodecProfileLevel pl = new CodecProfileLevel(); pl.profile = profile; pl.level = level; MediaFormat defaultFormat = new MediaFormat(); defaultFormat.setString(MediaFormat.KEY_MIME, mime); CodecCapabilities ret = new CodecCapabilities( new CodecProfileLevel[] { pl }, new int[0], true /* encoder */, 0 /* flags */, defaultFormat, new MediaFormat() /* info */); if (ret.mError != 0) { return null; } return ret; } /* package private */ CodecCapabilities( CodecProfileLevel[] profLevs, int[] colFmts, boolean encoder, int flags, Map
* This is not a performance indicator. Rather, it expresses the
* limits specified in the coding standard, based on the complexities
* of encoding material for later playback at a certain frame rate,
* or the decoding of such material in non-realtime.
*/
public Range
* This is not a performance indicator. Rather, it expresses the limits specified in
* the coding standard, based on the complexities of encoding material of a given
* size for later playback at a certain frame rate, or the decoding of such material
* in non-realtime.
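* <p>
* For example, an application might check the limit for 720p along these lines
* (a sketch, assuming {@code videoCaps} is this codec's {@code VideoCapabilities}):
* <pre>
* Range&lt;Double&gt; limit = videoCaps.getSupportedFrameRatesFor(1280, 720);
* boolean within = limit.contains(30.0);</pre>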
* @param width the width of the video
* @param height the height of the video
*/
public Range
* This is a performance estimate provided by the device manufacturer based on statistical
* sampling of full-speed decoding and encoding measurements in various configurations
* of common video sizes supported by the codec. As such it should only be used to
* compare individual codecs on the device. The value is not suitable for comparing
* different devices or even different android releases for the same device.
*
* On {@link android.os.Build.VERSION_CODES#M} release the returned range
* corresponds to the fastest frame rates achieved in the tested configurations. As
* such, it should not be used to gauge guaranteed or even average codec performance
* on the device.
*
* On {@link android.os.Build.VERSION_CODES#N} release the returned range
* corresponds closer to sustained performance in tested configurations.
* One can expect to achieve sustained performance higher than the lower limit more than
* 50% of the time, and higher than half of the lower limit at least 90% of the time
* in tested configurations.
* Conversely, one can expect performance lower than twice the upper limit at least
* 90% of the time.
*
* Tested configurations use a single active codec. For use cases where multiple
* codecs are active, applications can expect lower and in most cases significantly lower
* performance.
*
* The returned range value is interpolated from the nearest frame size(s) tested.
* Codec performance is severely impacted by other activity on the device as well
* as environmental factors (such as battery level, temperature or power source), and can
* vary significantly even in a steady environment.
*
* Use this method in cases where only codec performance matters, e.g. to evaluate if
* a codec has any chance of meeting a performance target. Codecs are listed
* in {@link MediaCodecList} in the preferred order as defined by the device
* manufacturer. As such, applications should use the first suitable codec in the
* list to achieve the best balance between power use and performance.
*
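* For example, an application might use the achievable range to decide whether
* a codec is worth trying for 1080p/30 along these lines (a sketch, assuming
* {@code videoCaps} is this codec's {@code VideoCapabilities}):
* <pre>
* Range&lt;Double&gt; achievable = videoCaps.getAchievableFrameRatesFor(1920, 1080);
* boolean worthTrying =
*         (achievable != null) &amp;&amp; (achievable.getUpper() >= 30.0);</pre>
*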
* @param width the width of the video
* @param height the height of the video
*
* @throws IllegalArgumentException if the video size is not supported.
*/
@Nullable
public Range
* Some codecs may support multiple complexity levels, where higher
* complexity values use more encoder tools (e.g. perform more
* intensive calculations) to improve the quality or the compression
* ratio. Use a lower value to save power and/or time.
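* <p>
* For example, an encoder configuration might clamp a desired complexity into
* the supported range along these lines (a sketch, assuming {@code encCaps} is
* this codec's {@code EncoderCapabilities} and {@code format} is the encoder's
* {@link MediaFormat}):
* <pre>
* int complexity = encCaps.getComplexityRange().clamp(5);
* format.setInteger(MediaFormat.KEY_COMPLEXITY, complexity);</pre>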
*/
public Range You can get a set of {@link MediaCodecInfo.CodecProfileLevel} objects for a given
* {@link MediaCodecInfo} object from the
* {@link MediaCodecInfo.CodecCapabilities#profileLevels} field.
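* <p>
* For example, an application might scan this array for a given profile at or
* above a minimum level along these lines (a sketch; the AVC level constants
* increase monotonically with the level, so a numeric comparison works for
* this codec family, and {@code caps} is assumed to hold the
* {@code "video/avc"} capabilities of a decoder):
* <pre>
* boolean highAtLevel4 = false;
* for (MediaCodecInfo.CodecProfileLevel pl : caps.profileLevels) {
*     if (pl.profile == MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
*             &amp;&amp; pl.level &gt;= MediaCodecInfo.CodecProfileLevel.AVCLevel4) {
*         highAtLevel4 = true;
*     }
* }</pre>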
*/
public static final class CodecProfileLevel {
// from OMX_VIDEO_AVCPROFILETYPE
public static final int AVCProfileBaseline = 0x01;
public static final int AVCProfileMain = 0x02;
public static final int AVCProfileExtended = 0x04;
public static final int AVCProfileHigh = 0x08;
public static final int AVCProfileHigh10 = 0x10;
public static final int AVCProfileHigh422 = 0x20;
public static final int AVCProfileHigh444 = 0x40;
// from OMX_VIDEO_AVCLEVELTYPE
public static final int AVCLevel1 = 0x01;
public static final int AVCLevel1b = 0x02;
public static final int AVCLevel11 = 0x04;
public static final int AVCLevel12 = 0x08;
public static final int AVCLevel13 = 0x10;
public static final int AVCLevel2 = 0x20;
public static final int AVCLevel21 = 0x40;
public static final int AVCLevel22 = 0x80;
public static final int AVCLevel3 = 0x100;
public static final int AVCLevel31 = 0x200;
public static final int AVCLevel32 = 0x400;
public static final int AVCLevel4 = 0x800;
public static final int AVCLevel41 = 0x1000;
public static final int AVCLevel42 = 0x2000;
public static final int AVCLevel5 = 0x4000;
public static final int AVCLevel51 = 0x8000;
public static final int AVCLevel52 = 0x10000;
// from OMX_VIDEO_H263PROFILETYPE
public static final int H263ProfileBaseline = 0x01;
public static final int H263ProfileH320Coding = 0x02;
public static final int H263ProfileBackwardCompatible = 0x04;
public static final int H263ProfileISWV2 = 0x08;
public static final int H263ProfileISWV3 = 0x10;
public static final int H263ProfileHighCompression = 0x20;
public static final int H263ProfileInternet = 0x40;
public static final int H263ProfileInterlace = 0x80;
public static final int H263ProfileHighLatency = 0x100;
// from OMX_VIDEO_H263LEVELTYPE
public static final int H263Level10 = 0x01;
public static final int H263Level20 = 0x02;
public static final int H263Level30 = 0x04;
public static final int H263Level40 = 0x08;
public static final int H263Level45 = 0x10;
public static final int H263Level50 = 0x20;
public static final int H263Level60 = 0x40;
public static final int H263Level70 = 0x80;
// from OMX_VIDEO_MPEG4PROFILETYPE
public static final int MPEG4ProfileSimple = 0x01;
public static final int MPEG4ProfileSimpleScalable = 0x02;
public static final int MPEG4ProfileCore = 0x04;
public static final int MPEG4ProfileMain = 0x08;
public static final int MPEG4ProfileNbit = 0x10;
public static final int MPEG4ProfileScalableTexture = 0x20;
public static final int MPEG4ProfileSimpleFace = 0x40;
public static final int MPEG4ProfileSimpleFBA = 0x80;
public static final int MPEG4ProfileBasicAnimated = 0x100;
public static final int MPEG4ProfileHybrid = 0x200;
public static final int MPEG4ProfileAdvancedRealTime = 0x400;
public static final int MPEG4ProfileCoreScalable = 0x800;
public static final int MPEG4ProfileAdvancedCoding = 0x1000;
public static final int MPEG4ProfileAdvancedCore = 0x2000;
public static final int MPEG4ProfileAdvancedScalable = 0x4000;
public static final int MPEG4ProfileAdvancedSimple = 0x8000;
// from OMX_VIDEO_MPEG4LEVELTYPE
public static final int MPEG4Level0 = 0x01;
public static final int MPEG4Level0b = 0x02;
public static final int MPEG4Level1 = 0x04;
public static final int MPEG4Level2 = 0x08;
public static final int MPEG4Level3 = 0x10;
public static final int MPEG4Level3b = 0x18;
public static final int MPEG4Level4 = 0x20;
public static final int MPEG4Level4a = 0x40;
public static final int MPEG4Level5 = 0x80;
public static final int MPEG4Level6 = 0x100;
// from OMX_VIDEO_MPEG2PROFILETYPE
public static final int MPEG2ProfileSimple = 0x00;
public static final int MPEG2ProfileMain = 0x01;
public static final int MPEG2Profile422 = 0x02;
public static final int MPEG2ProfileSNR = 0x03;
public static final int MPEG2ProfileSpatial = 0x04;
public static final int MPEG2ProfileHigh = 0x05;
// from OMX_VIDEO_MPEG2LEVELTYPE
public static final int MPEG2LevelLL = 0x00;
public static final int MPEG2LevelML = 0x01;
public static final int MPEG2LevelH14 = 0x02;
public static final int MPEG2LevelHL = 0x03;
public static final int MPEG2LevelHP = 0x04;
// from OMX_AUDIO_AACPROFILETYPE
public static final int AACObjectMain = 1;
public static final int AACObjectLC = 2;
public static final int AACObjectSSR = 3;
public static final int AACObjectLTP = 4;
public static final int AACObjectHE = 5;
public static final int AACObjectScalable = 6;
public static final int AACObjectERLC = 17;
public static final int AACObjectERScalable = 20;
public static final int AACObjectLD = 23;
public static final int AACObjectHE_PS = 29;
public static final int AACObjectELD = 39;
// from OMX_VIDEO_VP8LEVELTYPE
public static final int VP8Level_Version0 = 0x01;
public static final int VP8Level_Version1 = 0x02;
public static final int VP8Level_Version2 = 0x04;
public static final int VP8Level_Version3 = 0x08;
// from OMX_VIDEO_VP8PROFILETYPE
public static final int VP8ProfileMain = 0x01;
// from OMX_VIDEO_VP9PROFILETYPE
public static final int VP9Profile0 = 0x01;
public static final int VP9Profile1 = 0x02;
public static final int VP9Profile2 = 0x04;
public static final int VP9Profile3 = 0x08;
// HDR profiles also support passing HDR metadata
public static final int VP9Profile2HDR = 0x1000;
public static final int VP9Profile3HDR = 0x2000;
// from OMX_VIDEO_VP9LEVELTYPE
public static final int VP9Level1 = 0x1;
public static final int VP9Level11 = 0x2;
public static final int VP9Level2 = 0x4;
public static final int VP9Level21 = 0x8;
public static final int VP9Level3 = 0x10;
public static final int VP9Level31 = 0x20;
public static final int VP9Level4 = 0x40;
public static final int VP9Level41 = 0x80;
public static final int VP9Level5 = 0x100;
public static final int VP9Level51 = 0x200;
public static final int VP9Level52 = 0x400;
public static final int VP9Level6 = 0x800;
public static final int VP9Level61 = 0x1000;
public static final int VP9Level62 = 0x2000;
// from OMX_VIDEO_HEVCPROFILETYPE
public static final int HEVCProfileMain = 0x01;
public static final int HEVCProfileMain10 = 0x02;
public static final int HEVCProfileMain10HDR10 = 0x1000;
// from OMX_VIDEO_HEVCLEVELTYPE
public static final int HEVCMainTierLevel1 = 0x1;
public static final int HEVCHighTierLevel1 = 0x2;
public static final int HEVCMainTierLevel2 = 0x4;
public static final int HEVCHighTierLevel2 = 0x8;
public static final int HEVCMainTierLevel21 = 0x10;
public static final int HEVCHighTierLevel21 = 0x20;
public static final int HEVCMainTierLevel3 = 0x40;
public static final int HEVCHighTierLevel3 = 0x80;
public static final int HEVCMainTierLevel31 = 0x100;
public static final int HEVCHighTierLevel31 = 0x200;
public static final int HEVCMainTierLevel4 = 0x400;
public static final int HEVCHighTierLevel4 = 0x800;
public static final int HEVCMainTierLevel41 = 0x1000;
public static final int HEVCHighTierLevel41 = 0x2000;
public static final int HEVCMainTierLevel5 = 0x4000;
public static final int HEVCHighTierLevel5 = 0x8000;
public static final int HEVCMainTierLevel51 = 0x10000;
public static final int HEVCHighTierLevel51 = 0x20000;
public static final int HEVCMainTierLevel52 = 0x40000;
public static final int HEVCHighTierLevel52 = 0x80000;
public static final int HEVCMainTierLevel6 = 0x100000;
public static final int HEVCHighTierLevel6 = 0x200000;
public static final int HEVCMainTierLevel61 = 0x400000;
public static final int HEVCHighTierLevel61 = 0x800000;
public static final int HEVCMainTierLevel62 = 0x1000000;
public static final int HEVCHighTierLevel62 = 0x2000000;
private static final int HEVCHighTierLevels =
HEVCHighTierLevel1 | HEVCHighTierLevel2 | HEVCHighTierLevel21 | HEVCHighTierLevel3 |
HEVCHighTierLevel31 | HEVCHighTierLevel4 | HEVCHighTierLevel41 | HEVCHighTierLevel5 |
HEVCHighTierLevel51 | HEVCHighTierLevel52 | HEVCHighTierLevel6 | HEVCHighTierLevel61 |
HEVCHighTierLevel62;
// from OMX_VIDEO_DOLBYVISIONPROFILETYPE
public static final int DolbyVisionProfileDvavPer = 0x1;
public static final int DolbyVisionProfileDvavPen = 0x2;
public static final int DolbyVisionProfileDvheDer = 0x4;
public static final int DolbyVisionProfileDvheDen = 0x8;
public static final int DolbyVisionProfileDvheDtr = 0x10;
public static final int DolbyVisionProfileDvheStn = 0x20;
public static final int DolbyVisionProfileDvheDth = 0x40;
public static final int DolbyVisionProfileDvheDtb = 0x80;
// from OMX_VIDEO_DOLBYVISIONLEVELTYPE
public static final int DolbyVisionLevelHd24 = 0x1;
public static final int DolbyVisionLevelHd30 = 0x2;
public static final int DolbyVisionLevelFhd24 = 0x4;
public static final int DolbyVisionLevelFhd30 = 0x8;
public static final int DolbyVisionLevelFhd60 = 0x10;
public static final int DolbyVisionLevelUhd24 = 0x20;
public static final int DolbyVisionLevelUhd30 = 0x40;
public static final int DolbyVisionLevelUhd48 = 0x80;
public static final int DolbyVisionLevelUhd60 = 0x100;
/**
* Defined in the OpenMAX IL specs, depending on the type of media
* this can be OMX_VIDEO_AVCPROFILETYPE, OMX_VIDEO_H263PROFILETYPE,
* OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_VP8PROFILETYPE or OMX_VIDEO_VP9PROFILETYPE.
*/
public int profile;
/**
* Defined in the OpenMAX IL specs, depending on the type of media
* this can be OMX_VIDEO_AVCLEVELTYPE, OMX_VIDEO_H263LEVELTYPE,
* OMX_VIDEO_MPEG4LEVELTYPE, OMX_VIDEO_VP8LEVELTYPE or OMX_VIDEO_VP9LEVELTYPE.
*
* Note that VP9 decoders on platforms before {@link android.os.Build.VERSION_CODES#N} may
* not advertise profile/level support. For those VP9 decoders, use
* {@link VideoCapabilities} to determine the codec capabilities.
*/
public int level;
};
/**
* Enumerates the capabilities of the codec component. Since a single
* component can support data of a variety of types, the type has to be
* specified to yield a meaningful result.
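* <p>
* For example, a caller might guard the query with {@link #getSupportedTypes},
* since querying an unsupported type throws an {@link IllegalArgumentException}
* (a sketch, assuming {@code info} refers to this {@code MediaCodecInfo}):
* <pre>
* MediaCodecInfo.CodecCapabilities caps = null;
* for (String t : info.getSupportedTypes()) {
*     if (t.equalsIgnoreCase("video/avc")) {
*         caps = info.getCapabilitiesForType(t);
*     }
* }</pre>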
* @param type The MIME type to query
*/
public final CodecCapabilities getCapabilitiesForType(
String type) {
CodecCapabilities caps = mCaps.get(type);
if (caps == null) {
throw new IllegalArgumentException("codec does not support type");
}
// clone writable object
return caps.dup();
}
/** @hide */
public MediaCodecInfo makeRegular() {
ArrayList