1 /* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media; 18 19 import static android.media.Utils.intersectSortedDistinctRanges; 20 import static android.media.Utils.sortDistinctRanges; 21 import static android.media.codec.Flags.FLAG_DYNAMIC_COLOR_ASPECTS; 22 import static android.media.codec.Flags.FLAG_HLG_EDITING; 23 import static android.media.codec.Flags.FLAG_IN_PROCESS_SW_AUDIO_CODEC; 24 import static android.media.codec.Flags.FLAG_NULL_OUTPUT_SURFACE; 25 import static android.media.codec.Flags.FLAG_REGION_OF_INTEREST; 26 import static android.media.MediaCodec.GetFlag; 27 28 import android.annotation.FlaggedApi; 29 import android.annotation.IntDef; 30 import android.annotation.IntRange; 31 import android.annotation.NonNull; 32 import android.annotation.Nullable; 33 import android.annotation.SuppressLint; 34 import android.annotation.TestApi; 35 import android.compat.annotation.UnsupportedAppUsage; 36 import android.os.Build; 37 import android.os.Process; 38 import android.os.SystemProperties; 39 import android.sysprop.MediaProperties; 40 import android.util.Log; 41 import android.util.Pair; 42 import android.util.Range; 43 import android.util.Rational; 44 import android.util.Size; 45 46 import java.lang.annotation.Retention; 47 import java.lang.annotation.RetentionPolicy; 48 import java.util.ArrayList; 49 import java.util.Arrays; 50 import java.util.Collections; 51 import java.util.HashMap; 52 import java.util.List; 53 import java.util.Map; 54 import java.util.Set; 55 import java.util.Vector; 56 57 /** 58 * Provides information about a given media codec available on the device. You can 59 * iterate through all codecs available by querying {@link MediaCodecList}. 
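 * <p>On {@link android.os.Build.VERSION_CODES#LOLLIPOP} and later, a codec matching a concrete
 * format can also be looked up directly, without iterating; a minimal sketch (the format values
 * below are only illustrative):
 * <pre>
 * MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720);
 * String encoderName =
 *         new MediaCodecList(MediaCodecList.REGULAR_CODECS).findEncoderForFormat(format);
 * // encoderName is null if no encoder supports the format; otherwise it can be passed to
 * // MediaCodec.createByCodecName().</pre>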
For example, 60 * here's how to find an encoder that supports a given MIME type: 61 * <pre> 62 * private static MediaCodecInfo selectCodec(String mimeType) { 63 * int numCodecs = MediaCodecList.getCodecCount(); 64 * for (int i = 0; i < numCodecs; i++) { 65 * MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); 66 * 67 * if (!codecInfo.isEncoder()) { 68 * continue; 69 * } 70 * 71 * String[] types = codecInfo.getSupportedTypes(); 72 * for (int j = 0; j < types.length; j++) { 73 * if (types[j].equalsIgnoreCase(mimeType)) { 74 * return codecInfo; 75 * } 76 * } 77 * } 78 * return null; 79 * }</pre> 80 * 81 */ 82 public final class MediaCodecInfo { 83 private static final String TAG = "MediaCodecInfo"; 84 85 private static final int FLAG_IS_ENCODER = (1 << 0); 86 private static final int FLAG_IS_VENDOR = (1 << 1); 87 private static final int FLAG_IS_SOFTWARE_ONLY = (1 << 2); 88 private static final int FLAG_IS_HARDWARE_ACCELERATED = (1 << 3); 89 90 private int mFlags; 91 private String mName; 92 private String mCanonicalName; 93 private Map<String, CodecCapabilities> mCaps; 94 MediaCodecInfo( String name, String canonicalName, int flags, CodecCapabilities[] caps)95 /* package private */ MediaCodecInfo( 96 String name, String canonicalName, int flags, CodecCapabilities[] caps) { 97 mName = name; 98 mCanonicalName = canonicalName; 99 mFlags = flags; 100 mCaps = new HashMap<String, CodecCapabilities>(); 101 102 for (CodecCapabilities c: caps) { 103 mCaps.put(c.getMimeType(), c); 104 } 105 } 106 107 /** 108 * Retrieve the codec name. 109 * 110 * <strong>Note:</strong> Implementations may provide multiple aliases (codec 111 * names) for the same underlying codec, any of which can be used to instantiate the same 112 * underlying codec in {@link MediaCodec#createByCodecName}. 113 * 114 * Applications targeting SDK < {@link android.os.Build.VERSION_CODES#Q}, cannot determine if 115 * the multiple codec names listed in MediaCodecList are in-fact for the same codec. 116 */ 117 @NonNull getName()118 public final String getName() { 119 return mName; 120 } 121 122 /** 123 * Retrieve the underlying codec name. 124 * 125 * Device implementations may provide multiple aliases (codec names) for the same underlying 126 * codec to maintain backward app compatibility. This method returns the name of the underlying 127 * codec name, which must not be another alias. For non-aliases this is always the name of the 128 * codec. 129 */ 130 @NonNull getCanonicalName()131 public final String getCanonicalName() { 132 return mCanonicalName; 133 } 134 135 /** 136 * Query if the codec is an alias for another underlying codec. 137 */ isAlias()138 public final boolean isAlias() { 139 return !mName.equals(mCanonicalName); 140 } 141 142 /** 143 * Query if the codec is an encoder. 144 */ isEncoder()145 public final boolean isEncoder() { 146 return (mFlags & FLAG_IS_ENCODER) != 0; 147 } 148 149 /** 150 * Query if the codec is provided by the Android platform (false) or the device manufacturer 151 * (true). 152 */ isVendor()153 public final boolean isVendor() { 154 return (mFlags & FLAG_IS_VENDOR) != 0; 155 } 156 157 /** 158 * Query if the codec is software only. Software-only codecs are more secure as they run in 159 * a tighter security sandbox. On the other hand, software-only codecs do not provide any 160 * performance guarantees. 161 */ isSoftwareOnly()162 public final boolean isSoftwareOnly() { 163 return (mFlags & FLAG_IS_SOFTWARE_ONLY) != 0; 164 } 165 166 /** 167 * Query if the codec is hardware accelerated. 
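 * <p>For example, a minimal sketch that picks a hardware-accelerated decoder for a MIME type,
 * if the device has one (the method name is only illustrative):
 * <pre>
 * private static MediaCodecInfo selectHardwareDecoder(String mimeType) {
 *     MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
 *     for (MediaCodecInfo info : list.getCodecInfos()) {
 *         if (info.isEncoder()) {
 *             continue;
 *         }
 *         if (!info.isHardwareAccelerated()) {
 *             continue;
 *         }
 *         for (String type : info.getSupportedTypes()) {
 *             if (type.equalsIgnoreCase(mimeType)) {
 *                 return info;
 *             }
 *         }
 *     }
 *     return null;
 * }</pre>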
This attribute is provided by the device 168 * manufacturer. Note that it cannot be tested for correctness. 169 */ isHardwareAccelerated()170 public final boolean isHardwareAccelerated() { 171 return (mFlags & FLAG_IS_HARDWARE_ACCELERATED) != 0; 172 } 173 174 /** 175 * Query the media types supported by the codec. 176 */ getSupportedTypes()177 public final String[] getSupportedTypes() { 178 Set<String> typeSet = mCaps.keySet(); 179 String[] types = typeSet.toArray(new String[typeSet.size()]); 180 Arrays.sort(types); 181 return types; 182 } 183 checkPowerOfTwo(int value, String message)184 private static int checkPowerOfTwo(int value, String message) { 185 if ((value & (value - 1)) != 0) { 186 throw new IllegalArgumentException(message); 187 } 188 return value; 189 } 190 191 private static class Feature { 192 public String mName; 193 public int mValue; 194 public boolean mDefault; 195 public boolean mInternal; Feature(String name, int value, boolean def)196 public Feature(String name, int value, boolean def) { 197 this(name, value, def, false /* internal */); 198 } Feature(String name, int value, boolean def, boolean internal)199 public Feature(String name, int value, boolean def, boolean internal) { 200 mName = name; 201 mValue = value; 202 mDefault = def; 203 mInternal = internal; 204 } 205 } 206 207 // COMMON CONSTANTS 208 private static final Range<Integer> POSITIVE_INTEGERS = 209 Range.create(1, Integer.MAX_VALUE); 210 private static final Range<Long> POSITIVE_LONGS = 211 Range.create(1L, Long.MAX_VALUE); 212 private static final Range<Rational> POSITIVE_RATIONALS = 213 Range.create(new Rational(1, Integer.MAX_VALUE), 214 new Rational(Integer.MAX_VALUE, 1)); 215 private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960); 216 private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000); 217 private static final int DEFAULT_MAX_SUPPORTED_INSTANCES = 32; 218 private static final int MAX_SUPPORTED_INSTANCES_LIMIT = 256; 219 220 private static final class LazyHolder { 221 private static final Range<Integer> SIZE_RANGE = Process.is64Bit() 222 ? Range.create(1, 32768) 223 : Range.create(1, MediaProperties.resolution_limit_32bit().orElse(4096)); 224 } getSizeRange()225 private static Range<Integer> getSizeRange() { 226 return LazyHolder.SIZE_RANGE; 227 } 228 229 // found stuff that is not supported by framework (=> this should not happen) 230 private static final int ERROR_UNRECOGNIZED = (1 << 0); 231 // found profile/level for which we don't have capability estimates 232 private static final int ERROR_UNSUPPORTED = (1 << 1); 233 // have not found any profile/level for which we don't have capability estimate 234 private static final int ERROR_NONE_SUPPORTED = (1 << 2); 235 236 237 /** 238 * Encapsulates the capabilities of a given codec component. 239 * For example, what profile/level combinations it supports and what colorspaces 240 * it is capable of providing the decoded data in, as well as some 241 * codec-type specific capability flags. 242 * <p>You can get an instance for a given {@link MediaCodecInfo} object with 243 * {@link MediaCodecInfo#getCapabilitiesForType getCapabilitiesForType()}, passing a MIME type. 244 */ 245 public static final class CodecCapabilities { CodecCapabilities()246 public CodecCapabilities() { 247 } 248 249 // CLASSIFICATION 250 private String mMime; 251 private int mMaxSupportedInstances; 252 253 // LEGACY FIELDS 254 255 // Enumerates supported profile/level combinations as defined 256 // by the type of encoded data. 
These combinations impose restrictions 257 // on video resolution, bitrate... and limit the available encoder tools 258 // such as B-frame support, arithmetic coding... 259 public CodecProfileLevel[] profileLevels; // NOTE this array is modifiable by user 260 261 // from MediaCodecConstants 262 /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ 263 public static final int COLOR_FormatMonochrome = 1; 264 /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ 265 public static final int COLOR_Format8bitRGB332 = 2; 266 /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ 267 public static final int COLOR_Format12bitRGB444 = 3; 268 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 269 public static final int COLOR_Format16bitARGB4444 = 4; 270 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 271 public static final int COLOR_Format16bitARGB1555 = 5; 272 273 /** 274 * 16 bits per pixel RGB color format, with 5-bit red & blue and 6-bit green component. 275 * <p> 276 * Using 16-bit little-endian representation, colors stored as Red 15:11, Green 10:5, Blue 4:0. 277 * <pre> 278 * byte byte 279 * <--------- i --------> | <------ i + 1 ------> 280 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 281 * | BLUE | GREEN | RED | 282 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 283 * 0 4 5 7 0 2 3 7 284 * bit 285 * </pre> 286 * 287 * This format corresponds to {@link android.graphics.PixelFormat#RGB_565} and 288 * {@link android.graphics.ImageFormat#RGB_565}. 289 */ 290 public static final int COLOR_Format16bitRGB565 = 6; 291 /** @deprecated Use {@link #COLOR_Format16bitRGB565}. */ 292 public static final int COLOR_Format16bitBGR565 = 7; 293 /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ 294 public static final int COLOR_Format18bitRGB666 = 8; 295 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 296 public static final int COLOR_Format18bitARGB1665 = 9; 297 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 298 public static final int COLOR_Format19bitARGB1666 = 10; 299 300 /** @deprecated Use {@link #COLOR_Format24bitBGR888} or {@link #COLOR_FormatRGBFlexible}. */ 301 public static final int COLOR_Format24bitRGB888 = 11; 302 303 /** 304 * 24 bits per pixel RGB color format, with 8-bit red, green & blue components. 305 * <p> 306 * Using 24-bit little-endian representation, colors stored as Red 7:0, Green 15:8, Blue 23:16. 307 * <pre> 308 * byte byte byte 309 * <------ i -----> | <---- i+1 ----> | <---- i+2 -----> 310 * +-----------------+-----------------+-----------------+ 311 * | RED | GREEN | BLUE | 312 * +-----------------+-----------------+-----------------+ 313 * </pre> 314 * 315 * This format corresponds to {@link android.graphics.PixelFormat#RGB_888}, and can also be 316 * represented as a flexible format by {@link #COLOR_FormatRGBFlexible}. 317 */ 318 public static final int COLOR_Format24bitBGR888 = 12; 319 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 320 public static final int COLOR_Format24bitARGB1887 = 13; 321 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 322 public static final int COLOR_Format25bitARGB1888 = 14; 323 324 /** 325 * @deprecated Use {@link #COLOR_Format32bitABGR8888} Or {@link #COLOR_FormatRGBAFlexible}. 326 */ 327 public static final int COLOR_Format32bitBGRA8888 = 15; 328 /** 329 * @deprecated Use {@link #COLOR_Format32bitABGR8888} Or {@link #COLOR_FormatRGBAFlexible}. 
330 */ 331 public static final int COLOR_Format32bitARGB8888 = 16; 332 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 333 public static final int COLOR_FormatYUV411Planar = 17; 334 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 335 public static final int COLOR_FormatYUV411PackedPlanar = 18; 336 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 337 public static final int COLOR_FormatYUV420Planar = 19; 338 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 339 public static final int COLOR_FormatYUV420PackedPlanar = 20; 340 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 341 public static final int COLOR_FormatYUV420SemiPlanar = 21; 342 343 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 344 public static final int COLOR_FormatYUV422Planar = 22; 345 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 346 public static final int COLOR_FormatYUV422PackedPlanar = 23; 347 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 348 public static final int COLOR_FormatYUV422SemiPlanar = 24; 349 350 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 351 public static final int COLOR_FormatYCbYCr = 25; 352 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 353 public static final int COLOR_FormatYCrYCb = 26; 354 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 355 public static final int COLOR_FormatCbYCrY = 27; 356 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 357 public static final int COLOR_FormatCrYCbY = 28; 358 359 /** @deprecated Use {@link #COLOR_FormatYUV444Flexible}. */ 360 public static final int COLOR_FormatYUV444Interleaved = 29; 361 362 /** 363 * SMIA 8-bit Bayer format. 364 * Each byte represents the top 8-bits of a 10-bit signal. 365 */ 366 public static final int COLOR_FormatRawBayer8bit = 30; 367 /** 368 * SMIA 10-bit Bayer format. 369 */ 370 public static final int COLOR_FormatRawBayer10bit = 31; 371 372 /** 373 * SMIA 8-bit compressed Bayer format. 374 * Each byte represents a sample from the 10-bit signal that is compressed into 8-bits 375 * using DPCM/PCM compression, as defined by the SMIA Functional Specification. 376 */ 377 public static final int COLOR_FormatRawBayer8bitcompressed = 32; 378 379 /** @deprecated Use {@link #COLOR_FormatL8}. */ 380 public static final int COLOR_FormatL2 = 33; 381 /** @deprecated Use {@link #COLOR_FormatL8}. */ 382 public static final int COLOR_FormatL4 = 34; 383 384 /** 385 * 8 bits per pixel Y color format. 386 * <p> 387 * Each byte contains a single pixel. 388 * This format corresponds to {@link android.graphics.PixelFormat#L_8}. 389 */ 390 public static final int COLOR_FormatL8 = 35; 391 392 /** 393 * 16 bits per pixel, little-endian Y color format. 394 * <p> 395 * <pre> 396 * byte byte 397 * <--------- i --------> | <------ i + 1 ------> 398 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 399 * | Y | 400 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 401 * 0 7 0 7 402 * bit 403 * </pre> 404 */ 405 public static final int COLOR_FormatL16 = 36; 406 /** @deprecated Use {@link #COLOR_FormatL16}. */ 407 public static final int COLOR_FormatL24 = 37; 408 409 /** 410 * 32 bits per pixel, little-endian Y color format. 
411 * <p> 412 * <pre> 413 * byte byte byte byte 414 * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 -----> 415 * +-----------------+-----------------+-----------------+-----------------+ 416 * | Y | 417 * +-----------------+-----------------+-----------------+-----------------+ 418 * 0 7 0 7 0 7 0 7 419 * bit 420 * </pre> 421 * 422 * @deprecated Use {@link #COLOR_FormatL16}. 423 */ 424 public static final int COLOR_FormatL32 = 38; 425 426 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 427 public static final int COLOR_FormatYUV420PackedSemiPlanar = 39; 428 /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */ 429 public static final int COLOR_FormatYUV422PackedSemiPlanar = 40; 430 431 /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */ 432 public static final int COLOR_Format18BitBGR666 = 41; 433 434 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 435 public static final int COLOR_Format24BitARGB6666 = 42; 436 /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */ 437 public static final int COLOR_Format24BitABGR6666 = 43; 438 439 /** 440 * P010 is 10-bit-per component 4:2:0 YCbCr semiplanar format. 441 * <p> 442 * This format uses 24 allocated bits per pixel with 15 bits of 443 * data per pixel. Chroma planes are subsampled by 2 both 444 * horizontally and vertically. Each chroma and luma component 445 * has 16 allocated bits in little-endian configuration with 10 446 * MSB of actual data. 447 * 448 * <pre> 449 * byte byte 450 * <--------- i --------> | <------ i + 1 ------> 451 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 452 * | UNUSED | Y/Cb/Cr | 453 * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ 454 * 0 5 6 7 0 7 455 * bit 456 * </pre> 457 * 458 * Use this format with {@link Image}. This format corresponds 459 * to {@link android.graphics.ImageFormat#YCBCR_P010}. 460 * <p> 461 */ 462 @SuppressLint("AllUpper") 463 public static final int COLOR_FormatYUVP010 = 54; 464 465 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 466 public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100; 467 // COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference. 468 // Note: in OMX this is called OMX_COLOR_FormatAndroidOpaque. 469 public static final int COLOR_FormatSurface = 0x7F000789; 470 471 /** 472 * 64 bits per pixel RGBA color format, with 16-bit signed 473 * floating point red, green, blue, and alpha components. 474 * <p> 475 * 476 * <pre> 477 * byte byte byte byte 478 * <-- i -->|<- i+1 ->|<- i+2 ->|<- i+3 ->|<- i+4 ->|<- i+5 ->|<- i+6 ->|<- i+7 -> 479 * +---------+---------+-------------------+---------+---------+---------+---------+ 480 * | RED | GREEN | BLUE | ALPHA | 481 * +---------+---------+-------------------+---------+---------+---------+---------+ 482 * 0 7 0 7 0 7 0 7 0 7 0 7 0 7 0 7 483 * </pre> 484 * 485 * This corresponds to {@link android.graphics.PixelFormat#RGBA_F16}. 486 */ 487 @SuppressLint("AllUpper") 488 public static final int COLOR_Format64bitABGRFloat = 0x7F000F16; 489 490 /** 491 * 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components. 492 * <p> 493 * Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8, 494 * Blue 23:16, and Alpha 31:24. 
495 * <pre> 496 * byte byte byte byte 497 * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 -----> 498 * +-----------------+-----------------+-----------------+-----------------+ 499 * | RED | GREEN | BLUE | ALPHA | 500 * +-----------------+-----------------+-----------------+-----------------+ 501 * </pre> 502 * 503 * This corresponds to {@link android.graphics.PixelFormat#RGBA_8888}. 504 */ 505 public static final int COLOR_Format32bitABGR8888 = 0x7F00A000; 506 507 /** 508 * 32 bits per pixel RGBA color format, with 10-bit red, green, 509 * blue, and 2-bit alpha components. 510 * <p> 511 * Using 32-bit little-endian representation, colors stored as 512 * Red 9:0, Green 19:10, Blue 29:20, and Alpha 31:30. 513 * <pre> 514 * byte byte byte byte 515 * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 -----> 516 * +-----------------+---+-------------+-------+---------+-----------+-----+ 517 * | RED | GREEN | BLUE |ALPHA| 518 * +-----------------+---+-------------+-------+---------+-----------+-----+ 519 * 0 7 0 1 2 7 0 3 4 7 0 5 6 7 520 * </pre> 521 * 522 * This corresponds to {@link android.graphics.PixelFormat#RGBA_1010102}. 523 */ 524 @SuppressLint("AllUpper") 525 public static final int COLOR_Format32bitABGR2101010 = 0x7F00AAA2; 526 527 /** 528 * Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma 529 * components. 530 * <p> 531 * Chroma planes are subsampled by 2 both horizontally and vertically. 532 * Use this format with {@link Image}. 533 * This format corresponds to {@link android.graphics.ImageFormat#YUV_420_888}, 534 * and can represent the {@link #COLOR_FormatYUV411Planar}, 535 * {@link #COLOR_FormatYUV411PackedPlanar}, {@link #COLOR_FormatYUV420Planar}, 536 * {@link #COLOR_FormatYUV420PackedPlanar}, {@link #COLOR_FormatYUV420SemiPlanar} 537 * and {@link #COLOR_FormatYUV420PackedSemiPlanar} formats. 538 * 539 * @see Image#getFormat 540 */ 541 public static final int COLOR_FormatYUV420Flexible = 0x7F420888; 542 543 /** 544 * Flexible 16 bits per pixel, subsampled YUV color format with 8-bit chroma and luma 545 * components. 546 * <p> 547 * Chroma planes are horizontally subsampled by 2. Use this format with {@link Image}. 548 * This format corresponds to {@link android.graphics.ImageFormat#YUV_422_888}, 549 * and can represent the {@link #COLOR_FormatYCbYCr}, {@link #COLOR_FormatYCrYCb}, 550 * {@link #COLOR_FormatCbYCrY}, {@link #COLOR_FormatCrYCbY}, 551 * {@link #COLOR_FormatYUV422Planar}, {@link #COLOR_FormatYUV422PackedPlanar}, 552 * {@link #COLOR_FormatYUV422SemiPlanar} and {@link #COLOR_FormatYUV422PackedSemiPlanar} 553 * formats. 554 * 555 * @see Image#getFormat 556 */ 557 public static final int COLOR_FormatYUV422Flexible = 0x7F422888; 558 559 /** 560 * Flexible 24 bits per pixel YUV color format with 8-bit chroma and luma 561 * components. 562 * <p> 563 * Chroma planes are not subsampled. Use this format with {@link Image}. 564 * This format corresponds to {@link android.graphics.ImageFormat#YUV_444_888}, 565 * and can represent the {@link #COLOR_FormatYUV444Interleaved} format. 566 * @see Image#getFormat 567 */ 568 public static final int COLOR_FormatYUV444Flexible = 0x7F444888; 569 570 /** 571 * Flexible 24 bits per pixel RGB color format with 8-bit red, green and blue 572 * components. 573 * <p> 574 * Use this format with {@link Image}. 
This format corresponds to 575 * {@link android.graphics.ImageFormat#FLEX_RGB_888}, and can represent 576 * {@link #COLOR_Format24bitBGR888} and {@link #COLOR_Format24bitRGB888} formats. 577 * @see Image#getFormat() 578 */ 579 public static final int COLOR_FormatRGBFlexible = 0x7F36B888; 580 581 /** 582 * Flexible 32 bits per pixel RGBA color format with 8-bit red, green, blue, and alpha 583 * components. 584 * <p> 585 * Use this format with {@link Image}. This format corresponds to 586 * {@link android.graphics.ImageFormat#FLEX_RGBA_8888}, and can represent 587 * {@link #COLOR_Format32bitBGRA8888}, {@link #COLOR_Format32bitABGR8888} and 588 * {@link #COLOR_Format32bitARGB8888} formats. 589 * 590 * @see Image#getFormat() 591 */ 592 public static final int COLOR_FormatRGBAFlexible = 0x7F36A888; 593 594 /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */ 595 public static final int COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00; 596 597 /** 598 * The color format for the media. This is one of the color constants defined in this class. 599 */ 600 public int[] colorFormats; // NOTE this array is modifiable by user 601 602 // FEATURES 603 604 private int mFlagsSupported; 605 private int mFlagsRequired; 606 private int mFlagsVerified; 607 608 /** 609 * <b>video decoder only</b>: codec supports seamless resolution changes. 610 */ 611 public static final String FEATURE_AdaptivePlayback = "adaptive-playback"; 612 613 /** 614 * <b>video decoder only</b>: codec supports secure decryption. 615 */ 616 public static final String FEATURE_SecurePlayback = "secure-playback"; 617 618 /** 619 * <b>video or audio decoder only</b>: codec supports tunneled playback. 620 */ 621 public static final String FEATURE_TunneledPlayback = "tunneled-playback"; 622 623 /** 624 * If true, the timestamp of each output buffer is derived from the timestamp of the input 625 * buffer that produced the output. If false, the timestamp of each output buffer is 626 * derived from the timestamp of the first input buffer. 627 */ 628 public static final String FEATURE_DynamicTimestamp = "dynamic-timestamp"; 629 630 /** 631 * <b>decoder only</b>If true, the codec supports partial (including multiple) access units 632 * per input buffer. 633 */ 634 public static final String FEATURE_FrameParsing = "frame-parsing"; 635 636 /** 637 * If true, the codec supports multiple access units (for decoding, or to output for 638 * encoders). If false, the codec only supports single access units. Producing multiple 639 * access units for output is an optional feature. 640 */ 641 public static final String FEATURE_MultipleFrames = "multiple-frames"; 642 643 /** 644 * <b>video decoder only</b>: codec supports queuing partial frames. 645 */ 646 public static final String FEATURE_PartialFrame = "partial-frame"; 647 648 /** 649 * <b>video encoder only</b>: codec supports intra refresh. 650 */ 651 public static final String FEATURE_IntraRefresh = "intra-refresh"; 652 653 /** 654 * <b>decoder only</b>: codec supports low latency decoding. 655 * If supported, clients can enable the low latency mode for the decoder. 656 * When the mode is enabled, the decoder doesn't hold input and output data more than 657 * required by the codec standards. 658 */ 659 public static final String FEATURE_LowLatency = "low-latency"; 660 661 /** 662 * Do not include in REGULAR_CODECS list in MediaCodecList. 
663 */ 664 private static final String FEATURE_SpecialCodec = "special-codec"; 665 666 /** 667 * <b>video encoder only</b>: codec supports quantization parameter bounds. 668 * @see MediaFormat#KEY_VIDEO_QP_MAX 669 * @see MediaFormat#KEY_VIDEO_QP_MIN 670 */ 671 @SuppressLint("AllUpper") 672 public static final String FEATURE_QpBounds = "qp-bounds"; 673 674 /** 675 * <b>video encoder only</b>: codec supports exporting encoding statistics. 676 * Encoders with this feature can provide the App clients with the encoding statistics 677 * information about the frame. 678 * The scope of encoding statistics is controlled by 679 * {@link MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL}. 680 * 681 * @see MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL 682 */ 683 @SuppressLint("AllUpper") // for consistency with other FEATURE_* constants 684 public static final String FEATURE_EncodingStatistics = "encoding-statistics"; 685 686 /** 687 * <b>video encoder only</b>: codec supports HDR editing. 688 * <p> 689 * HDR editing support means that the codec accepts 10-bit HDR 690 * input surface, and it is capable of generating any HDR 691 * metadata required from both YUV and RGB input when the 692 * metadata is not present. This feature is only meaningful when 693 * using an HDR capable profile (and 10-bit HDR input). 694 * <p> 695 * This feature implies that the codec is capable of encoding at 696 * least one HDR format, and that it supports RGBA_1010102 as 697 * well as P010, and optionally RGBA_FP16 input formats, and 698 * that the encoder can generate HDR metadata for all supported 699 * HDR input formats. 700 */ 701 @SuppressLint("AllUpper") 702 public static final String FEATURE_HdrEditing = "hdr-editing"; 703 704 /** 705 * <b>video encoder only</b>: codec supports HLG editing. 706 * <p> 707 * HLG editing support means that the codec accepts 10-bit HDR 708 * input surface in both YUV and RGB pixel format. This feature 709 * is only meaningful when using a 10-bit (HLG) profile and 710 * 10-bit input. 711 * <p> 712 * This feature implies that the codec is capable of encoding 713 * 10-bit format, and that it supports RGBA_1010102 as 714 * well as P010, and optionally RGBA_FP16 input formats. 715 * <p> 716 * The difference between this feature and {@link 717 * FEATURE_HdrEditing} is that HLG does not require the 718 * generation of HDR metadata and does not use an explicit HDR 719 * profile. 720 */ 721 @SuppressLint("AllUpper") 722 @FlaggedApi(FLAG_HLG_EDITING) 723 public static final String FEATURE_HlgEditing = "hlg-editing"; 724 725 /** 726 * <b>video decoder only</b>: codec supports dynamically 727 * changing color aspects. 728 * <p> 729 * If true, the codec can propagate color aspect changes during 730 * decoding. This is only meaningful at session boundaries, e.g. 731 * upon processing Picture Parameter Sets prior to a new IDR. 732 * The color aspects may come from the bitstream, or may be 733 * provided using {@link MediaCodec#setParameters} calls. 734 * <p> 735 * If the codec supports both 8-bit and 10-bit profiles, this 736 * feature means that the codec can dynamically switch between 8 737 * and 10-bit profiles, but this is restricted to Surface mode 738 * only. 739 * <p> 740 * If the device supports HDR transfer functions, switching 741 * between SDR and HDR transfer is also supported. Together with 742 * the previous clause this means that switching between SDR and 743 * HDR sessions are supported in Surface mode, as SDR is 744 * typically encoded at 8-bit and HDR at 10-bit. 
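 * <p>A minimal sketch of checking for this feature on a decoder (the codec info and MIME type
 * below are only illustrative):
 * <pre>
 * CodecCapabilities caps = codecInfo.getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_HEVC);
 * boolean canSwitchColorAspects =
 *         caps.isFeatureSupported(CodecCapabilities.FEATURE_DynamicColorAspects);</pre>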
745 */ 746 @SuppressLint("AllUpper") 747 @FlaggedApi(FLAG_DYNAMIC_COLOR_ASPECTS) 748 public static final String FEATURE_DynamicColorAspects = "dynamic-color-aspects"; 749 750 /** 751 * <b>video encoder only</b>: codec supports region of interest encoding. 752 * <p> 753 * RoI encoding support means the codec accepts information that specifies the relative 754 * importance of different portions of each video frame. This allows the encoder to 755 * separate a video frame into critical and non-critical regions, and use more bits 756 * (better quality) to represent the critical regions and de-prioritize non-critical 757 * regions. In other words, the encoder chooses a negative qp bias for the critical 758 * portions and a zero or positive qp bias for the non-critical portions. 759 * <p> 760 * At a basic level, if the encoder decides to encode each frame with a uniform 761 * quantization value 'qpFrame' and a 'qpBias' is chosen/suggested for an LCU of the 762 * frame, then the actual qp of the LCU will be 'qpFrame + qpBias', although this value 763 * can be clamped basing on the min-max configured qp bounds for the current encoding 764 * session. 765 * <p> 766 * In a shot, if a group of LCUs pan out quickly they can be marked as non-critical 767 * thereby enabling the encoder to reserve fewer bits during their encoding. Contrarily, 768 * LCUs that remain in shot for a prolonged duration can be encoded at better quality in 769 * one frame thereby setting-up an excellent long-term reference for all future frames. 770 * <p> 771 * Note that by offsetting the quantization of each LCU, the overall bit allocation will 772 * differ from the originally estimated bit allocation, and the encoder will adjust the 773 * frame quantization for subsequent frames to meet the bitrate target. An effective 774 * selection of critical regions can set-up a golden reference and this can compensate 775 * for the bit burden that was introduced due to encoding RoI's at better quality. 776 * On the other hand, an ineffective choice of critical regions might increase the 777 * quality of certain parts of the image but this can hamper quality in subsequent frames. 778 * <p> 779 * @see MediaCodec#PARAMETER_KEY_QP_OFFSET_MAP 780 * @see MediaCodec#PARAMETER_KEY_QP_OFFSET_RECTS 781 */ 782 @SuppressLint("AllUpper") 783 @FlaggedApi(FLAG_REGION_OF_INTEREST) 784 public static final String FEATURE_Roi = "region-of-interest"; 785 786 /** 787 * <b>video decoder only</b>: codec supports detaching the 788 * output surface when in Surface mode. 789 * <p> If true, the codec can be configured in Surface mode 790 * without an actual surface (in detached surface mode). 791 * @see MediaCodec#CONFIGURE_FLAG_DETACHED_SURFACE 792 */ 793 @SuppressLint("AllUpper") 794 @FlaggedApi(FLAG_NULL_OUTPUT_SURFACE) 795 public static final String FEATURE_DetachedSurface = "detached-surface"; 796 797 /** 798 * Query codec feature capabilities. 799 * <p> 800 * These features are supported to be used by the codec. These 801 * include optional features that can be turned on, as well as 802 * features that are always on. 803 */ isFeatureSupported(String name)804 public final boolean isFeatureSupported(String name) { 805 return checkFeature(name, mFlagsSupported); 806 } 807 808 /** 809 * Query codec feature requirements. 810 * <p> 811 * These features are required to be used by the codec, and as such, 812 * they are always turned on. 
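 * <p>A minimal sketch contrasting required features with supported ones (where {@code caps} is
 * a {@code CodecCapabilities} instance):
 * <pre>
 * boolean canDoSecure  = caps.isFeatureSupported(FEATURE_SecurePlayback);
 * boolean mustDoSecure = caps.isFeatureRequired(FEATURE_SecurePlayback);
 * // A required feature is also reported as supported, and is always enabled.</pre>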
813 */ isFeatureRequired(String name)814 public final boolean isFeatureRequired(String name) { 815 return checkFeature(name, mFlagsRequired); 816 } 817 818 // Flags are used for feature list creation so separate this into a private 819 // static class to delay reading the flags only when constructing the list. 820 private static class FeatureList { getDecoderFeatures()821 private static Feature[] getDecoderFeatures() { 822 ArrayList<Feature> features = new ArrayList(); 823 features.add(new Feature(FEATURE_AdaptivePlayback, (1 << 0), true)); 824 features.add(new Feature(FEATURE_SecurePlayback, (1 << 1), false)); 825 features.add(new Feature(FEATURE_TunneledPlayback, (1 << 2), false)); 826 features.add(new Feature(FEATURE_PartialFrame, (1 << 3), false)); 827 features.add(new Feature(FEATURE_FrameParsing, (1 << 4), false)); 828 features.add(new Feature(FEATURE_MultipleFrames, (1 << 5), false)); 829 features.add(new Feature(FEATURE_DynamicTimestamp, (1 << 6), false)); 830 features.add(new Feature(FEATURE_LowLatency, (1 << 7), true)); 831 if (GetFlag(() -> android.media.codec.Flags.dynamicColorAspects())) { 832 features.add(new Feature(FEATURE_DynamicColorAspects, (1 << 8), true)); 833 } 834 if (GetFlag(() -> android.media.codec.Flags.nullOutputSurface())) { 835 features.add(new Feature(FEATURE_DetachedSurface, (1 << 9), true)); 836 } 837 838 // feature to exclude codec from REGULAR codec list 839 features.add(new Feature(FEATURE_SpecialCodec, (1 << 30), false, true)); 840 841 return features.toArray(new Feature[0]); 842 }; 843 844 private static Feature[] decoderFeatures = getDecoderFeatures(); 845 getEncoderFeatures()846 private static Feature[] getEncoderFeatures() { 847 ArrayList<Feature> features = new ArrayList(); 848 849 features.add(new Feature(FEATURE_IntraRefresh, (1 << 0), false)); 850 features.add(new Feature(FEATURE_MultipleFrames, (1 << 1), false)); 851 features.add(new Feature(FEATURE_DynamicTimestamp, (1 << 2), false)); 852 features.add(new Feature(FEATURE_QpBounds, (1 << 3), false)); 853 features.add(new Feature(FEATURE_EncodingStatistics, (1 << 4), false)); 854 features.add(new Feature(FEATURE_HdrEditing, (1 << 5), false)); 855 if (GetFlag(() -> android.media.codec.Flags.hlgEditing())) { 856 features.add(new Feature(FEATURE_HlgEditing, (1 << 6), true)); 857 } 858 if (GetFlag(() -> android.media.codec.Flags.regionOfInterest())) { 859 features.add(new Feature(FEATURE_Roi, (1 << 7), true)); 860 } 861 862 // feature to exclude codec from REGULAR codec list 863 features.add(new Feature(FEATURE_SpecialCodec, (1 << 30), false, true)); 864 865 return features.toArray(new Feature[0]); 866 }; 867 868 private static Feature[] encoderFeatures = getEncoderFeatures(); 869 getFeatures(boolean isEncoder)870 public static Feature[] getFeatures(boolean isEncoder) { 871 if (isEncoder) { 872 return encoderFeatures; 873 } else { 874 return decoderFeatures; 875 } 876 } 877 } 878 879 /** @hide */ validFeatures()880 public String[] validFeatures() { 881 Feature[] features = getValidFeatures(); 882 String[] res = new String[features.length]; 883 for (int i = 0; i < res.length; i++) { 884 if (!features[i].mInternal) { 885 res[i] = features[i].mName; 886 } 887 } 888 return res; 889 } 890 getValidFeatures()891 private Feature[] getValidFeatures() { 892 return FeatureList.getFeatures(isEncoder()); 893 } 894 checkFeature(String name, int flags)895 private boolean checkFeature(String name, int flags) { 896 for (Feature feat: getValidFeatures()) { 897 if (feat.mName.equals(name)) { 898 return (flags & 
feat.mValue) != 0; 899 } 900 } 901 return false; 902 } 903 904 /** @hide */ isRegular()905 public boolean isRegular() { 906 // regular codecs only require default features 907 for (Feature feat: getValidFeatures()) { 908 if (!feat.mDefault && isFeatureRequired(feat.mName)) { 909 return false; 910 } 911 } 912 return true; 913 } 914 915 /** 916 * Query whether codec supports a given {@link MediaFormat}. 917 * 918 * <p class=note> 919 * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP}, 920 * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE 921 * frame rate}. Use 922 * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code> 923 * to clear any existing frame rate setting in the format. 924 * <p> 925 * 926 * The following table summarizes the format keys considered by this method. 927 * This is especially important to consider when targeting a higher SDK version than the 928 * minimum SDK version, as this method will disregard some keys on devices below the target 929 * SDK version. 930 * 931 * <table style="width: 0%"> 932 * <thead> 933 * <tr> 934 * <th rowspan=3>OS Version(s)</th> 935 * <td colspan=3>{@code MediaFormat} keys considered for</th> 936 * </tr><tr> 937 * <th>Audio Codecs</th> 938 * <th>Video Codecs</th> 939 * <th>Encoders</th> 940 * </tr> 941 * </thead> 942 * <tbody> 943 * <tr> 944 * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td> 945 * <td rowspan=3>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br> 946 * {@link MediaFormat#KEY_SAMPLE_RATE},<br> 947 * {@link MediaFormat#KEY_CHANNEL_COUNT},</td> 948 * <td>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br> 949 * {@link CodecCapabilities#FEATURE_AdaptivePlayback}<sup>D</sup>,<br> 950 * {@link CodecCapabilities#FEATURE_SecurePlayback}<sup>D</sup>,<br> 951 * {@link CodecCapabilities#FEATURE_TunneledPlayback}<sup>D</sup>,<br> 952 * {@link MediaFormat#KEY_WIDTH},<br> 953 * {@link MediaFormat#KEY_HEIGHT},<br> 954 * <strong>no</strong> {@code KEY_FRAME_RATE}</td> 955 * <td rowspan=10>as to the left, plus<br> 956 * {@link MediaFormat#KEY_BITRATE_MODE},<br> 957 * {@link MediaFormat#KEY_PROFILE} 958 * (and/or {@link MediaFormat#KEY_AAC_PROFILE}<sup>~</sup>),<br> 959 * <!-- {link MediaFormat#KEY_QUALITY},<br> --> 960 * {@link MediaFormat#KEY_COMPLEXITY} 961 * (and/or {@link MediaFormat#KEY_FLAC_COMPRESSION_LEVEL}<sup>~</sup>)</td> 962 * </tr><tr> 963 * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td> 964 * <td rowspan=2>as above, plus<br> 965 * {@link MediaFormat#KEY_FRAME_RATE}</td> 966 * </tr><tr> 967 * <td>{@link android.os.Build.VERSION_CODES#M}</td> 968 * </tr><tr> 969 * <td>{@link android.os.Build.VERSION_CODES#N}</td> 970 * <td rowspan=2>as above, plus<br> 971 * {@link MediaFormat#KEY_PROFILE},<br> 972 * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> --> 973 * {@link MediaFormat#KEY_BIT_RATE}</td> 974 * <td rowspan=2>as above, plus<br> 975 * {@link MediaFormat#KEY_PROFILE},<br> 976 * {@link MediaFormat#KEY_LEVEL}<sup>+</sup>,<br> 977 * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> --> 978 * {@link MediaFormat#KEY_BIT_RATE},<br> 979 * {@link CodecCapabilities#FEATURE_IntraRefresh}<sup>E</sup></td> 980 * </tr><tr> 981 * <td>{@link android.os.Build.VERSION_CODES#N_MR1}</td> 982 * </tr><tr> 983 * <td>{@link android.os.Build.VERSION_CODES#O}</td> 984 * <td rowspan=3 colspan=2>as above, plus<br> 985 * {@link CodecCapabilities#FEATURE_PartialFrame}<sup>D</sup></td> 986 * </tr><tr> 987 * <td>{@link android.os.Build.VERSION_CODES#O_MR1}</td> 988 * </tr><tr> 989 
* <td>{@link android.os.Build.VERSION_CODES#P}</td> 990 * </tr><tr> 991 * <td>{@link android.os.Build.VERSION_CODES#Q}</td> 992 * <td colspan=2>as above, plus<br> 993 * {@link CodecCapabilities#FEATURE_FrameParsing}<sup>D</sup>,<br> 994 * {@link CodecCapabilities#FEATURE_MultipleFrames},<br> 995 * {@link CodecCapabilities#FEATURE_DynamicTimestamp}</td> 996 * </tr><tr> 997 * <td>{@link android.os.Build.VERSION_CODES#R}</td> 998 * <td colspan=2>as above, plus<br> 999 * {@link CodecCapabilities#FEATURE_LowLatency}<sup>D</sup></td> 1000 * </tr> 1001 * <tr> 1002 * <td colspan=4> 1003 * <p class=note><strong>Notes:</strong><br> 1004 * *: must be specified; otherwise, method returns {@code false}.<br> 1005 * +: method does not verify that the format parameters are supported 1006 * by the specified level.<br> 1007 * D: decoders only<br> 1008 * E: encoders only<br> 1009 * ~: if both keys are provided values must match 1010 * </td> 1011 * </tr> 1012 * </tbody> 1013 * </table> 1014 * 1015 * @param format media format with optional feature directives. 1016 * @throws IllegalArgumentException if format is not a valid media format. 1017 * @return whether the codec capabilities support the given format 1018 * and feature requests. 1019 */ isFormatSupported(MediaFormat format)1020 public final boolean isFormatSupported(MediaFormat format) { 1021 final Map<String, Object> map = format.getMap(); 1022 final String mime = (String)map.get(MediaFormat.KEY_MIME); 1023 1024 // mime must match if present 1025 if (mime != null && !mMime.equalsIgnoreCase(mime)) { 1026 return false; 1027 } 1028 1029 // check feature support 1030 for (Feature feat: getValidFeatures()) { 1031 if (feat.mInternal) { 1032 continue; 1033 } 1034 1035 Integer yesNo = (Integer)map.get(MediaFormat.KEY_FEATURE_ + feat.mName); 1036 if (yesNo == null) { 1037 continue; 1038 } 1039 if ((yesNo == 1 && !isFeatureSupported(feat.mName)) || 1040 (yesNo == 0 && isFeatureRequired(feat.mName))) { 1041 return false; 1042 } 1043 } 1044 1045 Integer profile = (Integer)map.get(MediaFormat.KEY_PROFILE); 1046 Integer level = (Integer)map.get(MediaFormat.KEY_LEVEL); 1047 1048 if (profile != null) { 1049 if (!supportsProfileLevel(profile, level)) { 1050 return false; 1051 } 1052 1053 // If we recognize this profile, check that this format is supported by the 1054 // highest level supported by the codec for that profile. (Ignore specified 1055 // level beyond the above profile/level check as level is only used as a 1056 // guidance. E.g. AVC Level 1 CIF format is supported if codec supports level 1.1 1057 // even though max size for Level 1 is QCIF. However, MPEG2 Simple Profile 1058 // 1080p format is not supported even if codec supports Main Profile Level High, 1059 // as Simple Profile does not support 1080p. 1060 CodecCapabilities levelCaps = null; 1061 int maxLevel = 0; 1062 for (CodecProfileLevel pl : profileLevels) { 1063 if (pl.profile == profile && pl.level > maxLevel) { 1064 // H.263 levels are not completely ordered: 1065 // Level45 support only implies Level10 support 1066 if (!mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263) 1067 || pl.level != CodecProfileLevel.H263Level45 1068 || maxLevel == CodecProfileLevel.H263Level10) { 1069 maxLevel = pl.level; 1070 } 1071 } 1072 } 1073 levelCaps = createFromProfileLevel(mMime, profile, maxLevel); 1074 // We must remove the profile from this format otherwise levelCaps.isFormatSupported 1075 // will get into this same condition and loop forever. 
Furthermore, since levelCaps 1076 // does not contain features and bitrate specific keys, keep only keys relevant for 1077 // a level check. 1078 Map<String, Object> levelCriticalFormatMap = new HashMap<>(map); 1079 final Set<String> criticalKeys = 1080 isVideo() ? VideoCapabilities.VIDEO_LEVEL_CRITICAL_FORMAT_KEYS : 1081 isAudio() ? AudioCapabilities.AUDIO_LEVEL_CRITICAL_FORMAT_KEYS : 1082 null; 1083 1084 // critical keys will always contain KEY_MIME, but should also contain others to be 1085 // meaningful 1086 if (criticalKeys != null && criticalKeys.size() > 1 && levelCaps != null) { 1087 levelCriticalFormatMap.keySet().retainAll(criticalKeys); 1088 1089 MediaFormat levelCriticalFormat = new MediaFormat(levelCriticalFormatMap); 1090 if (!levelCaps.isFormatSupported(levelCriticalFormat)) { 1091 return false; 1092 } 1093 } 1094 } 1095 if (mAudioCaps != null && !mAudioCaps.supportsFormat(format)) { 1096 return false; 1097 } 1098 if (mVideoCaps != null && !mVideoCaps.supportsFormat(format)) { 1099 return false; 1100 } 1101 if (mEncoderCaps != null && !mEncoderCaps.supportsFormat(format)) { 1102 return false; 1103 } 1104 return true; 1105 } 1106 supportsBitrate( Range<Integer> bitrateRange, MediaFormat format)1107 private static boolean supportsBitrate( 1108 Range<Integer> bitrateRange, MediaFormat format) { 1109 Map<String, Object> map = format.getMap(); 1110 1111 // consider max bitrate over average bitrate for support 1112 Integer maxBitrate = (Integer)map.get(MediaFormat.KEY_MAX_BIT_RATE); 1113 Integer bitrate = (Integer)map.get(MediaFormat.KEY_BIT_RATE); 1114 if (bitrate == null) { 1115 bitrate = maxBitrate; 1116 } else if (maxBitrate != null) { 1117 bitrate = Math.max(bitrate, maxBitrate); 1118 } 1119 1120 if (bitrate != null && bitrate > 0) { 1121 return bitrateRange.contains(bitrate); 1122 } 1123 1124 return true; 1125 } 1126 supportsProfileLevel(int profile, Integer level)1127 private boolean supportsProfileLevel(int profile, Integer level) { 1128 for (CodecProfileLevel pl: profileLevels) { 1129 if (pl.profile != profile) { 1130 continue; 1131 } 1132 1133 // No specific level requested 1134 if (level == null) { 1135 return true; 1136 } 1137 1138 // AAC doesn't use levels 1139 if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AAC)) { 1140 return true; 1141 } 1142 1143 // DTS doesn't use levels 1144 if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS) 1145 || mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS_HD) 1146 || mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS_UHD)) { 1147 return true; 1148 } 1149 1150 // H.263 levels are not completely ordered: 1151 // Level45 support only implies Level10 support 1152 if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263)) { 1153 if (pl.level != level && pl.level == CodecProfileLevel.H263Level45 1154 && level > CodecProfileLevel.H263Level10) { 1155 continue; 1156 } 1157 } 1158 1159 // MPEG4 levels are not completely ordered: 1160 // Level1 support only implies Level0 (and not Level0b) support 1161 if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG4)) { 1162 if (pl.level != level && pl.level == CodecProfileLevel.MPEG4Level1 1163 && level > CodecProfileLevel.MPEG4Level0) { 1164 continue; 1165 } 1166 } 1167 1168 // HEVC levels incorporate both tiers and levels. Verify tier support. 
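                // (In CodecProfileLevel the high-tier HEVC level constants occupy different
                // bit positions than the main-tier ones, so masking a level value with
                // HEVCHighTierLevels tells whether that value denotes a high-tier level.)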
1169 if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC)) { 1170 boolean supportsHighTier = 1171 (pl.level & CodecProfileLevel.HEVCHighTierLevels) != 0; 1172 boolean checkingHighTier = (level & CodecProfileLevel.HEVCHighTierLevels) != 0; 1173 // high tier levels are only supported by other high tier levels 1174 if (checkingHighTier && !supportsHighTier) { 1175 continue; 1176 } 1177 } 1178 1179 if (pl.level >= level) { 1180 // if we recognize the listed profile/level, we must also recognize the 1181 // profile/level arguments. 1182 if (createFromProfileLevel(mMime, profile, pl.level) != null) { 1183 return createFromProfileLevel(mMime, profile, level) != null; 1184 } 1185 return true; 1186 } 1187 } 1188 return false; 1189 } 1190 1191 // errors while reading profile levels - accessed from sister capabilities 1192 int mError; 1193 1194 private static final String TAG = "CodecCapabilities"; 1195 1196 // NEW-STYLE CAPABILITIES 1197 private AudioCapabilities mAudioCaps; 1198 private VideoCapabilities mVideoCaps; 1199 private EncoderCapabilities mEncoderCaps; 1200 private MediaFormat mDefaultFormat; 1201 1202 /** 1203 * Returns a MediaFormat object with default values for configurations that have 1204 * defaults. 1205 */ getDefaultFormat()1206 public MediaFormat getDefaultFormat() { 1207 return mDefaultFormat; 1208 } 1209 1210 /** 1211 * Returns the mime type for which this codec-capability object was created. 1212 */ getMimeType()1213 public String getMimeType() { 1214 return mMime; 1215 } 1216 1217 /** 1218 * Returns the max number of the supported concurrent codec instances. 1219 * <p> 1220 * This is a hint for an upper bound. Applications should not expect to successfully 1221 * operate more instances than the returned value, but the actual number of 1222 * concurrently operable instances may be less as it depends on the available 1223 * resources at time of use. 1224 */ getMaxSupportedInstances()1225 public int getMaxSupportedInstances() { 1226 return mMaxSupportedInstances; 1227 } 1228 isAudio()1229 private boolean isAudio() { 1230 return mAudioCaps != null; 1231 } 1232 1233 /** 1234 * Returns the audio capabilities or {@code null} if this is not an audio codec. 1235 */ getAudioCapabilities()1236 public AudioCapabilities getAudioCapabilities() { 1237 return mAudioCaps; 1238 } 1239 isEncoder()1240 private boolean isEncoder() { 1241 return mEncoderCaps != null; 1242 } 1243 1244 /** 1245 * Returns the encoding capabilities or {@code null} if this is not an encoder. 1246 */ getEncoderCapabilities()1247 public EncoderCapabilities getEncoderCapabilities() { 1248 return mEncoderCaps; 1249 } 1250 isVideo()1251 private boolean isVideo() { 1252 return mVideoCaps != null; 1253 } 1254 1255 /** 1256 * Returns the video capabilities or {@code null} if this is not a video codec. 1257 */ getVideoCapabilities()1258 public VideoCapabilities getVideoCapabilities() { 1259 return mVideoCaps; 1260 } 1261 1262 /** @hide */ dup()1263 public CodecCapabilities dup() { 1264 CodecCapabilities caps = new CodecCapabilities(); 1265 1266 // profileLevels and colorFormats may be modified by client. 
1267 caps.profileLevels = Arrays.copyOf(profileLevels, profileLevels.length); 1268 caps.colorFormats = Arrays.copyOf(colorFormats, colorFormats.length); 1269 1270 caps.mMime = mMime; 1271 caps.mMaxSupportedInstances = mMaxSupportedInstances; 1272 caps.mFlagsRequired = mFlagsRequired; 1273 caps.mFlagsSupported = mFlagsSupported; 1274 caps.mFlagsVerified = mFlagsVerified; 1275 caps.mAudioCaps = mAudioCaps; 1276 caps.mVideoCaps = mVideoCaps; 1277 caps.mEncoderCaps = mEncoderCaps; 1278 caps.mDefaultFormat = mDefaultFormat; 1279 caps.mCapabilitiesInfo = mCapabilitiesInfo; 1280 1281 return caps; 1282 } 1283 1284 /** 1285 * Retrieve the codec capabilities for a certain {@code mime type}, {@code 1286 * profile} and {@code level}. If the type, or profile-level combination 1287 * is not understood by the framework, it returns null. 1288 * <p class=note> In {@link android.os.Build.VERSION_CODES#M}, calling this 1289 * method without calling any method of the {@link MediaCodecList} class beforehand 1290 * results in a {@link NullPointerException}.</p> 1291 */ createFromProfileLevel( String mime, int profile, int level)1292 public static CodecCapabilities createFromProfileLevel( 1293 String mime, int profile, int level) { 1294 CodecProfileLevel pl = new CodecProfileLevel(); 1295 pl.profile = profile; 1296 pl.level = level; 1297 MediaFormat defaultFormat = new MediaFormat(); 1298 defaultFormat.setString(MediaFormat.KEY_MIME, mime); 1299 1300 CodecCapabilities ret = new CodecCapabilities( 1301 new CodecProfileLevel[] { pl }, new int[0], true /* encoder */, 1302 defaultFormat, new MediaFormat() /* info */); 1303 if (ret.mError != 0) { 1304 return null; 1305 } 1306 return ret; 1307 } 1308 CodecCapabilities( CodecProfileLevel[] profLevs, int[] colFmts, boolean encoder, Map<String, Object>defaultFormatMap, Map<String, Object>capabilitiesMap)1309 /* package private */ CodecCapabilities( 1310 CodecProfileLevel[] profLevs, int[] colFmts, 1311 boolean encoder, 1312 Map<String, Object>defaultFormatMap, 1313 Map<String, Object>capabilitiesMap) { 1314 this(profLevs, colFmts, encoder, 1315 new MediaFormat(defaultFormatMap), 1316 new MediaFormat(capabilitiesMap)); 1317 } 1318 1319 private MediaFormat mCapabilitiesInfo; 1320 CodecCapabilities( CodecProfileLevel[] profLevs, int[] colFmts, boolean encoder, MediaFormat defaultFormat, MediaFormat info)1321 /* package private */ CodecCapabilities( 1322 CodecProfileLevel[] profLevs, int[] colFmts, boolean encoder, 1323 MediaFormat defaultFormat, MediaFormat info) { 1324 final Map<String, Object> map = info.getMap(); 1325 colorFormats = colFmts; 1326 mFlagsVerified = 0; // TODO: remove as it is unused 1327 mDefaultFormat = defaultFormat; 1328 mCapabilitiesInfo = info; 1329 mMime = mDefaultFormat.getString(MediaFormat.KEY_MIME); 1330 1331 /* VP9 introduced profiles around 2016, so some VP9 codecs may not advertise any 1332 supported profiles. Determine the level for them using the info they provide. 
*/ 1333 if (profLevs.length == 0 && mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP9)) { 1334 CodecProfileLevel profLev = new CodecProfileLevel(); 1335 profLev.profile = CodecProfileLevel.VP9Profile0; 1336 profLev.level = VideoCapabilities.equivalentVP9Level(info); 1337 profLevs = new CodecProfileLevel[] { profLev }; 1338 } 1339 profileLevels = profLevs; 1340 1341 if (mMime.toLowerCase().startsWith("audio/")) { 1342 mAudioCaps = AudioCapabilities.create(info, this); 1343 mAudioCaps.getDefaultFormat(mDefaultFormat); 1344 } else if (mMime.toLowerCase().startsWith("video/") 1345 || mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC)) { 1346 mVideoCaps = VideoCapabilities.create(info, this); 1347 } 1348 if (encoder) { 1349 mEncoderCaps = EncoderCapabilities.create(info, this); 1350 mEncoderCaps.getDefaultFormat(mDefaultFormat); 1351 } 1352 1353 final Map<String, Object> global = MediaCodecList.getGlobalSettings(); 1354 mMaxSupportedInstances = Utils.parseIntSafely( 1355 global.get("max-concurrent-instances"), DEFAULT_MAX_SUPPORTED_INSTANCES); 1356 1357 int maxInstances = Utils.parseIntSafely( 1358 map.get("max-concurrent-instances"), mMaxSupportedInstances); 1359 mMaxSupportedInstances = 1360 Range.create(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances); 1361 1362 for (Feature feat: getValidFeatures()) { 1363 String key = MediaFormat.KEY_FEATURE_ + feat.mName; 1364 Integer yesNo = (Integer)map.get(key); 1365 if (yesNo == null) { 1366 continue; 1367 } 1368 if (yesNo > 0) { 1369 mFlagsRequired |= feat.mValue; 1370 } 1371 mFlagsSupported |= feat.mValue; 1372 if (!feat.mInternal) { 1373 mDefaultFormat.setInteger(key, 1); 1374 } 1375 // TODO restrict features by mFlagsVerified once all codecs reliably verify them 1376 } 1377 } 1378 } 1379 1380 /** 1381 * A class that supports querying the audio capabilities of a codec. 1382 */ 1383 public static final class AudioCapabilities { 1384 private static final String TAG = "AudioCapabilities"; 1385 private CodecCapabilities mParent; 1386 private Range<Integer> mBitrateRange; 1387 1388 private int[] mSampleRates; 1389 private Range<Integer>[] mSampleRateRanges; 1390 private Range<Integer>[] mInputChannelRanges; 1391 1392 private static final int MAX_INPUT_CHANNEL_COUNT = 30; 1393 1394 /** 1395 * Returns the range of supported bitrates in bits/second. 1396 */ getBitrateRange()1397 public Range<Integer> getBitrateRange() { 1398 return mBitrateRange; 1399 } 1400 1401 /** 1402 * Returns the array of supported sample rates if the codec 1403 * supports only discrete values. Otherwise, it returns 1404 * {@code null}. The array is sorted in ascending order. 1405 */ getSupportedSampleRates()1406 public int[] getSupportedSampleRates() { 1407 return mSampleRates != null ? Arrays.copyOf(mSampleRates, mSampleRates.length) : null; 1408 } 1409 1410 /** 1411 * Returns the array of supported sample rate ranges. The 1412 * array is sorted in ascending order, and the ranges are 1413 * distinct. 1414 */ getSupportedSampleRateRanges()1415 public Range<Integer>[] getSupportedSampleRateRanges() { 1416 return Arrays.copyOf(mSampleRateRanges, mSampleRateRanges.length); 1417 } 1418 1419 /** 1420 * Returns the maximum number of input channels supported. 1421 * 1422 * Through {@link android.os.Build.VERSION_CODES#R}, this method indicated support 1423 * for any number of input channels between 1 and this maximum value. 1424 * 1425 * As of {@link android.os.Build.VERSION_CODES#S}, 1426 * the implied lower limit of 1 channel is no longer valid. 
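 * For example, a codec that supports only 1, 2, 6 or 8 input channels would report the ranges
 * [1..2], [6..6] and [8..8]. A minimal sketch of testing a specific channel count (where
 * {@code audioCaps} is this {@code AudioCapabilities} instance):
 * <pre>
 * boolean sixChannelsOk = false;
 * for (Range<Integer> r : audioCaps.getInputChannelCountRanges()) {
 *     sixChannelsOk |= r.contains(6);
 * }</pre>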
1427 * As of {@link android.os.Build.VERSION_CODES#S}, {@link #getMaxInputChannelCount} is 1428 * superseded by {@link #getInputChannelCountRanges}, 1429 * which returns an array of ranges of channels. 1430 * The {@link #getMaxInputChannelCount} method will return the highest value 1431 * in the ranges returned by {@link #getInputChannelCountRanges} 1432 * 1433 */ 1434 @IntRange(from = 1, to = 255) getMaxInputChannelCount()1435 public int getMaxInputChannelCount() { 1436 int overall_max = 0; 1437 for (int i = mInputChannelRanges.length - 1; i >= 0; i--) { 1438 int lmax = mInputChannelRanges[i].getUpper(); 1439 if (lmax > overall_max) { 1440 overall_max = lmax; 1441 } 1442 } 1443 return overall_max; 1444 } 1445 1446 /** 1447 * Returns the minimum number of input channels supported. 1448 * This is often 1, but does vary for certain mime types. 1449 * 1450 * This returns the lowest channel count in the ranges returned by 1451 * {@link #getInputChannelCountRanges}. 1452 */ 1453 @IntRange(from = 1, to = 255) getMinInputChannelCount()1454 public int getMinInputChannelCount() { 1455 int overall_min = MAX_INPUT_CHANNEL_COUNT; 1456 for (int i = mInputChannelRanges.length - 1; i >= 0; i--) { 1457 int lmin = mInputChannelRanges[i].getLower(); 1458 if (lmin < overall_min) { 1459 overall_min = lmin; 1460 } 1461 } 1462 return overall_min; 1463 } 1464 1465 /* 1466 * Returns an array of ranges representing the number of input channels supported. 1467 * The codec supports any number of input channels within this range. 1468 * 1469 * This supersedes the {@link #getMaxInputChannelCount} method. 1470 * 1471 * For many codecs, this will be a single range [1..N], for some N. 1472 */ 1473 @SuppressLint("ArrayReturn") 1474 @NonNull getInputChannelCountRanges()1475 public Range<Integer>[] getInputChannelCountRanges() { 1476 return Arrays.copyOf(mInputChannelRanges, mInputChannelRanges.length); 1477 } 1478 1479 /* no public constructor */ AudioCapabilities()1480 private AudioCapabilities() { } 1481 1482 /** @hide */ create( MediaFormat info, CodecCapabilities parent)1483 public static AudioCapabilities create( 1484 MediaFormat info, CodecCapabilities parent) { 1485 AudioCapabilities caps = new AudioCapabilities(); 1486 caps.init(info, parent); 1487 return caps; 1488 } 1489 init(MediaFormat info, CodecCapabilities parent)1490 private void init(MediaFormat info, CodecCapabilities parent) { 1491 mParent = parent; 1492 initWithPlatformLimits(); 1493 applyLevelLimits(); 1494 parseFromInfo(info); 1495 } 1496 initWithPlatformLimits()1497 private void initWithPlatformLimits() { 1498 mBitrateRange = Range.create(0, Integer.MAX_VALUE); 1499 mInputChannelRanges = new Range[] {Range.create(1, MAX_INPUT_CHANNEL_COUNT)}; 1500 // mBitrateRange = Range.create(1, 320000); 1501 final int minSampleRate = SystemProperties. 1502 getInt("ro.mediacodec.min_sample_rate", 7350); 1503 final int maxSampleRate = SystemProperties. 
1504 getInt("ro.mediacodec.max_sample_rate", 192000); 1505 mSampleRateRanges = new Range[] { Range.create(minSampleRate, maxSampleRate) }; 1506 mSampleRates = null; 1507 } 1508 supports(Integer sampleRate, Integer inputChannels)1509 private boolean supports(Integer sampleRate, Integer inputChannels) { 1510 // channels and sample rates are checked orthogonally 1511 if (inputChannels != null) { 1512 int ix = Utils.binarySearchDistinctRanges( 1513 mInputChannelRanges, inputChannels); 1514 if (ix < 0) { 1515 return false; 1516 } 1517 } 1518 if (sampleRate != null) { 1519 int ix = Utils.binarySearchDistinctRanges( 1520 mSampleRateRanges, sampleRate); 1521 if (ix < 0) { 1522 return false; 1523 } 1524 } 1525 return true; 1526 } 1527 1528 /** 1529 * Query whether the sample rate is supported by the codec. 1530 */ isSampleRateSupported(int sampleRate)1531 public boolean isSampleRateSupported(int sampleRate) { 1532 return supports(sampleRate, null); 1533 } 1534 1535 /** modifies rates */ limitSampleRates(int[] rates)1536 private void limitSampleRates(int[] rates) { 1537 Arrays.sort(rates); 1538 ArrayList<Range<Integer>> ranges = new ArrayList<Range<Integer>>(); 1539 for (int rate: rates) { 1540 if (supports(rate, null /* channels */)) { 1541 ranges.add(Range.create(rate, rate)); 1542 } 1543 } 1544 mSampleRateRanges = ranges.toArray(new Range[ranges.size()]); 1545 createDiscreteSampleRates(); 1546 } 1547 createDiscreteSampleRates()1548 private void createDiscreteSampleRates() { 1549 mSampleRates = new int[mSampleRateRanges.length]; 1550 for (int i = 0; i < mSampleRateRanges.length; i++) { 1551 mSampleRates[i] = mSampleRateRanges[i].getLower(); 1552 } 1553 } 1554 1555 /** modifies rateRanges */ limitSampleRates(Range<Integer>[] rateRanges)1556 private void limitSampleRates(Range<Integer>[] rateRanges) { 1557 sortDistinctRanges(rateRanges); 1558 mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges); 1559 1560 // check if all values are discrete 1561 for (Range<Integer> range: mSampleRateRanges) { 1562 if (!range.getLower().equals(range.getUpper())) { 1563 mSampleRates = null; 1564 return; 1565 } 1566 } 1567 createDiscreteSampleRates(); 1568 } 1569 applyLevelLimits()1570 private void applyLevelLimits() { 1571 int[] sampleRates = null; 1572 Range<Integer> sampleRateRange = null, bitRates = null; 1573 int maxChannels = MAX_INPUT_CHANNEL_COUNT; 1574 CodecProfileLevel[] profileLevels = mParent.profileLevels; 1575 String mime = mParent.getMimeType(); 1576 1577 if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MPEG)) { 1578 sampleRates = new int[] { 1579 8000, 11025, 12000, 1580 16000, 22050, 24000, 1581 32000, 44100, 48000 }; 1582 bitRates = Range.create(8000, 320000); 1583 maxChannels = 2; 1584 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_NB)) { 1585 sampleRates = new int[] { 8000 }; 1586 bitRates = Range.create(4750, 12200); 1587 maxChannels = 1; 1588 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_WB)) { 1589 sampleRates = new int[] { 16000 }; 1590 bitRates = Range.create(6600, 23850); 1591 maxChannels = 1; 1592 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AAC)) { 1593 sampleRates = new int[] { 1594 7350, 8000, 1595 11025, 12000, 16000, 1596 22050, 24000, 32000, 1597 44100, 48000, 64000, 1598 88200, 96000 }; 1599 bitRates = Range.create(8000, 510000); 1600 maxChannels = 48; 1601 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_VORBIS)) { 1602 bitRates = Range.create(32000, 500000); 1603 sampleRateRange = 
Range.create(8000, 192000); 1604 maxChannels = 255; 1605 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_OPUS)) { 1606 bitRates = Range.create(6000, 510000); 1607 sampleRates = new int[] { 8000, 12000, 16000, 24000, 48000 }; 1608 maxChannels = 255; 1609 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_RAW)) { 1610 sampleRateRange = Range.create(1, 192000); 1611 bitRates = Range.create(1, 10000000); 1612 maxChannels = AudioSystem.OUT_CHANNEL_COUNT_MAX; 1613 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_FLAC)) { 1614 sampleRateRange = Range.create(1, 655350); 1615 // lossless codec, so bitrate is ignored 1616 maxChannels = 255; 1617 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_ALAW) 1618 || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_MLAW)) { 1619 sampleRates = new int[] { 8000 }; 1620 bitRates = Range.create(64000, 64000); 1621 // platform allows multiple channels for this format 1622 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MSGSM)) { 1623 sampleRates = new int[] { 8000 }; 1624 bitRates = Range.create(13000, 13000); 1625 maxChannels = 1; 1626 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AC3)) { 1627 maxChannels = 6; 1628 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_EAC3)) { 1629 maxChannels = 16; 1630 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_EAC3_JOC)) { 1631 sampleRates = new int[] { 48000 }; 1632 bitRates = Range.create(32000, 6144000); 1633 maxChannels = 16; 1634 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AC4)) { 1635 sampleRates = new int[] { 44100, 48000, 96000, 192000 }; 1636 bitRates = Range.create(16000, 2688000); 1637 maxChannels = 24; 1638 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS)) { 1639 sampleRates = new int[] { 44100, 48000 }; 1640 bitRates = Range.create(96000, 1524000); 1641 maxChannels = 6; 1642 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS_HD)) { 1643 for (CodecProfileLevel profileLevel: profileLevels) { 1644 switch (profileLevel.profile) { 1645 case CodecProfileLevel.DTS_HDProfileLBR: 1646 sampleRates = new int[]{ 22050, 24000, 44100, 48000 }; 1647 bitRates = Range.create(32000, 768000); 1648 break; 1649 case CodecProfileLevel.DTS_HDProfileHRA: 1650 case CodecProfileLevel.DTS_HDProfileMA: 1651 sampleRates = new int[]{ 44100, 48000, 88200, 96000, 176400, 192000 }; 1652 bitRates = Range.create(96000, 24500000); 1653 break; 1654 default: 1655 Log.w(TAG, "Unrecognized profile " 1656 + profileLevel.profile + " for " + mime); 1657 mParent.mError |= ERROR_UNRECOGNIZED; 1658 sampleRates = new int[]{ 44100, 48000, 88200, 96000, 176400, 192000 }; 1659 bitRates = Range.create(96000, 24500000); 1660 } 1661 } 1662 maxChannels = 8; 1663 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_DTS_UHD)) { 1664 for (CodecProfileLevel profileLevel: profileLevels) { 1665 switch (profileLevel.profile) { 1666 case CodecProfileLevel.DTS_UHDProfileP2: 1667 sampleRates = new int[]{ 48000 }; 1668 bitRates = Range.create(96000, 768000); 1669 maxChannels = 10; 1670 break; 1671 case CodecProfileLevel.DTS_UHDProfileP1: 1672 sampleRates = new int[]{ 44100, 48000, 88200, 96000, 176400, 192000 }; 1673 bitRates = Range.create(96000, 24500000); 1674 maxChannels = 32; 1675 break; 1676 default: 1677 Log.w(TAG, "Unrecognized profile " 1678 + profileLevel.profile + " for " + mime); 1679 mParent.mError |= ERROR_UNRECOGNIZED; 1680 sampleRates = new int[]{ 44100, 48000, 88200, 96000, 176400, 192000 }; 1681 bitRates = 
Range.create(96000, 24500000); 1682 maxChannels = 32; 1683 } 1684 } 1685 } else { 1686 Log.w(TAG, "Unsupported mime " + mime); 1687 mParent.mError |= ERROR_UNSUPPORTED; 1688 } 1689 1690 // restrict ranges 1691 if (sampleRates != null) { 1692 limitSampleRates(sampleRates); 1693 } else if (sampleRateRange != null) { 1694 limitSampleRates(new Range[] { sampleRateRange }); 1695 } 1696 1697 Range<Integer> channelRange = Range.create(1, maxChannels); 1698 1699 applyLimits(new Range[] { channelRange }, bitRates); 1700 } 1701 applyLimits(Range<Integer>[] inputChannels, Range<Integer> bitRates)1702 private void applyLimits(Range<Integer>[] inputChannels, Range<Integer> bitRates) { 1703 1704 // clamp & make a local copy 1705 Range<Integer>[] myInputChannels = new Range[inputChannels.length]; 1706 for (int i = 0; i < inputChannels.length; i++) { 1707 int lower = inputChannels[i].clamp(1); 1708 int upper = inputChannels[i].clamp(MAX_INPUT_CHANNEL_COUNT); 1709 myInputChannels[i] = Range.create(lower, upper); 1710 } 1711 1712 // sort, intersect with existing, & save channel list 1713 sortDistinctRanges(myInputChannels); 1714 Range<Integer>[] joinedChannelList = 1715 intersectSortedDistinctRanges(myInputChannels, mInputChannelRanges); 1716 mInputChannelRanges = joinedChannelList; 1717 1718 if (bitRates != null) { 1719 mBitrateRange = mBitrateRange.intersect(bitRates); 1720 } 1721 } 1722 parseFromInfo(MediaFormat info)1723 private void parseFromInfo(MediaFormat info) { 1724 int maxInputChannels = MAX_INPUT_CHANNEL_COUNT; 1725 Range<Integer>[] channels = new Range[] { Range.create(1, maxInputChannels)}; 1726 Range<Integer> bitRates = POSITIVE_INTEGERS; 1727 1728 if (info.containsKey("sample-rate-ranges")) { 1729 String[] rateStrings = info.getString("sample-rate-ranges").split(","); 1730 Range<Integer>[] rateRanges = new Range[rateStrings.length]; 1731 for (int i = 0; i < rateStrings.length; i++) { 1732 rateRanges[i] = Utils.parseIntRange(rateStrings[i], null); 1733 } 1734 limitSampleRates(rateRanges); 1735 } 1736 1737 // we will prefer channel-ranges over max-channel-count 1738 if (info.containsKey("channel-ranges")) { 1739 String[] channelStrings = info.getString("channel-ranges").split(","); 1740 Range<Integer>[] channelRanges = new Range[channelStrings.length]; 1741 for (int i = 0; i < channelStrings.length; i++) { 1742 channelRanges[i] = Utils.parseIntRange(channelStrings[i], null); 1743 } 1744 channels = channelRanges; 1745 } else if (info.containsKey("channel-range")) { 1746 Range<Integer> oneRange = Utils.parseIntRange(info.getString("channel-range"), 1747 null); 1748 channels = new Range[] { oneRange }; 1749 } else if (info.containsKey("max-channel-count")) { 1750 maxInputChannels = Utils.parseIntSafely( 1751 info.getString("max-channel-count"), maxInputChannels); 1752 if (maxInputChannels == 0) { 1753 channels = new Range[] {Range.create(0, 0)}; 1754 } else { 1755 channels = new Range[] {Range.create(1, maxInputChannels)}; 1756 } 1757 } else if ((mParent.mError & ERROR_UNSUPPORTED) != 0) { 1758 maxInputChannels = 0; 1759 channels = new Range[] {Range.create(0, 0)}; 1760 } 1761 1762 if (info.containsKey("bitrate-range")) { 1763 bitRates = bitRates.intersect( 1764 Utils.parseIntRange(info.getString("bitrate-range"), bitRates)); 1765 } 1766 1767 applyLimits(channels, bitRates); 1768 } 1769 1770 /** @hide */ getDefaultFormat(MediaFormat format)1771 public void getDefaultFormat(MediaFormat format) { 1772 // report settings that have only a single choice 1773 if 
(mBitrateRange.getLower().equals(mBitrateRange.getUpper())) {
                format.setInteger(MediaFormat.KEY_BIT_RATE, mBitrateRange.getLower());
            }
            if (getMaxInputChannelCount() == 1) {
                // mono-only format
                format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
            }
            if (mSampleRates != null && mSampleRates.length == 1) {
                format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRates[0]);
            }
        }

        /* package private */
        // must not contain KEY_PROFILE
        static final Set<String> AUDIO_LEVEL_CRITICAL_FORMAT_KEYS = Set.of(
                // We don't set level-specific limits for audio codecs today. Key candidates would
                // be sample rate, bit rate or channel count.
                // MediaFormat.KEY_SAMPLE_RATE,
                // MediaFormat.KEY_CHANNEL_COUNT,
                // MediaFormat.KEY_BIT_RATE,
                MediaFormat.KEY_MIME);

        /** @hide */
        public boolean supportsFormat(MediaFormat format) {
            Map<String, Object> map = format.getMap();
            Integer sampleRate = (Integer)map.get(MediaFormat.KEY_SAMPLE_RATE);
            Integer channels = (Integer)map.get(MediaFormat.KEY_CHANNEL_COUNT);

            if (!supports(sampleRate, channels)) {
                return false;
            }

            if (!CodecCapabilities.supportsBitrate(mBitrateRange, format)) {
                return false;
            }

            // nothing to do for:
            // KEY_CHANNEL_MASK: codecs don't get this
            // KEY_IS_ADTS: required feature for all AAC decoders
            return true;
        }
    }

    /** @hide */
    @IntDef(prefix = {"SECURITY_MODEL_"}, value = {
            SECURITY_MODEL_SANDBOXED,
            SECURITY_MODEL_MEMORY_SAFE,
            SECURITY_MODEL_TRUSTED_CONTENT_ONLY,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface SecurityModel {}

    /**
     * In this model the codec is running in a sandboxed process. Even if
     * malicious content is fed to a codec in this model, the impact will
     * be contained in the sandboxed process.
     */
    @FlaggedApi(FLAG_IN_PROCESS_SW_AUDIO_CODEC)
    public static final int SECURITY_MODEL_SANDBOXED = 0;
    /**
     * In this model the codec is not running in a sandboxed process, but is
     * written in a memory-safe way. It typically means that the software
     * implementation of the codec is written in a memory-safe language such
     * as Rust.
     */
    @FlaggedApi(FLAG_IN_PROCESS_SW_AUDIO_CODEC)
    public static final int SECURITY_MODEL_MEMORY_SAFE = 1;
    /**
     * In this model the codec is suitable only for trusted content where
     * the input can be verified to be well-formed and no malicious actor
     * can alter it. For example, codecs in this model are not suitable
     * for arbitrary media downloaded from the internet or present in a user
     * directory. On the other hand, they could be suitable for media encoded
     * in a backend that the app developer wholly controls.
     * <p>
     * Codecs with this security model are not included in
     * {@link MediaCodecList#REGULAR_CODECS}, but are included in
     * {@link MediaCodecList#ALL_CODECS}.
     */
    @FlaggedApi(FLAG_IN_PROCESS_SW_AUDIO_CODEC)
    public static final int SECURITY_MODEL_TRUSTED_CONTENT_ONLY = 2;

    /**
     * Query the security model of the codec.
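     * <p>
     * For example (an illustrative sketch; {@code codecInfo} is assumed to be a
     * {@link MediaCodecInfo} obtained from {@link MediaCodecList}), an application that only
     * wants to feed untrusted content to sandboxed codecs could check, where this API is
     * available:
     * <pre>
     * if (codecInfo.getSecurityModel() == MediaCodecInfo.SECURITY_MODEL_SANDBOXED) {
     *     // acceptable for arbitrary, untrusted media
     * }</pre>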
1857 */ 1858 @FlaggedApi(FLAG_IN_PROCESS_SW_AUDIO_CODEC) 1859 @SecurityModel getSecurityModel()1860 public int getSecurityModel() { 1861 // TODO b/297922713 --- detect security model of out-of-sandbox codecs 1862 return SECURITY_MODEL_SANDBOXED; 1863 } 1864 1865 /** 1866 * A class that supports querying the video capabilities of a codec. 1867 */ 1868 public static final class VideoCapabilities { 1869 private static final String TAG = "VideoCapabilities"; 1870 private CodecCapabilities mParent; 1871 private Range<Integer> mBitrateRange; 1872 1873 private Range<Integer> mHeightRange; 1874 private Range<Integer> mWidthRange; 1875 private Range<Integer> mBlockCountRange; 1876 private Range<Integer> mHorizontalBlockRange; 1877 private Range<Integer> mVerticalBlockRange; 1878 private Range<Rational> mAspectRatioRange; 1879 private Range<Rational> mBlockAspectRatioRange; 1880 private Range<Long> mBlocksPerSecondRange; 1881 private Map<Size, Range<Long>> mMeasuredFrameRates; 1882 private List<PerformancePoint> mPerformancePoints; 1883 private Range<Integer> mFrameRateRange; 1884 1885 private int mBlockWidth; 1886 private int mBlockHeight; 1887 private int mWidthAlignment; 1888 private int mHeightAlignment; 1889 private int mSmallerDimensionUpperLimit; 1890 1891 private boolean mAllowMbOverride; // allow XML to override calculated limits 1892 1893 /** 1894 * Returns the range of supported bitrates in bits/second. 1895 */ getBitrateRange()1896 public Range<Integer> getBitrateRange() { 1897 return mBitrateRange; 1898 } 1899 1900 /** 1901 * Returns the range of supported video widths. 1902 * <p class=note> 1903 * 32-bit processes will not support resolutions larger than 4096x4096 due to 1904 * the limited address space. 1905 */ getSupportedWidths()1906 public Range<Integer> getSupportedWidths() { 1907 return mWidthRange; 1908 } 1909 1910 /** 1911 * Returns the range of supported video heights. 1912 * <p class=note> 1913 * 32-bit processes will not support resolutions larger than 4096x4096 due to 1914 * the limited address space. 1915 */ getSupportedHeights()1916 public Range<Integer> getSupportedHeights() { 1917 return mHeightRange; 1918 } 1919 1920 /** 1921 * Returns the alignment requirement for video width (in pixels). 1922 * 1923 * This is a power-of-2 value that video width must be a 1924 * multiple of. 1925 */ getWidthAlignment()1926 public int getWidthAlignment() { 1927 return mWidthAlignment; 1928 } 1929 1930 /** 1931 * Returns the alignment requirement for video height (in pixels). 1932 * 1933 * This is a power-of-2 value that video height must be a 1934 * multiple of. 1935 */ getHeightAlignment()1936 public int getHeightAlignment() { 1937 return mHeightAlignment; 1938 } 1939 1940 /** 1941 * Return the upper limit on the smaller dimension of width or height. 1942 * <p></p> 1943 * Some codecs have a limit on the smaller dimension, whether it be 1944 * the width or the height. E.g. a codec may only be able to handle 1945 * up to 1920x1080 both in landscape and portrait mode (1080x1920). 1946 * In this case the maximum width and height are both 1920, but the 1947 * smaller dimension limit will be 1080. For other codecs, this is 1948 * {@code Math.min(getSupportedWidths().getUpper(), 1949 * getSupportedHeights().getUpper())}. 1950 * 1951 * @hide 1952 */ getSmallerDimensionUpperLimit()1953 public int getSmallerDimensionUpperLimit() { 1954 return mSmallerDimensionUpperLimit; 1955 } 1956 1957 /** 1958 * Returns the range of supported frame rates. 
1959 * <p> 1960 * This is not a performance indicator. Rather, it expresses the 1961 * limits specified in the coding standard, based on the complexities 1962 * of encoding material for later playback at a certain frame rate, 1963 * or the decoding of such material in non-realtime. 1964 */ getSupportedFrameRates()1965 public Range<Integer> getSupportedFrameRates() { 1966 return mFrameRateRange; 1967 } 1968 1969 /** 1970 * Returns the range of supported video widths for a video height. 1971 * @param height the height of the video 1972 */ getSupportedWidthsFor(int height)1973 public Range<Integer> getSupportedWidthsFor(int height) { 1974 try { 1975 Range<Integer> range = mWidthRange; 1976 if (!mHeightRange.contains(height) 1977 || (height % mHeightAlignment) != 0) { 1978 throw new IllegalArgumentException("unsupported height"); 1979 } 1980 final int heightInBlocks = Utils.divUp(height, mBlockHeight); 1981 1982 // constrain by block count and by block aspect ratio 1983 final int minWidthInBlocks = Math.max( 1984 Utils.divUp(mBlockCountRange.getLower(), heightInBlocks), 1985 (int)Math.ceil(mBlockAspectRatioRange.getLower().doubleValue() 1986 * heightInBlocks)); 1987 final int maxWidthInBlocks = Math.min( 1988 mBlockCountRange.getUpper() / heightInBlocks, 1989 (int)(mBlockAspectRatioRange.getUpper().doubleValue() 1990 * heightInBlocks)); 1991 range = range.intersect( 1992 (minWidthInBlocks - 1) * mBlockWidth + mWidthAlignment, 1993 maxWidthInBlocks * mBlockWidth); 1994 1995 // constrain by smaller dimension limit 1996 if (height > mSmallerDimensionUpperLimit) { 1997 range = range.intersect(1, mSmallerDimensionUpperLimit); 1998 } 1999 2000 // constrain by aspect ratio 2001 range = range.intersect( 2002 (int)Math.ceil(mAspectRatioRange.getLower().doubleValue() 2003 * height), 2004 (int)(mAspectRatioRange.getUpper().doubleValue() * height)); 2005 return range; 2006 } catch (IllegalArgumentException e) { 2007 // height is not supported because there are no suitable widths 2008 Log.v(TAG, "could not get supported widths for " + height); 2009 throw new IllegalArgumentException("unsupported height"); 2010 } 2011 } 2012 2013 /** 2014 * Returns the range of supported video heights for a video width 2015 * @param width the width of the video 2016 */ getSupportedHeightsFor(int width)2017 public Range<Integer> getSupportedHeightsFor(int width) { 2018 try { 2019 Range<Integer> range = mHeightRange; 2020 if (!mWidthRange.contains(width) 2021 || (width % mWidthAlignment) != 0) { 2022 throw new IllegalArgumentException("unsupported width"); 2023 } 2024 final int widthInBlocks = Utils.divUp(width, mBlockWidth); 2025 2026 // constrain by block count and by block aspect ratio 2027 final int minHeightInBlocks = Math.max( 2028 Utils.divUp(mBlockCountRange.getLower(), widthInBlocks), 2029 (int)Math.ceil(widthInBlocks / 2030 mBlockAspectRatioRange.getUpper().doubleValue())); 2031 final int maxHeightInBlocks = Math.min( 2032 mBlockCountRange.getUpper() / widthInBlocks, 2033 (int)(widthInBlocks / 2034 mBlockAspectRatioRange.getLower().doubleValue())); 2035 range = range.intersect( 2036 (minHeightInBlocks - 1) * mBlockHeight + mHeightAlignment, 2037 maxHeightInBlocks * mBlockHeight); 2038 2039 // constrain by smaller dimension limit 2040 if (width > mSmallerDimensionUpperLimit) { 2041 range = range.intersect(1, mSmallerDimensionUpperLimit); 2042 } 2043 2044 // constrain by aspect ratio 2045 range = range.intersect( 2046 (int)Math.ceil(width / 2047 mAspectRatioRange.getUpper().doubleValue()), 2048 (int)(width / 
mAspectRatioRange.getLower().doubleValue())); 2049 return range; 2050 } catch (IllegalArgumentException e) { 2051 // width is not supported because there are no suitable heights 2052 Log.v(TAG, "could not get supported heights for " + width); 2053 throw new IllegalArgumentException("unsupported width"); 2054 } 2055 } 2056 2057 /** 2058 * Returns the range of supported video frame rates for a video size. 2059 * <p> 2060 * This is not a performance indicator. Rather, it expresses the limits specified in 2061 * the coding standard, based on the complexities of encoding material of a given 2062 * size for later playback at a certain frame rate, or the decoding of such material 2063 * in non-realtime. 2064 2065 * @param width the width of the video 2066 * @param height the height of the video 2067 */ getSupportedFrameRatesFor(int width, int height)2068 public Range<Double> getSupportedFrameRatesFor(int width, int height) { 2069 Range<Integer> range = mHeightRange; 2070 if (!supports(width, height, null)) { 2071 throw new IllegalArgumentException("unsupported size"); 2072 } 2073 final int blockCount = 2074 Utils.divUp(width, mBlockWidth) * Utils.divUp(height, mBlockHeight); 2075 2076 return Range.create( 2077 Math.max(mBlocksPerSecondRange.getLower() / (double) blockCount, 2078 (double) mFrameRateRange.getLower()), 2079 Math.min(mBlocksPerSecondRange.getUpper() / (double) blockCount, 2080 (double) mFrameRateRange.getUpper())); 2081 } 2082 getBlockCount(int width, int height)2083 private int getBlockCount(int width, int height) { 2084 return Utils.divUp(width, mBlockWidth) * Utils.divUp(height, mBlockHeight); 2085 } 2086 2087 @NonNull findClosestSize(int width, int height)2088 private Size findClosestSize(int width, int height) { 2089 int targetBlockCount = getBlockCount(width, height); 2090 Size closestSize = null; 2091 int minDiff = Integer.MAX_VALUE; 2092 for (Size size : mMeasuredFrameRates.keySet()) { 2093 int diff = Math.abs(targetBlockCount - 2094 getBlockCount(size.getWidth(), size.getHeight())); 2095 if (diff < minDiff) { 2096 minDiff = diff; 2097 closestSize = size; 2098 } 2099 } 2100 return closestSize; 2101 } 2102 estimateFrameRatesFor(int width, int height)2103 private Range<Double> estimateFrameRatesFor(int width, int height) { 2104 Size size = findClosestSize(width, height); 2105 Range<Long> range = mMeasuredFrameRates.get(size); 2106 Double ratio = getBlockCount(size.getWidth(), size.getHeight()) 2107 / (double)Math.max(getBlockCount(width, height), 1); 2108 return Range.create(range.getLower() * ratio, range.getUpper() * ratio); 2109 } 2110 2111 /** 2112 * Returns the range of achievable video frame rates for a video size. 2113 * May return {@code null}, if the codec did not publish any measurement 2114 * data. 2115 * <p> 2116 * This is a performance estimate provided by the device manufacturer based on statistical 2117 * sampling of full-speed decoding and encoding measurements in various configurations 2118 * of common video sizes supported by the codec. As such it should only be used to 2119 * compare individual codecs on the device. The value is not suitable for comparing 2120 * different devices or even different android releases for the same device. 2121 * <p> 2122 * <em>On {@link android.os.Build.VERSION_CODES#M} release</em> the returned range 2123 * corresponds to the fastest frame rates achieved in the tested configurations. As 2124 * such, it should not be used to gauge guaranteed or even average codec performance 2125 * on the device. 
2126 * <p> 2127 * <em>On {@link android.os.Build.VERSION_CODES#N} release</em> the returned range 2128 * corresponds closer to sustained performance <em>in tested configurations</em>. 2129 * One can expect to achieve sustained performance higher than the lower limit more than 2130 * 50% of the time, and higher than half of the lower limit at least 90% of the time 2131 * <em>in tested configurations</em>. 2132 * Conversely, one can expect performance lower than twice the upper limit at least 2133 * 90% of the time. 2134 * <p class=note> 2135 * Tested configurations use a single active codec. For use cases where multiple 2136 * codecs are active, applications can expect lower and in most cases significantly lower 2137 * performance. 2138 * <p class=note> 2139 * The returned range value is interpolated from the nearest frame size(s) tested. 2140 * Codec performance is severely impacted by other activity on the device as well 2141 * as environmental factors (such as battery level, temperature or power source), and can 2142 * vary significantly even in a steady environment. 2143 * <p class=note> 2144 * Use this method in cases where only codec performance matters, e.g. to evaluate if 2145 * a codec has any chance of meeting a performance target. Codecs are listed 2146 * in {@link MediaCodecList} in the preferred order as defined by the device 2147 * manufacturer. As such, applications should use the first suitable codec in the 2148 * list to achieve the best balance between power use and performance. 2149 * 2150 * @param width the width of the video 2151 * @param height the height of the video 2152 * 2153 * @throws IllegalArgumentException if the video size is not supported. 2154 */ 2155 @Nullable getAchievableFrameRatesFor(int width, int height)2156 public Range<Double> getAchievableFrameRatesFor(int width, int height) { 2157 if (!supports(width, height, null)) { 2158 throw new IllegalArgumentException("unsupported size"); 2159 } 2160 2161 if (mMeasuredFrameRates == null || mMeasuredFrameRates.size() <= 0) { 2162 Log.w(TAG, "Codec did not publish any measurement data."); 2163 return null; 2164 } 2165 2166 return estimateFrameRatesFor(width, height); 2167 } 2168 2169 /** 2170 * Video performance points are a set of standard performance points defined by number of 2171 * pixels, pixel rate and frame rate. Performance point represents an upper bound. This 2172 * means that it covers all performance points with fewer pixels, pixel rate and frame 2173 * rate. 2174 */ 2175 public static final class PerformancePoint { 2176 private Size mBlockSize; // codec block size in macroblocks 2177 private int mWidth; // width in macroblocks 2178 private int mHeight; // height in macroblocks 2179 private int mMaxFrameRate; // max frames per second 2180 private long mMaxMacroBlockRate; // max macro block rate 2181 2182 /** 2183 * Maximum number of macroblocks in the frame. 2184 * 2185 * Video frames are conceptually divided into 16-by-16 pixel blocks called macroblocks. 2186 * Most coding standards operate on these 16-by-16 pixel blocks; thus, codec performance 2187 * is characterized using such blocks. 2188 * 2189 * @hide 2190 */ 2191 @TestApi getMaxMacroBlocks()2192 public int getMaxMacroBlocks() { 2193 return saturateLongToInt(mWidth * (long)mHeight); 2194 } 2195 2196 /** 2197 * Maximum frame rate in frames per second. 
2198 * 2199 * @hide 2200 */ 2201 @TestApi getMaxFrameRate()2202 public int getMaxFrameRate() { 2203 return mMaxFrameRate; 2204 } 2205 2206 /** 2207 * Maximum number of macroblocks processed per second. 2208 * 2209 * @hide 2210 */ 2211 @TestApi getMaxMacroBlockRate()2212 public long getMaxMacroBlockRate() { 2213 return mMaxMacroBlockRate; 2214 } 2215 2216 /** Convert to a debug string */ toString()2217 public String toString() { 2218 int blockWidth = 16 * mBlockSize.getWidth(); 2219 int blockHeight = 16 * mBlockSize.getHeight(); 2220 int origRate = (int)Utils.divUp(mMaxMacroBlockRate, getMaxMacroBlocks()); 2221 String info = (mWidth * 16) + "x" + (mHeight * 16) + "@" + origRate; 2222 if (origRate < mMaxFrameRate) { 2223 info += ", max " + mMaxFrameRate + "fps"; 2224 } 2225 if (blockWidth > 16 || blockHeight > 16) { 2226 info += ", " + blockWidth + "x" + blockHeight + " blocks"; 2227 } 2228 return "PerformancePoint(" + info + ")"; 2229 } 2230 2231 @Override hashCode()2232 public int hashCode() { 2233 // only max frame rate must equal between performance points that equal to one 2234 // another 2235 return mMaxFrameRate; 2236 } 2237 2238 /** 2239 * Create a detailed performance point with custom max frame rate and macroblock size. 2240 * 2241 * @param width frame width in pixels 2242 * @param height frame height in pixels 2243 * @param frameRate frames per second for frame width and height 2244 * @param maxFrameRate maximum frames per second for any frame size 2245 * @param blockSize block size for codec implementation. Must be powers of two in both 2246 * width and height. 2247 * 2248 * @throws IllegalArgumentException if the blockSize dimensions are not powers of two. 2249 * 2250 * @hide 2251 */ 2252 @TestApi PerformancePoint( int width, int height, int frameRate, int maxFrameRate, @NonNull Size blockSize)2253 public PerformancePoint( 2254 int width, int height, int frameRate, int maxFrameRate, 2255 @NonNull Size blockSize) { 2256 checkPowerOfTwo(blockSize.getWidth(), "block width"); 2257 checkPowerOfTwo(blockSize.getHeight(), "block height"); 2258 2259 mBlockSize = new Size(Utils.divUp(blockSize.getWidth(), 16), 2260 Utils.divUp(blockSize.getHeight(), 16)); 2261 // these are guaranteed not to overflow as we decimate by 16 2262 mWidth = (int)(Utils.divUp(Math.max(1L, width), 2263 Math.max(blockSize.getWidth(), 16)) 2264 * mBlockSize.getWidth()); 2265 mHeight = (int)(Utils.divUp(Math.max(1L, height), 2266 Math.max(blockSize.getHeight(), 16)) 2267 * mBlockSize.getHeight()); 2268 mMaxFrameRate = Math.max(1, Math.max(frameRate, maxFrameRate)); 2269 mMaxMacroBlockRate = Math.max(1, frameRate) * getMaxMacroBlocks(); 2270 } 2271 2272 /** 2273 * Convert a performance point to a larger blocksize. 2274 * 2275 * @param pp performance point 2276 * @param blockSize block size for codec implementation 2277 * 2278 * @hide 2279 */ 2280 @TestApi PerformancePoint(@onNull PerformancePoint pp, @NonNull Size newBlockSize)2281 public PerformancePoint(@NonNull PerformancePoint pp, @NonNull Size newBlockSize) { 2282 this( 2283 pp.mWidth * 16, pp.mHeight * 16, 2284 // guaranteed not to overflow as these were multiplied at construction 2285 (int)Utils.divUp(pp.mMaxMacroBlockRate, pp.getMaxMacroBlocks()), 2286 pp.mMaxFrameRate, 2287 new Size(Math.max(newBlockSize.getWidth(), pp.mBlockSize.getWidth() * 16), 2288 Math.max(newBlockSize.getHeight(), pp.mBlockSize.getHeight() * 16)) 2289 ); 2290 } 2291 2292 /** 2293 * Create a performance point for a given frame size and frame rate. 
2294 * 2295 * @param width width of the frame in pixels 2296 * @param height height of the frame in pixels 2297 * @param frameRate frame rate in frames per second 2298 */ PerformancePoint(int width, int height, int frameRate)2299 public PerformancePoint(int width, int height, int frameRate) { 2300 this(width, height, frameRate, frameRate /* maxFrameRate */, new Size(16, 16)); 2301 } 2302 2303 /** Saturates a long value to int */ saturateLongToInt(long value)2304 private int saturateLongToInt(long value) { 2305 if (value < Integer.MIN_VALUE) { 2306 return Integer.MIN_VALUE; 2307 } else if (value > Integer.MAX_VALUE) { 2308 return Integer.MAX_VALUE; 2309 } else { 2310 return (int)value; 2311 } 2312 } 2313 2314 /* This method may overflow */ align(int value, int alignment)2315 private int align(int value, int alignment) { 2316 return Utils.divUp(value, alignment) * alignment; 2317 } 2318 2319 /** Checks that value is a power of two. */ checkPowerOfTwo2(int value, @NonNull String description)2320 private void checkPowerOfTwo2(int value, @NonNull String description) { 2321 if (value == 0 || (value & (value - 1)) != 0) { 2322 throw new IllegalArgumentException( 2323 description + " (" + value + ") must be a power of 2"); 2324 } 2325 } 2326 2327 /** 2328 * Checks whether the performance point covers a media format. 2329 * 2330 * @param format Stream format considered 2331 * 2332 * @return {@code true} if the performance point covers the format. 2333 */ covers(@onNull MediaFormat format)2334 public boolean covers(@NonNull MediaFormat format) { 2335 PerformancePoint other = new PerformancePoint( 2336 format.getInteger(MediaFormat.KEY_WIDTH, 0), 2337 format.getInteger(MediaFormat.KEY_HEIGHT, 0), 2338 // safely convert ceil(double) to int through float cast and Math.round 2339 Math.round((float)( 2340 Math.ceil(format.getNumber(MediaFormat.KEY_FRAME_RATE, 0) 2341 .doubleValue())))); 2342 return covers(other); 2343 } 2344 2345 /** 2346 * Checks whether the performance point covers another performance point. Use this 2347 * method to determine if a performance point advertised by a codec covers the 2348 * performance point required. This method can also be used for loose ordering as this 2349 * method is transitive. 2350 * 2351 * @param other other performance point considered 2352 * 2353 * @return {@code true} if the performance point covers the other. 
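             *
             * For example (illustrative), {@code UHD_60.covers(FHD_30)} is {@code true}, while
             * {@code FHD_60.covers(HD_120)} is {@code false}: although 1280x720 is a smaller
             * frame size, 120 frames per second exceeds the 60 fps bound of the 1080p point.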
2354 */ covers(@onNull PerformancePoint other)2355 public boolean covers(@NonNull PerformancePoint other) { 2356 // convert performance points to common block size 2357 Size commonSize = getCommonBlockSize(other); 2358 PerformancePoint aligned = new PerformancePoint(this, commonSize); 2359 PerformancePoint otherAligned = new PerformancePoint(other, commonSize); 2360 2361 return (aligned.getMaxMacroBlocks() >= otherAligned.getMaxMacroBlocks() 2362 && aligned.mMaxFrameRate >= otherAligned.mMaxFrameRate 2363 && aligned.mMaxMacroBlockRate >= otherAligned.mMaxMacroBlockRate); 2364 } 2365 getCommonBlockSize(@onNull PerformancePoint other)2366 private @NonNull Size getCommonBlockSize(@NonNull PerformancePoint other) { 2367 return new Size( 2368 Math.max(mBlockSize.getWidth(), other.mBlockSize.getWidth()) * 16, 2369 Math.max(mBlockSize.getHeight(), other.mBlockSize.getHeight()) * 16); 2370 } 2371 2372 @Override equals(Object o)2373 public boolean equals(Object o) { 2374 if (o instanceof PerformancePoint) { 2375 // convert performance points to common block size 2376 PerformancePoint other = (PerformancePoint)o; 2377 Size commonSize = getCommonBlockSize(other); 2378 PerformancePoint aligned = new PerformancePoint(this, commonSize); 2379 PerformancePoint otherAligned = new PerformancePoint(other, commonSize); 2380 2381 return (aligned.getMaxMacroBlocks() == otherAligned.getMaxMacroBlocks() 2382 && aligned.mMaxFrameRate == otherAligned.mMaxFrameRate 2383 && aligned.mMaxMacroBlockRate == otherAligned.mMaxMacroBlockRate); 2384 } 2385 return false; 2386 } 2387 2388 /** 480p 24fps */ 2389 @NonNull 2390 public static final PerformancePoint SD_24 = new PerformancePoint(720, 480, 24); 2391 /** 576p 25fps */ 2392 @NonNull 2393 public static final PerformancePoint SD_25 = new PerformancePoint(720, 576, 25); 2394 /** 480p 30fps */ 2395 @NonNull 2396 public static final PerformancePoint SD_30 = new PerformancePoint(720, 480, 30); 2397 /** 480p 48fps */ 2398 @NonNull 2399 public static final PerformancePoint SD_48 = new PerformancePoint(720, 480, 48); 2400 /** 576p 50fps */ 2401 @NonNull 2402 public static final PerformancePoint SD_50 = new PerformancePoint(720, 576, 50); 2403 /** 480p 60fps */ 2404 @NonNull 2405 public static final PerformancePoint SD_60 = new PerformancePoint(720, 480, 60); 2406 2407 /** 720p 24fps */ 2408 @NonNull 2409 public static final PerformancePoint HD_24 = new PerformancePoint(1280, 720, 24); 2410 /** 720p 25fps */ 2411 @NonNull 2412 public static final PerformancePoint HD_25 = new PerformancePoint(1280, 720, 25); 2413 /** 720p 30fps */ 2414 @NonNull 2415 public static final PerformancePoint HD_30 = new PerformancePoint(1280, 720, 30); 2416 /** 720p 50fps */ 2417 @NonNull 2418 public static final PerformancePoint HD_50 = new PerformancePoint(1280, 720, 50); 2419 /** 720p 60fps */ 2420 @NonNull 2421 public static final PerformancePoint HD_60 = new PerformancePoint(1280, 720, 60); 2422 /** 720p 100fps */ 2423 @NonNull 2424 public static final PerformancePoint HD_100 = new PerformancePoint(1280, 720, 100); 2425 /** 720p 120fps */ 2426 @NonNull 2427 public static final PerformancePoint HD_120 = new PerformancePoint(1280, 720, 120); 2428 /** 720p 200fps */ 2429 @NonNull 2430 public static final PerformancePoint HD_200 = new PerformancePoint(1280, 720, 200); 2431 /** 720p 240fps */ 2432 @NonNull 2433 public static final PerformancePoint HD_240 = new PerformancePoint(1280, 720, 240); 2434 2435 /** 1080p 24fps */ 2436 @NonNull 2437 public static final PerformancePoint FHD_24 = new 
PerformancePoint(1920, 1080, 24);
            /** 1080p 25fps */
            @NonNull
            public static final PerformancePoint FHD_25 = new PerformancePoint(1920, 1080, 25);
            /** 1080p 30fps */
            @NonNull
            public static final PerformancePoint FHD_30 = new PerformancePoint(1920, 1080, 30);
            /** 1080p 50fps */
            @NonNull
            public static final PerformancePoint FHD_50 = new PerformancePoint(1920, 1080, 50);
            /** 1080p 60fps */
            @NonNull
            public static final PerformancePoint FHD_60 = new PerformancePoint(1920, 1080, 60);
            /** 1080p 100fps */
            @NonNull
            public static final PerformancePoint FHD_100 = new PerformancePoint(1920, 1080, 100);
            /** 1080p 120fps */
            @NonNull
            public static final PerformancePoint FHD_120 = new PerformancePoint(1920, 1080, 120);
            /** 1080p 200fps */
            @NonNull
            public static final PerformancePoint FHD_200 = new PerformancePoint(1920, 1080, 200);
            /** 1080p 240fps */
            @NonNull
            public static final PerformancePoint FHD_240 = new PerformancePoint(1920, 1080, 240);

            /** 2160p 24fps */
            @NonNull
            public static final PerformancePoint UHD_24 = new PerformancePoint(3840, 2160, 24);
            /** 2160p 25fps */
            @NonNull
            public static final PerformancePoint UHD_25 = new PerformancePoint(3840, 2160, 25);
            /** 2160p 30fps */
            @NonNull
            public static final PerformancePoint UHD_30 = new PerformancePoint(3840, 2160, 30);
            /** 2160p 50fps */
            @NonNull
            public static final PerformancePoint UHD_50 = new PerformancePoint(3840, 2160, 50);
            /** 2160p 60fps */
            @NonNull
            public static final PerformancePoint UHD_60 = new PerformancePoint(3840, 2160, 60);
            /** 2160p 100fps */
            @NonNull
            public static final PerformancePoint UHD_100 = new PerformancePoint(3840, 2160, 100);
            /** 2160p 120fps */
            @NonNull
            public static final PerformancePoint UHD_120 = new PerformancePoint(3840, 2160, 120);
            /** 2160p 200fps */
            @NonNull
            public static final PerformancePoint UHD_200 = new PerformancePoint(3840, 2160, 200);
            /** 2160p 240fps */
            @NonNull
            public static final PerformancePoint UHD_240 = new PerformancePoint(3840, 2160, 240);
        }

        /**
         * Returns the supported performance points. May return {@code null} if the codec did not
         * publish any performance point information (e.g. the vendor codecs have not been updated
         * to the latest Android release). May return an empty list if the codec published that
         * it does not guarantee any performance points.
         * <p>
         * This is a performance guarantee provided by the device manufacturer for hardware codecs
         * based on hardware capabilities of the device.
         * <p>
         * The returned list is sorted first by decreasing number of pixels, then by decreasing
         * width, and finally by decreasing frame rate.
         * Performance points assume a single active codec. For use cases where multiple
         * codecs are active, applications should use the highest pixel count, and add the frame
         * rates of each individual codec.
         * <p class=note>
         * 32-bit processes will not support resolutions larger than 4096x4096 due to
         * the limited address space, but performance points will be presented as is.
         * In other words, even though a component publishes a performance point for
         * a resolution higher than 4096x4096, it does not mean that the resolution is supported
         * for 32-bit processes.
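         * <p>
         * A minimal usage sketch (illustrative only; {@code videoCaps} is assumed to be
         * obtained via {@link CodecCapabilities#getVideoCapabilities}):
         * <pre>
         * boolean guaranteed1080p60 = false;
         * List&lt;PerformancePoint&gt; points = videoCaps.getSupportedPerformancePoints();
         * if (points != null) {
         *     for (PerformancePoint point : points) {
         *         if (point.covers(PerformancePoint.FHD_60)) {
         *             guaranteed1080p60 = true;
         *             break;
         *         }
         *     }
         * }</pre>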
2512 */ 2513 @Nullable getSupportedPerformancePoints()2514 public List<PerformancePoint> getSupportedPerformancePoints() { 2515 return mPerformancePoints; 2516 } 2517 2518 /** 2519 * Returns whether a given video size ({@code width} and 2520 * {@code height}) and {@code frameRate} combination is supported. 2521 */ areSizeAndRateSupported( int width, int height, double frameRate)2522 public boolean areSizeAndRateSupported( 2523 int width, int height, double frameRate) { 2524 return supports(width, height, frameRate); 2525 } 2526 2527 /** 2528 * Returns whether a given video size ({@code width} and 2529 * {@code height}) is supported. 2530 */ isSizeSupported(int width, int height)2531 public boolean isSizeSupported(int width, int height) { 2532 return supports(width, height, null); 2533 } 2534 supports(Integer width, Integer height, Number rate)2535 private boolean supports(Integer width, Integer height, Number rate) { 2536 boolean ok = true; 2537 2538 if (ok && width != null) { 2539 ok = mWidthRange.contains(width) 2540 && (width % mWidthAlignment == 0); 2541 } 2542 if (ok && height != null) { 2543 ok = mHeightRange.contains(height) 2544 && (height % mHeightAlignment == 0); 2545 } 2546 if (ok && rate != null) { 2547 ok = mFrameRateRange.contains(Utils.intRangeFor(rate.doubleValue())); 2548 } 2549 if (ok && height != null && width != null) { 2550 ok = Math.min(height, width) <= mSmallerDimensionUpperLimit; 2551 2552 final int widthInBlocks = Utils.divUp(width, mBlockWidth); 2553 final int heightInBlocks = Utils.divUp(height, mBlockHeight); 2554 final int blockCount = widthInBlocks * heightInBlocks; 2555 ok = ok && mBlockCountRange.contains(blockCount) 2556 && mBlockAspectRatioRange.contains( 2557 new Rational(widthInBlocks, heightInBlocks)) 2558 && mAspectRatioRange.contains(new Rational(width, height)); 2559 if (ok && rate != null) { 2560 double blocksPerSec = blockCount * rate.doubleValue(); 2561 ok = mBlocksPerSecondRange.contains( 2562 Utils.longRangeFor(blocksPerSec)); 2563 } 2564 } 2565 return ok; 2566 } 2567 2568 /* package private */ 2569 // must not contain KEY_PROFILE 2570 static final Set<String> VIDEO_LEVEL_CRITICAL_FORMAT_KEYS = Set.of( 2571 MediaFormat.KEY_WIDTH, 2572 MediaFormat.KEY_HEIGHT, 2573 MediaFormat.KEY_FRAME_RATE, 2574 MediaFormat.KEY_BIT_RATE, 2575 MediaFormat.KEY_MIME); 2576 2577 /** 2578 * @hide 2579 * @throws java.lang.ClassCastException */ supportsFormat(MediaFormat format)2580 public boolean supportsFormat(MediaFormat format) { 2581 final Map<String, Object> map = format.getMap(); 2582 Integer width = (Integer)map.get(MediaFormat.KEY_WIDTH); 2583 Integer height = (Integer)map.get(MediaFormat.KEY_HEIGHT); 2584 Number rate = (Number)map.get(MediaFormat.KEY_FRAME_RATE); 2585 2586 if (!supports(width, height, rate)) { 2587 return false; 2588 } 2589 2590 if (!CodecCapabilities.supportsBitrate(mBitrateRange, format)) { 2591 return false; 2592 } 2593 2594 // we ignore color-format for now as it is not reliably reported by codec 2595 return true; 2596 } 2597 2598 /* no public constructor */ VideoCapabilities()2599 private VideoCapabilities() { } 2600 2601 /** @hide */ 2602 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.P, trackingBug = 115609023) create( MediaFormat info, CodecCapabilities parent)2603 public static VideoCapabilities create( 2604 MediaFormat info, CodecCapabilities parent) { 2605 VideoCapabilities caps = new VideoCapabilities(); 2606 caps.init(info, parent); 2607 return caps; 2608 } 2609 init(MediaFormat info, CodecCapabilities parent)2610 
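        // Initialization order: start from platform-wide limits, narrow them using the
        // codec's declared profile/levels, then apply the published info (XML) values and
        // finally recompute the derived block/frame-rate ranges.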
private void init(MediaFormat info, CodecCapabilities parent) { 2611 mParent = parent; 2612 initWithPlatformLimits(); 2613 applyLevelLimits(); 2614 parseFromInfo(info); 2615 updateLimits(); 2616 } 2617 2618 /** @hide */ getBlockSize()2619 public Size getBlockSize() { 2620 return new Size(mBlockWidth, mBlockHeight); 2621 } 2622 2623 /** @hide */ getBlockCountRange()2624 public Range<Integer> getBlockCountRange() { 2625 return mBlockCountRange; 2626 } 2627 2628 /** @hide */ getBlocksPerSecondRange()2629 public Range<Long> getBlocksPerSecondRange() { 2630 return mBlocksPerSecondRange; 2631 } 2632 2633 /** @hide */ getAspectRatioRange(boolean blocks)2634 public Range<Rational> getAspectRatioRange(boolean blocks) { 2635 return blocks ? mBlockAspectRatioRange : mAspectRatioRange; 2636 } 2637 initWithPlatformLimits()2638 private void initWithPlatformLimits() { 2639 mBitrateRange = BITRATE_RANGE; 2640 2641 mWidthRange = getSizeRange(); 2642 mHeightRange = getSizeRange(); 2643 mFrameRateRange = FRAME_RATE_RANGE; 2644 2645 mHorizontalBlockRange = getSizeRange(); 2646 mVerticalBlockRange = getSizeRange(); 2647 2648 // full positive ranges are supported as these get calculated 2649 mBlockCountRange = POSITIVE_INTEGERS; 2650 mBlocksPerSecondRange = POSITIVE_LONGS; 2651 2652 mBlockAspectRatioRange = POSITIVE_RATIONALS; 2653 mAspectRatioRange = POSITIVE_RATIONALS; 2654 2655 mWidthAlignment = 1; 2656 mHeightAlignment = 1; 2657 mBlockWidth = 1; 2658 mBlockHeight = 1; 2659 mSmallerDimensionUpperLimit = getSizeRange().getUpper(); 2660 } 2661 getPerformancePoints(Map<String, Object> map)2662 private @Nullable List<PerformancePoint> getPerformancePoints(Map<String, Object> map) { 2663 Vector<PerformancePoint> ret = new Vector<>(); 2664 final String prefix = "performance-point-"; 2665 Set<String> keys = map.keySet(); 2666 for (String key : keys) { 2667 // looking for: performance-point-WIDTHxHEIGHT-range 2668 if (!key.startsWith(prefix)) { 2669 continue; 2670 } 2671 String subKey = key.substring(prefix.length()); 2672 if (subKey.equals("none") && ret.size() == 0) { 2673 // This means that component knowingly did not publish performance points. 2674 // This is different from when the component forgot to publish performance 2675 // points. 2676 return Collections.unmodifiableList(ret); 2677 } 2678 String[] temp = key.split("-"); 2679 if (temp.length != 4) { 2680 continue; 2681 } 2682 String sizeStr = temp[2]; 2683 Size size = Utils.parseSize(sizeStr, null); 2684 if (size == null || size.getWidth() * size.getHeight() <= 0) { 2685 continue; 2686 } 2687 Range<Long> range = Utils.parseLongRange(map.get(key), null); 2688 if (range == null || range.getLower() < 0 || range.getUpper() < 0) { 2689 continue; 2690 } 2691 PerformancePoint given = new PerformancePoint( 2692 size.getWidth(), size.getHeight(), range.getLower().intValue(), 2693 range.getUpper().intValue(), new Size(mBlockWidth, mBlockHeight)); 2694 PerformancePoint rotated = new PerformancePoint( 2695 size.getHeight(), size.getWidth(), range.getLower().intValue(), 2696 range.getUpper().intValue(), new Size(mBlockWidth, mBlockHeight)); 2697 ret.add(given); 2698 if (!given.covers(rotated)) { 2699 ret.add(rotated); 2700 } 2701 } 2702 2703 // check if the component specified no performance point indication 2704 if (ret.size() == 0) { 2705 return null; 2706 } 2707 2708 // sort reversed by area first, then by frame rate 2709 ret.sort((a, b) -> 2710 -((a.getMaxMacroBlocks() != b.getMaxMacroBlocks()) ? 2711 (a.getMaxMacroBlocks() < b.getMaxMacroBlocks() ? 
-1 : 1) : 2712 (a.getMaxMacroBlockRate() != b.getMaxMacroBlockRate()) ? 2713 (a.getMaxMacroBlockRate() < b.getMaxMacroBlockRate() ? -1 : 1) : 2714 (a.getMaxFrameRate() != b.getMaxFrameRate()) ? 2715 (a.getMaxFrameRate() < b.getMaxFrameRate() ? -1 : 1) : 0)); 2716 2717 return Collections.unmodifiableList(ret); 2718 } 2719 2720 private Map<Size, Range<Long>> getMeasuredFrameRates(Map<String, Object> map) { 2721 Map<Size, Range<Long>> ret = new HashMap<Size, Range<Long>>(); 2722 final String prefix = "measured-frame-rate-"; 2723 Set<String> keys = map.keySet(); 2724 for (String key : keys) { 2725 // looking for: measured-frame-rate-WIDTHxHEIGHT-range 2726 if (!key.startsWith(prefix)) { 2727 continue; 2728 } 2729 String subKey = key.substring(prefix.length()); 2730 String[] temp = key.split("-"); 2731 if (temp.length != 5) { 2732 continue; 2733 } 2734 String sizeStr = temp[3]; 2735 Size size = Utils.parseSize(sizeStr, null); 2736 if (size == null || size.getWidth() * size.getHeight() <= 0) { 2737 continue; 2738 } 2739 Range<Long> range = Utils.parseLongRange(map.get(key), null); 2740 if (range == null || range.getLower() < 0 || range.getUpper() < 0) { 2741 continue; 2742 } 2743 ret.put(size, range); 2744 } 2745 return ret; 2746 } 2747 2748 private static Pair<Range<Integer>, Range<Integer>> parseWidthHeightRanges(Object o) { 2749 Pair<Size, Size> range = Utils.parseSizeRange(o); 2750 if (range != null) { 2751 try { 2752 return Pair.create( 2753 Range.create(range.first.getWidth(), range.second.getWidth()), 2754 Range.create(range.first.getHeight(), range.second.getHeight())); 2755 } catch (IllegalArgumentException e) { 2756 Log.w(TAG, "could not parse size range '" + o + "'"); 2757 } 2758 } 2759 return null; 2760 } 2761 2762 /** @hide */ 2763 public static int equivalentVP9Level(MediaFormat info) { 2764 final Map<String, Object> map = info.getMap(); 2765 2766 Size blockSize = Utils.parseSize(map.get("block-size"), new Size(8, 8)); 2767 int BS = blockSize.getWidth() * blockSize.getHeight(); 2768 2769 Range<Integer> counts = Utils.parseIntRange(map.get("block-count-range"), null); 2770 int FS = counts == null ? 0 : BS * counts.getUpper(); 2771 2772 Range<Long> blockRates = 2773 Utils.parseLongRange(map.get("blocks-per-second-range"), null); 2774 long SR = blockRates == null ? 0 : BS * blockRates.getUpper(); 2775 2776 Pair<Range<Integer>, Range<Integer>> dimensionRanges = 2777 parseWidthHeightRanges(map.get("size-range")); 2778 int D = dimensionRanges == null ? 0 : Math.max( 2779 dimensionRanges.first.getUpper(), dimensionRanges.second.getUpper()); 2780 2781 Range<Integer> bitRates = Utils.parseIntRange(map.get("bitrate-range"), null); 2782 int BR = bitRates == null ? 
0 : Utils.divUp(bitRates.getUpper(), 1000); 2783 2784 if (SR <= 829440 && FS <= 36864 && BR <= 200 && D <= 512) 2785 return CodecProfileLevel.VP9Level1; 2786 if (SR <= 2764800 && FS <= 73728 && BR <= 800 && D <= 768) 2787 return CodecProfileLevel.VP9Level11; 2788 if (SR <= 4608000 && FS <= 122880 && BR <= 1800 && D <= 960) 2789 return CodecProfileLevel.VP9Level2; 2790 if (SR <= 9216000 && FS <= 245760 && BR <= 3600 && D <= 1344) 2791 return CodecProfileLevel.VP9Level21; 2792 if (SR <= 20736000 && FS <= 552960 && BR <= 7200 && D <= 2048) 2793 return CodecProfileLevel.VP9Level3; 2794 if (SR <= 36864000 && FS <= 983040 && BR <= 12000 && D <= 2752) 2795 return CodecProfileLevel.VP9Level31; 2796 if (SR <= 83558400 && FS <= 2228224 && BR <= 18000 && D <= 4160) 2797 return CodecProfileLevel.VP9Level4; 2798 if (SR <= 160432128 && FS <= 2228224 && BR <= 30000 && D <= 4160) 2799 return CodecProfileLevel.VP9Level41; 2800 if (SR <= 311951360 && FS <= 8912896 && BR <= 60000 && D <= 8384) 2801 return CodecProfileLevel.VP9Level5; 2802 if (SR <= 588251136 && FS <= 8912896 && BR <= 120000 && D <= 8384) 2803 return CodecProfileLevel.VP9Level51; 2804 if (SR <= 1176502272 && FS <= 8912896 && BR <= 180000 && D <= 8384) 2805 return CodecProfileLevel.VP9Level52; 2806 if (SR <= 1176502272 && FS <= 35651584 && BR <= 180000 && D <= 16832) 2807 return CodecProfileLevel.VP9Level6; 2808 if (SR <= 2353004544L && FS <= 35651584 && BR <= 240000 && D <= 16832) 2809 return CodecProfileLevel.VP9Level61; 2810 if (SR <= 4706009088L && FS <= 35651584 && BR <= 480000 && D <= 16832) 2811 return CodecProfileLevel.VP9Level62; 2812 // returning largest level 2813 return CodecProfileLevel.VP9Level62; 2814 } 2815 2816 private void parseFromInfo(MediaFormat info) { 2817 final Map<String, Object> map = info.getMap(); 2818 Size blockSize = new Size(mBlockWidth, mBlockHeight); 2819 Size alignment = new Size(mWidthAlignment, mHeightAlignment); 2820 Range<Integer> counts = null, widths = null, heights = null; 2821 Range<Integer> frameRates = null, bitRates = null; 2822 Range<Long> blockRates = null; 2823 Range<Rational> ratios = null, blockRatios = null; 2824 2825 blockSize = Utils.parseSize(map.get("block-size"), blockSize); 2826 alignment = Utils.parseSize(map.get("alignment"), alignment); 2827 counts = Utils.parseIntRange(map.get("block-count-range"), null); 2828 blockRates = 2829 Utils.parseLongRange(map.get("blocks-per-second-range"), null); 2830 mMeasuredFrameRates = getMeasuredFrameRates(map); 2831 mPerformancePoints = getPerformancePoints(map); 2832 Pair<Range<Integer>, Range<Integer>> sizeRanges = 2833 parseWidthHeightRanges(map.get("size-range")); 2834 if (sizeRanges != null) { 2835 widths = sizeRanges.first; 2836 heights = sizeRanges.second; 2837 } 2838 // for now this just means using the smaller max size as 2nd 2839 // upper limit. 2840 // for now we are keeping the profile specific "width/height 2841 // in macroblocks" limits. 
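            // If the codec advertises that it can swap width and height, the width and height
            // ranges are unified (their union applies to both dimensions) and the smaller of
            // the two original upper bounds becomes the limit on the frame's smaller dimension.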
2842 if (map.containsKey("feature-can-swap-width-height")) { 2843 if (widths != null) { 2844 mSmallerDimensionUpperLimit = 2845 Math.min(widths.getUpper(), heights.getUpper()); 2846 widths = heights = widths.extend(heights); 2847 } else { 2848 Log.w(TAG, "feature can-swap-width-height is best used with size-range"); 2849 mSmallerDimensionUpperLimit = 2850 Math.min(mWidthRange.getUpper(), mHeightRange.getUpper()); 2851 mWidthRange = mHeightRange = mWidthRange.extend(mHeightRange); 2852 } 2853 } 2854 2855 ratios = Utils.parseRationalRange( 2856 map.get("block-aspect-ratio-range"), null); 2857 blockRatios = Utils.parseRationalRange( 2858 map.get("pixel-aspect-ratio-range"), null); 2859 frameRates = Utils.parseIntRange(map.get("frame-rate-range"), null); 2860 if (frameRates != null) { 2861 try { 2862 frameRates = frameRates.intersect(FRAME_RATE_RANGE); 2863 } catch (IllegalArgumentException e) { 2864 Log.w(TAG, "frame rate range (" + frameRates 2865 + ") is out of limits: " + FRAME_RATE_RANGE); 2866 frameRates = null; 2867 } 2868 } 2869 bitRates = Utils.parseIntRange(map.get("bitrate-range"), null); 2870 if (bitRates != null) { 2871 try { 2872 bitRates = bitRates.intersect(BITRATE_RANGE); 2873 } catch (IllegalArgumentException e) { 2874 Log.w(TAG, "bitrate range (" + bitRates 2875 + ") is out of limits: " + BITRATE_RANGE); 2876 bitRates = null; 2877 } 2878 } 2879 2880 checkPowerOfTwo( 2881 blockSize.getWidth(), "block-size width must be power of two"); 2882 checkPowerOfTwo( 2883 blockSize.getHeight(), "block-size height must be power of two"); 2884 2885 checkPowerOfTwo( 2886 alignment.getWidth(), "alignment width must be power of two"); 2887 checkPowerOfTwo( 2888 alignment.getHeight(), "alignment height must be power of two"); 2889 2890 // update block-size and alignment 2891 applyMacroBlockLimits( 2892 Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, 2893 Long.MAX_VALUE, blockSize.getWidth(), blockSize.getHeight(), 2894 alignment.getWidth(), alignment.getHeight()); 2895 2896 if ((mParent.mError & ERROR_UNSUPPORTED) != 0 || mAllowMbOverride) { 2897 // codec supports profiles that we don't know. 
2898 // Use supplied values clipped to platform limits 2899 if (widths != null) { 2900 mWidthRange = getSizeRange().intersect(widths); 2901 } 2902 if (heights != null) { 2903 mHeightRange = getSizeRange().intersect(heights); 2904 } 2905 if (counts != null) { 2906 mBlockCountRange = POSITIVE_INTEGERS.intersect( 2907 Utils.factorRange(counts, mBlockWidth * mBlockHeight 2908 / blockSize.getWidth() / blockSize.getHeight())); 2909 } 2910 if (blockRates != null) { 2911 mBlocksPerSecondRange = POSITIVE_LONGS.intersect( 2912 Utils.factorRange(blockRates, mBlockWidth * mBlockHeight 2913 / blockSize.getWidth() / blockSize.getHeight())); 2914 } 2915 if (blockRatios != null) { 2916 mBlockAspectRatioRange = POSITIVE_RATIONALS.intersect( 2917 Utils.scaleRange(blockRatios, 2918 mBlockHeight / blockSize.getHeight(), 2919 mBlockWidth / blockSize.getWidth())); 2920 } 2921 if (ratios != null) { 2922 mAspectRatioRange = POSITIVE_RATIONALS.intersect(ratios); 2923 } 2924 if (frameRates != null) { 2925 mFrameRateRange = FRAME_RATE_RANGE.intersect(frameRates); 2926 } 2927 if (bitRates != null) { 2928 // only allow bitrate override if unsupported profiles were encountered 2929 if ((mParent.mError & ERROR_UNSUPPORTED) != 0) { 2930 mBitrateRange = BITRATE_RANGE.intersect(bitRates); 2931 } else { 2932 mBitrateRange = mBitrateRange.intersect(bitRates); 2933 } 2934 } 2935 } else { 2936 // no unsupported profile/levels, so restrict values to known limits 2937 if (widths != null) { 2938 mWidthRange = mWidthRange.intersect(widths); 2939 } 2940 if (heights != null) { 2941 mHeightRange = mHeightRange.intersect(heights); 2942 } 2943 if (counts != null) { 2944 mBlockCountRange = mBlockCountRange.intersect( 2945 Utils.factorRange(counts, mBlockWidth * mBlockHeight 2946 / blockSize.getWidth() / blockSize.getHeight())); 2947 } 2948 if (blockRates != null) { 2949 mBlocksPerSecondRange = mBlocksPerSecondRange.intersect( 2950 Utils.factorRange(blockRates, mBlockWidth * mBlockHeight 2951 / blockSize.getWidth() / blockSize.getHeight())); 2952 } 2953 if (blockRatios != null) { 2954 mBlockAspectRatioRange = mBlockAspectRatioRange.intersect( 2955 Utils.scaleRange(blockRatios, 2956 mBlockHeight / blockSize.getHeight(), 2957 mBlockWidth / blockSize.getWidth())); 2958 } 2959 if (ratios != null) { 2960 mAspectRatioRange = mAspectRatioRange.intersect(ratios); 2961 } 2962 if (frameRates != null) { 2963 mFrameRateRange = mFrameRateRange.intersect(frameRates); 2964 } 2965 if (bitRates != null) { 2966 mBitrateRange = mBitrateRange.intersect(bitRates); 2967 } 2968 } 2969 updateLimits(); 2970 } 2971 2972 private void applyBlockLimits( 2973 int blockWidth, int blockHeight, 2974 Range<Integer> counts, Range<Long> rates, Range<Rational> ratios) { 2975 checkPowerOfTwo(blockWidth, "blockWidth must be a power of two"); 2976 checkPowerOfTwo(blockHeight, "blockHeight must be a power of two"); 2977 2978 final int newBlockWidth = Math.max(blockWidth, mBlockWidth); 2979 final int newBlockHeight = Math.max(blockHeight, mBlockHeight); 2980 2981 // factor will always be a power-of-2 2982 int factor = 2983 newBlockWidth * newBlockHeight / mBlockWidth / mBlockHeight; 2984 if (factor != 1) { 2985 mBlockCountRange = Utils.factorRange(mBlockCountRange, factor); 2986 mBlocksPerSecondRange = Utils.factorRange( 2987 mBlocksPerSecondRange, factor); 2988 mBlockAspectRatioRange = Utils.scaleRange( 2989 mBlockAspectRatioRange, 2990 newBlockHeight / mBlockHeight, 2991 newBlockWidth / mBlockWidth); 2992 mHorizontalBlockRange = Utils.factorRange( 2993 mHorizontalBlockRange, 
newBlockWidth / mBlockWidth); 2994 mVerticalBlockRange = Utils.factorRange( 2995 mVerticalBlockRange, newBlockHeight / mBlockHeight); 2996 } 2997 factor = newBlockWidth * newBlockHeight / blockWidth / blockHeight; 2998 if (factor != 1) { 2999 counts = Utils.factorRange(counts, factor); 3000 rates = Utils.factorRange(rates, factor); 3001 ratios = Utils.scaleRange( 3002 ratios, newBlockHeight / blockHeight, 3003 newBlockWidth / blockWidth); 3004 } 3005 mBlockCountRange = mBlockCountRange.intersect(counts); 3006 mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(rates); 3007 mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(ratios); 3008 mBlockWidth = newBlockWidth; 3009 mBlockHeight = newBlockHeight; 3010 } 3011 3012 private void applyAlignment(int widthAlignment, int heightAlignment) { 3013 checkPowerOfTwo(widthAlignment, "widthAlignment must be a power of two"); 3014 checkPowerOfTwo(heightAlignment, "heightAlignment must be a power of two"); 3015 3016 if (widthAlignment > mBlockWidth || heightAlignment > mBlockHeight) { 3017 // maintain assumption that 0 < alignment <= block-size 3018 applyBlockLimits( 3019 Math.max(widthAlignment, mBlockWidth), 3020 Math.max(heightAlignment, mBlockHeight), 3021 POSITIVE_INTEGERS, POSITIVE_LONGS, POSITIVE_RATIONALS); 3022 } 3023 3024 mWidthAlignment = Math.max(widthAlignment, mWidthAlignment); 3025 mHeightAlignment = Math.max(heightAlignment, mHeightAlignment); 3026 3027 mWidthRange = Utils.alignRange(mWidthRange, mWidthAlignment); 3028 mHeightRange = Utils.alignRange(mHeightRange, mHeightAlignment); 3029 } 3030 3031 private void updateLimits() { 3032 // pixels -> blocks <- counts 3033 mHorizontalBlockRange = mHorizontalBlockRange.intersect( 3034 Utils.factorRange(mWidthRange, mBlockWidth)); 3035 mHorizontalBlockRange = mHorizontalBlockRange.intersect( 3036 Range.create( 3037 mBlockCountRange.getLower() / mVerticalBlockRange.getUpper(), 3038 mBlockCountRange.getUpper() / mVerticalBlockRange.getLower())); 3039 mVerticalBlockRange = mVerticalBlockRange.intersect( 3040 Utils.factorRange(mHeightRange, mBlockHeight)); 3041 mVerticalBlockRange = mVerticalBlockRange.intersect( 3042 Range.create( 3043 mBlockCountRange.getLower() / mHorizontalBlockRange.getUpper(), 3044 mBlockCountRange.getUpper() / mHorizontalBlockRange.getLower())); 3045 mBlockCountRange = mBlockCountRange.intersect( 3046 Range.create( 3047 mHorizontalBlockRange.getLower() 3048 * mVerticalBlockRange.getLower(), 3049 mHorizontalBlockRange.getUpper() 3050 * mVerticalBlockRange.getUpper())); 3051 mBlockAspectRatioRange = mBlockAspectRatioRange.intersect( 3052 new Rational( 3053 mHorizontalBlockRange.getLower(), mVerticalBlockRange.getUpper()), 3054 new Rational( 3055 mHorizontalBlockRange.getUpper(), mVerticalBlockRange.getLower())); 3056 3057 // blocks -> pixels 3058 mWidthRange = mWidthRange.intersect( 3059 (mHorizontalBlockRange.getLower() - 1) * mBlockWidth + mWidthAlignment, 3060 mHorizontalBlockRange.getUpper() * mBlockWidth); 3061 mHeightRange = mHeightRange.intersect( 3062 (mVerticalBlockRange.getLower() - 1) * mBlockHeight + mHeightAlignment, 3063 mVerticalBlockRange.getUpper() * mBlockHeight); 3064 mAspectRatioRange = mAspectRatioRange.intersect( 3065 new Rational(mWidthRange.getLower(), mHeightRange.getUpper()), 3066 new Rational(mWidthRange.getUpper(), mHeightRange.getLower())); 3067 3068 mSmallerDimensionUpperLimit = Math.min( 3069 mSmallerDimensionUpperLimit, 3070 Math.min(mWidthRange.getUpper(), mHeightRange.getUpper())); 3071 3072 // blocks -> rate 3073 
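// (descriptive note on the step below: the blocks-per-second range is clamped to blockCount * frameRate at both ends, and the frame-rate range is then re-derived from the tightened blocks-per-second bounds divided by the block-count bounds)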
mBlocksPerSecondRange = mBlocksPerSecondRange.intersect( 3074 mBlockCountRange.getLower() * (long)mFrameRateRange.getLower(), 3075 mBlockCountRange.getUpper() * (long)mFrameRateRange.getUpper()); 3076 mFrameRateRange = mFrameRateRange.intersect( 3077 (int)(mBlocksPerSecondRange.getLower() 3078 / mBlockCountRange.getUpper()), 3079 (int)(mBlocksPerSecondRange.getUpper() 3080 / (double)mBlockCountRange.getLower())); 3081 } 3082 3083 private void applyMacroBlockLimits( 3084 int maxHorizontalBlocks, int maxVerticalBlocks, 3085 int maxBlocks, long maxBlocksPerSecond, 3086 int blockWidth, int blockHeight, 3087 int widthAlignment, int heightAlignment) { 3088 applyMacroBlockLimits( 3089 1 /* minHorizontalBlocks */, 1 /* minVerticalBlocks */, 3090 maxHorizontalBlocks, maxVerticalBlocks, 3091 maxBlocks, maxBlocksPerSecond, 3092 blockWidth, blockHeight, widthAlignment, heightAlignment); 3093 } 3094 3095 private void applyMacroBlockLimits( 3096 int minHorizontalBlocks, int minVerticalBlocks, 3097 int maxHorizontalBlocks, int maxVerticalBlocks, 3098 int maxBlocks, long maxBlocksPerSecond, 3099 int blockWidth, int blockHeight, 3100 int widthAlignment, int heightAlignment) { 3101 applyAlignment(widthAlignment, heightAlignment); 3102 applyBlockLimits( 3103 blockWidth, blockHeight, Range.create(1, maxBlocks), 3104 Range.create(1L, maxBlocksPerSecond), 3105 Range.create( 3106 new Rational(1, maxVerticalBlocks), 3107 new Rational(maxHorizontalBlocks, 1))); 3108 mHorizontalBlockRange = 3109 mHorizontalBlockRange.intersect( 3110 Utils.divUp(minHorizontalBlocks, (mBlockWidth / blockWidth)), 3111 maxHorizontalBlocks / (mBlockWidth / blockWidth)); 3112 mVerticalBlockRange = 3113 mVerticalBlockRange.intersect( 3114 Utils.divUp(minVerticalBlocks, (mBlockHeight / blockHeight)), 3115 maxVerticalBlocks / (mBlockHeight / blockHeight)); 3116 } 3117 3118 private void applyLevelLimits() { 3119 long maxBlocksPerSecond = 0; 3120 int maxBlocks = 0; 3121 int maxBps = 0; 3122 int maxDPBBlocks = 0; 3123 3124 int errors = ERROR_NONE_SUPPORTED; 3125 CodecProfileLevel[] profileLevels = mParent.profileLevels; 3126 String mime = mParent.getMimeType(); 3127 3128 if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_AVC)) { 3129 maxBlocks = 99; 3130 maxBlocksPerSecond = 1485; 3131 maxBps = 64000; 3132 maxDPBBlocks = 396; 3133 for (CodecProfileLevel profileLevel: profileLevels) { 3134 int MBPS = 0, FS = 0, BR = 0, DPB = 0; 3135 boolean supported = true; 3136 switch (profileLevel.level) { 3137 case CodecProfileLevel.AVCLevel1: 3138 MBPS = 1485; FS = 99; BR = 64; DPB = 396; break; 3139 case CodecProfileLevel.AVCLevel1b: 3140 MBPS = 1485; FS = 99; BR = 128; DPB = 396; break; 3141 case CodecProfileLevel.AVCLevel11: 3142 MBPS = 3000; FS = 396; BR = 192; DPB = 900; break; 3143 case CodecProfileLevel.AVCLevel12: 3144 MBPS = 6000; FS = 396; BR = 384; DPB = 2376; break; 3145 case CodecProfileLevel.AVCLevel13: 3146 MBPS = 11880; FS = 396; BR = 768; DPB = 2376; break; 3147 case CodecProfileLevel.AVCLevel2: 3148 MBPS = 11880; FS = 396; BR = 2000; DPB = 2376; break; 3149 case CodecProfileLevel.AVCLevel21: 3150 MBPS = 19800; FS = 792; BR = 4000; DPB = 4752; break; 3151 case CodecProfileLevel.AVCLevel22: 3152 MBPS = 20250; FS = 1620; BR = 4000; DPB = 8100; break; 3153 case CodecProfileLevel.AVCLevel3: 3154 MBPS = 40500; FS = 1620; BR = 10000; DPB = 8100; break; 3155 case CodecProfileLevel.AVCLevel31: 3156 MBPS = 108000; FS = 3600; BR = 14000; DPB = 18000; break; 3157 case CodecProfileLevel.AVCLevel32: 3158 MBPS = 216000; FS = 5120; BR = 20000; DPB = 
20480; break; 3159 case CodecProfileLevel.AVCLevel4: 3160 MBPS = 245760; FS = 8192; BR = 20000; DPB = 32768; break; 3161 case CodecProfileLevel.AVCLevel41: 3162 MBPS = 245760; FS = 8192; BR = 50000; DPB = 32768; break; 3163 case CodecProfileLevel.AVCLevel42: 3164 MBPS = 522240; FS = 8704; BR = 50000; DPB = 34816; break; 3165 case CodecProfileLevel.AVCLevel5: 3166 MBPS = 589824; FS = 22080; BR = 135000; DPB = 110400; break; 3167 case CodecProfileLevel.AVCLevel51: 3168 MBPS = 983040; FS = 36864; BR = 240000; DPB = 184320; break; 3169 case CodecProfileLevel.AVCLevel52: 3170 MBPS = 2073600; FS = 36864; BR = 240000; DPB = 184320; break; 3171 case CodecProfileLevel.AVCLevel6: 3172 MBPS = 4177920; FS = 139264; BR = 240000; DPB = 696320; break; 3173 case CodecProfileLevel.AVCLevel61: 3174 MBPS = 8355840; FS = 139264; BR = 480000; DPB = 696320; break; 3175 case CodecProfileLevel.AVCLevel62: 3176 MBPS = 16711680; FS = 139264; BR = 800000; DPB = 696320; break; 3177 default: 3178 Log.w(TAG, "Unrecognized level " 3179 + profileLevel.level + " for " + mime); 3180 errors |= ERROR_UNRECOGNIZED; 3181 } 3182 switch (profileLevel.profile) { 3183 case CodecProfileLevel.AVCProfileConstrainedHigh: 3184 case CodecProfileLevel.AVCProfileHigh: 3185 BR *= 1250; break; 3186 case CodecProfileLevel.AVCProfileHigh10: 3187 BR *= 3000; break; 3188 case CodecProfileLevel.AVCProfileExtended: 3189 case CodecProfileLevel.AVCProfileHigh422: 3190 case CodecProfileLevel.AVCProfileHigh444: 3191 Log.w(TAG, "Unsupported profile " 3192 + profileLevel.profile + " for " + mime); 3193 errors |= ERROR_UNSUPPORTED; 3194 supported = false; 3195 // fall through - treat as base profile 3196 case CodecProfileLevel.AVCProfileConstrainedBaseline: 3197 case CodecProfileLevel.AVCProfileBaseline: 3198 case CodecProfileLevel.AVCProfileMain: 3199 BR *= 1000; break; 3200 default: 3201 Log.w(TAG, "Unrecognized profile " 3202 + profileLevel.profile + " for " + mime); 3203 errors |= ERROR_UNRECOGNIZED; 3204 BR *= 1000; 3205 } 3206 if (supported) { 3207 errors &= ~ERROR_NONE_SUPPORTED; 3208 } 3209 maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond); 3210 maxBlocks = Math.max(FS, maxBlocks); 3211 maxBps = Math.max(BR, maxBps); 3212 maxDPBBlocks = Math.max(maxDPBBlocks, DPB); 3213 } 3214 3215 int maxLengthInBlocks = (int)(Math.sqrt(maxBlocks * 8)); 3216 applyMacroBlockLimits( 3217 maxLengthInBlocks, maxLengthInBlocks, 3218 maxBlocks, maxBlocksPerSecond, 3219 16 /* blockWidth */, 16 /* blockHeight */, 3220 1 /* widthAlignment */, 1 /* heightAlignment */); 3221 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG2)) { 3222 int maxWidth = 11, maxHeight = 9, maxRate = 15; 3223 maxBlocks = 99; 3224 maxBlocksPerSecond = 1485; 3225 maxBps = 64000; 3226 for (CodecProfileLevel profileLevel: profileLevels) { 3227 int MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0; 3228 boolean supported = true; 3229 switch (profileLevel.profile) { 3230 case CodecProfileLevel.MPEG2ProfileSimple: 3231 switch (profileLevel.level) { 3232 case CodecProfileLevel.MPEG2LevelML: 3233 FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 15000; break; 3234 default: 3235 Log.w(TAG, "Unrecognized profile/level " 3236 + profileLevel.profile + "/" 3237 + profileLevel.level + " for " + mime); 3238 errors |= ERROR_UNRECOGNIZED; 3239 } 3240 break; 3241 case CodecProfileLevel.MPEG2ProfileMain: 3242 switch (profileLevel.level) { 3243 case CodecProfileLevel.MPEG2LevelLL: 3244 FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 4000; break; 3245 case CodecProfileLevel.MPEG2LevelML: 
3246 FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 15000; break; 3247 case CodecProfileLevel.MPEG2LevelH14: 3248 FR = 60; W = 90; H = 68; MBPS = 183600; FS = 6120; BR = 60000; break; 3249 case CodecProfileLevel.MPEG2LevelHL: 3250 FR = 60; W = 120; H = 68; MBPS = 244800; FS = 8160; BR = 80000; break; 3251 case CodecProfileLevel.MPEG2LevelHP: 3252 FR = 60; W = 120; H = 68; MBPS = 489600; FS = 8160; BR = 80000; break; 3253 default: 3254 Log.w(TAG, "Unrecognized profile/level " 3255 + profileLevel.profile + "/" 3256 + profileLevel.level + " for " + mime); 3257 errors |= ERROR_UNRECOGNIZED; 3258 } 3259 break; 3260 case CodecProfileLevel.MPEG2Profile422: 3261 case CodecProfileLevel.MPEG2ProfileSNR: 3262 case CodecProfileLevel.MPEG2ProfileSpatial: 3263 case CodecProfileLevel.MPEG2ProfileHigh: 3264 Log.i(TAG, "Unsupported profile " 3265 + profileLevel.profile + " for " + mime); 3266 errors |= ERROR_UNSUPPORTED; 3267 supported = false; 3268 break; 3269 default: 3270 Log.w(TAG, "Unrecognized profile " 3271 + profileLevel.profile + " for " + mime); 3272 errors |= ERROR_UNRECOGNIZED; 3273 } 3274 if (supported) { 3275 errors &= ~ERROR_NONE_SUPPORTED; 3276 } 3277 maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond); 3278 maxBlocks = Math.max(FS, maxBlocks); 3279 maxBps = Math.max(BR * 1000, maxBps); 3280 maxWidth = Math.max(W, maxWidth); 3281 maxHeight = Math.max(H, maxHeight); 3282 maxRate = Math.max(FR, maxRate); 3283 } 3284 applyMacroBlockLimits(maxWidth, maxHeight, 3285 maxBlocks, maxBlocksPerSecond, 3286 16 /* blockWidth */, 16 /* blockHeight */, 3287 1 /* widthAlignment */, 1 /* heightAlignment */); 3288 mFrameRateRange = mFrameRateRange.intersect(12, maxRate); 3289 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG4)) { 3290 int maxWidth = 11, maxHeight = 9, maxRate = 15; 3291 maxBlocks = 99; 3292 maxBlocksPerSecond = 1485; 3293 maxBps = 64000; 3294 for (CodecProfileLevel profileLevel: profileLevels) { 3295 int MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0; 3296 boolean strict = false; // true: W, H and FR are individual max limits 3297 boolean supported = true; 3298 switch (profileLevel.profile) { 3299 case CodecProfileLevel.MPEG4ProfileSimple: 3300 switch (profileLevel.level) { 3301 case CodecProfileLevel.MPEG4Level0: 3302 strict = true; 3303 FR = 15; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 64; break; 3304 case CodecProfileLevel.MPEG4Level1: 3305 FR = 30; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 64; break; 3306 case CodecProfileLevel.MPEG4Level0b: 3307 strict = true; 3308 FR = 15; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 128; break; 3309 case CodecProfileLevel.MPEG4Level2: 3310 FR = 30; W = 22; H = 18; MBPS = 5940; FS = 396; BR = 128; break; 3311 case CodecProfileLevel.MPEG4Level3: 3312 FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 384; break; 3313 case CodecProfileLevel.MPEG4Level4a: 3314 FR = 30; W = 40; H = 30; MBPS = 36000; FS = 1200; BR = 4000; break; 3315 case CodecProfileLevel.MPEG4Level5: 3316 FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 8000; break; 3317 case CodecProfileLevel.MPEG4Level6: 3318 FR = 30; W = 80; H = 45; MBPS = 108000; FS = 3600; BR = 12000; break; 3319 default: 3320 Log.w(TAG, "Unrecognized profile/level " 3321 + profileLevel.profile + "/" 3322 + profileLevel.level + " for " + mime); 3323 errors |= ERROR_UNRECOGNIZED; 3324 } 3325 break; 3326 case CodecProfileLevel.MPEG4ProfileAdvancedSimple: 3327 switch (profileLevel.level) { 3328 case CodecProfileLevel.MPEG4Level0: 3329 case CodecProfileLevel.MPEG4Level1: 3330 FR = 30; W 
= 11; H = 9; MBPS = 2970; FS = 99; BR = 128; break; 3331 case CodecProfileLevel.MPEG4Level2: 3332 FR = 30; W = 22; H = 18; MBPS = 5940; FS = 396; BR = 384; break; 3333 case CodecProfileLevel.MPEG4Level3: 3334 FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 768; break; 3335 case CodecProfileLevel.MPEG4Level3b: 3336 FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 1500; break; 3337 case CodecProfileLevel.MPEG4Level4: 3338 FR = 30; W = 44; H = 36; MBPS = 23760; FS = 792; BR = 3000; break; 3339 case CodecProfileLevel.MPEG4Level5: 3340 FR = 30; W = 45; H = 36; MBPS = 48600; FS = 1620; BR = 8000; break; 3341 default: 3342 Log.w(TAG, "Unrecognized profile/level " 3343 + profileLevel.profile + "/" 3344 + profileLevel.level + " for " + mime); 3345 errors |= ERROR_UNRECOGNIZED; 3346 } 3347 break; 3348 case CodecProfileLevel.MPEG4ProfileMain: // 2-4 3349 case CodecProfileLevel.MPEG4ProfileNbit: // 2 3350 case CodecProfileLevel.MPEG4ProfileAdvancedRealTime: // 1-4 3351 case CodecProfileLevel.MPEG4ProfileCoreScalable: // 1-3 3352 case CodecProfileLevel.MPEG4ProfileAdvancedCoding: // 1-4 3353 case CodecProfileLevel.MPEG4ProfileCore: // 1-2 3354 case CodecProfileLevel.MPEG4ProfileAdvancedCore: // 1-4 3355 case CodecProfileLevel.MPEG4ProfileSimpleScalable: // 0-2 3356 case CodecProfileLevel.MPEG4ProfileHybrid: // 1-2 3357 3358 // Studio profiles are not supported by our codecs. 3359 3360 // Only profiles that can decode simple object types are considered. 3361 // The following profiles are not able to. 3362 case CodecProfileLevel.MPEG4ProfileBasicAnimated: // 1-2 3363 case CodecProfileLevel.MPEG4ProfileScalableTexture: // 1 3364 case CodecProfileLevel.MPEG4ProfileSimpleFace: // 1-2 3365 case CodecProfileLevel.MPEG4ProfileAdvancedScalable: // 1-3 3366 case CodecProfileLevel.MPEG4ProfileSimpleFBA: // 1-2 3367 Log.i(TAG, "Unsupported profile " 3368 + profileLevel.profile + " for " + mime); 3369 errors |= ERROR_UNSUPPORTED; 3370 supported = false; 3371 break; 3372 default: 3373 Log.w(TAG, "Unrecognized profile " 3374 + profileLevel.profile + " for " + mime); 3375 errors |= ERROR_UNRECOGNIZED; 3376 } 3377 if (supported) { 3378 errors &= ~ERROR_NONE_SUPPORTED; 3379 } 3380 maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond); 3381 maxBlocks = Math.max(FS, maxBlocks); 3382 maxBps = Math.max(BR * 1000, maxBps); 3383 if (strict) { 3384 maxWidth = Math.max(W, maxWidth); 3385 maxHeight = Math.max(H, maxHeight); 3386 maxRate = Math.max(FR, maxRate); 3387 } else { 3388 // assuming max 60 fps frame rate and 1:2 aspect ratio 3389 int maxDim = (int)Math.sqrt(FS * 2); 3390 maxWidth = Math.max(maxDim, maxWidth); 3391 maxHeight = Math.max(maxDim, maxHeight); 3392 maxRate = Math.max(Math.max(FR, 60), maxRate); 3393 } 3394 } 3395 applyMacroBlockLimits(maxWidth, maxHeight, 3396 maxBlocks, maxBlocksPerSecond, 3397 16 /* blockWidth */, 16 /* blockHeight */, 3398 1 /* widthAlignment */, 1 /* heightAlignment */); 3399 mFrameRateRange = mFrameRateRange.intersect(12, maxRate); 3400 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263)) { 3401 int maxWidth = 11, maxHeight = 9, maxRate = 15; 3402 int minWidth = maxWidth, minHeight = maxHeight; 3403 int minAlignment = 16; 3404 maxBlocks = 99; 3405 maxBlocksPerSecond = 1485; 3406 maxBps = 64000; 3407 for (CodecProfileLevel profileLevel: profileLevels) { 3408 int MBPS = 0, BR = 0, FR = 0, W = 0, H = 0, minW = minWidth, minH = minHeight; 3409 boolean strict = false; // true: support only sQCIF, QCIF (maybe CIF) 3410 switch (profileLevel.level) { 3411 case 
CodecProfileLevel.H263Level10: 3412 strict = true; // only supports sQCIF & QCIF 3413 FR = 15; W = 11; H = 9; BR = 1; MBPS = W * H * FR; break; 3414 case CodecProfileLevel.H263Level20: 3415 strict = true; // only supports sQCIF, QCIF & CIF 3416 FR = 30; W = 22; H = 18; BR = 2; MBPS = W * H * 15; break; 3417 case CodecProfileLevel.H263Level30: 3418 strict = true; // only supports sQCIF, QCIF & CIF 3419 FR = 30; W = 22; H = 18; BR = 6; MBPS = W * H * FR; break; 3420 case CodecProfileLevel.H263Level40: 3421 strict = true; // only supports sQCIF, QCIF & CIF 3422 FR = 30; W = 22; H = 18; BR = 32; MBPS = W * H * FR; break; 3423 case CodecProfileLevel.H263Level45: 3424 // only implies level 10 support 3425 strict = profileLevel.profile == CodecProfileLevel.H263ProfileBaseline 3426 || profileLevel.profile == 3427 CodecProfileLevel.H263ProfileBackwardCompatible; 3428 if (!strict) { 3429 minW = 1; minH = 1; minAlignment = 4; 3430 } 3431 FR = 15; W = 11; H = 9; BR = 2; MBPS = W * H * FR; break; 3432 case CodecProfileLevel.H263Level50: 3433 // only supports 50fps for H > 15 3434 minW = 1; minH = 1; minAlignment = 4; 3435 FR = 60; W = 22; H = 18; BR = 64; MBPS = W * H * 50; break; 3436 case CodecProfileLevel.H263Level60: 3437 // only supports 50fps for H > 15 3438 minW = 1; minH = 1; minAlignment = 4; 3439 FR = 60; W = 45; H = 18; BR = 128; MBPS = W * H * 50; break; 3440 case CodecProfileLevel.H263Level70: 3441 // only supports 50fps for H > 30 3442 minW = 1; minH = 1; minAlignment = 4; 3443 FR = 60; W = 45; H = 36; BR = 256; MBPS = W * H * 50; break; 3444 default: 3445 Log.w(TAG, "Unrecognized profile/level " + profileLevel.profile 3446 + "/" + profileLevel.level + " for " + mime); 3447 errors |= ERROR_UNRECOGNIZED; 3448 } 3449 switch (profileLevel.profile) { 3450 case CodecProfileLevel.H263ProfileBackwardCompatible: 3451 case CodecProfileLevel.H263ProfileBaseline: 3452 case CodecProfileLevel.H263ProfileH320Coding: 3453 case CodecProfileLevel.H263ProfileHighCompression: 3454 case CodecProfileLevel.H263ProfileHighLatency: 3455 case CodecProfileLevel.H263ProfileInterlace: 3456 case CodecProfileLevel.H263ProfileInternet: 3457 case CodecProfileLevel.H263ProfileISWV2: 3458 case CodecProfileLevel.H263ProfileISWV3: 3459 break; 3460 default: 3461 Log.w(TAG, "Unrecognized profile " 3462 + profileLevel.profile + " for " + mime); 3463 errors |= ERROR_UNRECOGNIZED; 3464 } 3465 if (strict) { 3466 // Strict levels define sub-QCIF min size and enumerated sizes. We cannot 3467 // express support for "only sQCIF & QCIF (& CIF)" using VideoCapabilities 3468 // but we can express "only QCIF (& CIF)", so set minimum size at QCIF. 3469 // minW = 8; minH = 6; 3470 minW = 11; minH = 9; 3471 } else { 3472 // any support for non-strict levels (including unrecognized profiles or 3473 // levels) allows custom frame size support beyond supported limits 3474 // (other than bitrate) 3475 mAllowMbOverride = true; 3476 } 3477 errors &= ~ERROR_NONE_SUPPORTED; 3478 maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond); 3479 maxBlocks = Math.max(W * H, maxBlocks); 3480 maxBps = Math.max(BR * 64000, maxBps); 3481 maxWidth = Math.max(W, maxWidth); 3482 maxHeight = Math.max(H, maxHeight); 3483 maxRate = Math.max(FR, maxRate); 3484 minWidth = Math.min(minW, minWidth); 3485 minHeight = Math.min(minH, minHeight); 3486 } 3487 // unless we encountered custom frame size support, limit size to QCIF and CIF 3488 // using aspect ratio.
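// (in macroblocks, QCIF is 11x9 and CIF is 22x18, so both share the 11:9 block aspect ratio that the range below pins down)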
3489 if (!mAllowMbOverride) { 3490 mBlockAspectRatioRange = 3491 Range.create(new Rational(11, 9), new Rational(11, 9)); 3492 } 3493 applyMacroBlockLimits( 3494 minWidth, minHeight, 3495 maxWidth, maxHeight, 3496 maxBlocks, maxBlocksPerSecond, 3497 16 /* blockWidth */, 16 /* blockHeight */, 3498 minAlignment /* widthAlignment */, minAlignment /* heightAlignment */); 3499 mFrameRateRange = Range.create(1, maxRate); 3500 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP8)) { 3501 maxBlocks = Integer.MAX_VALUE; 3502 maxBlocksPerSecond = Integer.MAX_VALUE; 3503 3504 // TODO: set to 100Mbps for now, need a number for VP8 3505 maxBps = 100000000; 3506 3507 // profile levels are not indicative for VPx, but verify 3508 // them nonetheless 3509 for (CodecProfileLevel profileLevel: profileLevels) { 3510 switch (profileLevel.level) { 3511 case CodecProfileLevel.VP8Level_Version0: 3512 case CodecProfileLevel.VP8Level_Version1: 3513 case CodecProfileLevel.VP8Level_Version2: 3514 case CodecProfileLevel.VP8Level_Version3: 3515 break; 3516 default: 3517 Log.w(TAG, "Unrecognized level " 3518 + profileLevel.level + " for " + mime); 3519 errors |= ERROR_UNRECOGNIZED; 3520 } 3521 switch (profileLevel.profile) { 3522 case CodecProfileLevel.VP8ProfileMain: 3523 break; 3524 default: 3525 Log.w(TAG, "Unrecognized profile " 3526 + profileLevel.profile + " for " + mime); 3527 errors |= ERROR_UNRECOGNIZED; 3528 } 3529 errors &= ~ERROR_NONE_SUPPORTED; 3530 } 3531 3532 final int blockSize = 16; 3533 applyMacroBlockLimits(Short.MAX_VALUE, Short.MAX_VALUE, 3534 maxBlocks, maxBlocksPerSecond, blockSize, blockSize, 3535 1 /* widthAlignment */, 1 /* heightAlignment */); 3536 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP9)) { 3537 maxBlocksPerSecond = 829440; 3538 maxBlocks = 36864; 3539 maxBps = 200000; 3540 int maxDim = 512; 3541 3542 for (CodecProfileLevel profileLevel: profileLevels) { 3543 long SR = 0; // luma sample rate 3544 int FS = 0; // luma picture size 3545 int BR = 0; // bit rate kbps 3546 int D = 0; // luma dimension 3547 switch (profileLevel.level) { 3548 case CodecProfileLevel.VP9Level1: 3549 SR = 829440; FS = 36864; BR = 200; D = 512; break; 3550 case CodecProfileLevel.VP9Level11: 3551 SR = 2764800; FS = 73728; BR = 800; D = 768; break; 3552 case CodecProfileLevel.VP9Level2: 3553 SR = 4608000; FS = 122880; BR = 1800; D = 960; break; 3554 case CodecProfileLevel.VP9Level21: 3555 SR = 9216000; FS = 245760; BR = 3600; D = 1344; break; 3556 case CodecProfileLevel.VP9Level3: 3557 SR = 20736000; FS = 552960; BR = 7200; D = 2048; break; 3558 case CodecProfileLevel.VP9Level31: 3559 SR = 36864000; FS = 983040; BR = 12000; D = 2752; break; 3560 case CodecProfileLevel.VP9Level4: 3561 SR = 83558400; FS = 2228224; BR = 18000; D = 4160; break; 3562 case CodecProfileLevel.VP9Level41: 3563 SR = 160432128; FS = 2228224; BR = 30000; D = 4160; break; 3564 case CodecProfileLevel.VP9Level5: 3565 SR = 311951360; FS = 8912896; BR = 60000; D = 8384; break; 3566 case CodecProfileLevel.VP9Level51: 3567 SR = 588251136; FS = 8912896; BR = 120000; D = 8384; break; 3568 case CodecProfileLevel.VP9Level52: 3569 SR = 1176502272; FS = 8912896; BR = 180000; D = 8384; break; 3570 case CodecProfileLevel.VP9Level6: 3571 SR = 1176502272; FS = 35651584; BR = 180000; D = 16832; break; 3572 case CodecProfileLevel.VP9Level61: 3573 SR = 2353004544L; FS = 35651584; BR = 240000; D = 16832; break; 3574 case CodecProfileLevel.VP9Level62: 3575 SR = 4706009088L; FS = 35651584; BR = 480000; D = 16832; break; 3576 default: 3577 
Log.w(TAG, "Unrecognized level " 3578 + profileLevel.level + " for " + mime); 3579 errors |= ERROR_UNRECOGNIZED; 3580 } 3581 switch (profileLevel.profile) { 3582 case CodecProfileLevel.VP9Profile0: 3583 case CodecProfileLevel.VP9Profile1: 3584 case CodecProfileLevel.VP9Profile2: 3585 case CodecProfileLevel.VP9Profile3: 3586 case CodecProfileLevel.VP9Profile2HDR: 3587 case CodecProfileLevel.VP9Profile3HDR: 3588 case CodecProfileLevel.VP9Profile2HDR10Plus: 3589 case CodecProfileLevel.VP9Profile3HDR10Plus: 3590 break; 3591 default: 3592 Log.w(TAG, "Unrecognized profile " 3593 + profileLevel.profile + " for " + mime); 3594 errors |= ERROR_UNRECOGNIZED; 3595 } 3596 errors &= ~ERROR_NONE_SUPPORTED; 3597 maxBlocksPerSecond = Math.max(SR, maxBlocksPerSecond); 3598 maxBlocks = Math.max(FS, maxBlocks); 3599 maxBps = Math.max(BR * 1000, maxBps); 3600 maxDim = Math.max(D, maxDim); 3601 } 3602 3603 final int blockSize = 8; 3604 int maxLengthInBlocks = Utils.divUp(maxDim, blockSize); 3605 maxBlocks = Utils.divUp(maxBlocks, blockSize * blockSize); 3606 maxBlocksPerSecond = Utils.divUp(maxBlocksPerSecond, blockSize * blockSize); 3607 3608 applyMacroBlockLimits( 3609 maxLengthInBlocks, maxLengthInBlocks, 3610 maxBlocks, maxBlocksPerSecond, 3611 blockSize, blockSize, 3612 1 /* widthAlignment */, 1 /* heightAlignment */); 3613 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC)) { 3614 // CTBs are at least 8x8 so use 8x8 block size 3615 maxBlocks = 36864 >> 6; // 192x192 pixels == 576 8x8 blocks 3616 maxBlocksPerSecond = maxBlocks * 15; 3617 maxBps = 128000; 3618 for (CodecProfileLevel profileLevel: profileLevels) { 3619 double FR = 0; 3620 int FS = 0; 3621 int BR = 0; 3622 switch (profileLevel.level) { 3623 /* The HEVC spec talks only in a very convoluted manner about the 3624 existence of levels 1-3.1 for High tier, which could also be 3625 understood as 'decoders and encoders should treat these levels 3626 as if they were Main tier', so we do that. 
*/ 3627 case CodecProfileLevel.HEVCMainTierLevel1: 3628 case CodecProfileLevel.HEVCHighTierLevel1: 3629 FR = 15; FS = 36864; BR = 128; break; 3630 case CodecProfileLevel.HEVCMainTierLevel2: 3631 case CodecProfileLevel.HEVCHighTierLevel2: 3632 FR = 30; FS = 122880; BR = 1500; break; 3633 case CodecProfileLevel.HEVCMainTierLevel21: 3634 case CodecProfileLevel.HEVCHighTierLevel21: 3635 FR = 30; FS = 245760; BR = 3000; break; 3636 case CodecProfileLevel.HEVCMainTierLevel3: 3637 case CodecProfileLevel.HEVCHighTierLevel3: 3638 FR = 30; FS = 552960; BR = 6000; break; 3639 case CodecProfileLevel.HEVCMainTierLevel31: 3640 case CodecProfileLevel.HEVCHighTierLevel31: 3641 FR = 33.75; FS = 983040; BR = 10000; break; 3642 case CodecProfileLevel.HEVCMainTierLevel4: 3643 FR = 30; FS = 2228224; BR = 12000; break; 3644 case CodecProfileLevel.HEVCHighTierLevel4: 3645 FR = 30; FS = 2228224; BR = 30000; break; 3646 case CodecProfileLevel.HEVCMainTierLevel41: 3647 FR = 60; FS = 2228224; BR = 20000; break; 3648 case CodecProfileLevel.HEVCHighTierLevel41: 3649 FR = 60; FS = 2228224; BR = 50000; break; 3650 case CodecProfileLevel.HEVCMainTierLevel5: 3651 FR = 30; FS = 8912896; BR = 25000; break; 3652 case CodecProfileLevel.HEVCHighTierLevel5: 3653 FR = 30; FS = 8912896; BR = 100000; break; 3654 case CodecProfileLevel.HEVCMainTierLevel51: 3655 FR = 60; FS = 8912896; BR = 40000; break; 3656 case CodecProfileLevel.HEVCHighTierLevel51: 3657 FR = 60; FS = 8912896; BR = 160000; break; 3658 case CodecProfileLevel.HEVCMainTierLevel52: 3659 FR = 120; FS = 8912896; BR = 60000; break; 3660 case CodecProfileLevel.HEVCHighTierLevel52: 3661 FR = 120; FS = 8912896; BR = 240000; break; 3662 case CodecProfileLevel.HEVCMainTierLevel6: 3663 FR = 30; FS = 35651584; BR = 60000; break; 3664 case CodecProfileLevel.HEVCHighTierLevel6: 3665 FR = 30; FS = 35651584; BR = 240000; break; 3666 case CodecProfileLevel.HEVCMainTierLevel61: 3667 FR = 60; FS = 35651584; BR = 120000; break; 3668 case CodecProfileLevel.HEVCHighTierLevel61: 3669 FR = 60; FS = 35651584; BR = 480000; break; 3670 case CodecProfileLevel.HEVCMainTierLevel62: 3671 FR = 120; FS = 35651584; BR = 240000; break; 3672 case CodecProfileLevel.HEVCHighTierLevel62: 3673 FR = 120; FS = 35651584; BR = 800000; break; 3674 default: 3675 Log.w(TAG, "Unrecognized level " 3676 + profileLevel.level + " for " + mime); 3677 errors |= ERROR_UNRECOGNIZED; 3678 } 3679 switch (profileLevel.profile) { 3680 case CodecProfileLevel.HEVCProfileMain: 3681 case CodecProfileLevel.HEVCProfileMain10: 3682 case CodecProfileLevel.HEVCProfileMainStill: 3683 case CodecProfileLevel.HEVCProfileMain10HDR10: 3684 case CodecProfileLevel.HEVCProfileMain10HDR10Plus: 3685 break; 3686 default: 3687 Log.w(TAG, "Unrecognized profile " 3688 + profileLevel.profile + " for " + mime); 3689 errors |= ERROR_UNRECOGNIZED; 3690 } 3691 3692 /* DPB logic: 3693 if (width * height <= FS / 4) DPB = 16; 3694 else if (width * height <= FS / 2) DPB = 12; 3695 else if (width * height <= FS * 0.75) DPB = 8; 3696 else DPB = 6; 3697 */ 3698 3699 FS >>= 6; // convert pixels to blocks 3700 errors &= ~ERROR_NONE_SUPPORTED; 3701 maxBlocksPerSecond = Math.max((int)(FR * FS), maxBlocksPerSecond); 3702 maxBlocks = Math.max(FS, maxBlocks); 3703 maxBps = Math.max(BR * 1000, maxBps); 3704 } 3705 3706 int maxLengthInBlocks = (int)(Math.sqrt(maxBlocks * 8)); 3707 applyMacroBlockLimits( 3708 maxLengthInBlocks, maxLengthInBlocks, 3709 maxBlocks, maxBlocksPerSecond, 3710 8 /* blockWidth */, 8 /* blockHeight */, 3711 1 /* widthAlignment */, 1 /* 
heightAlignment */); 3712 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_AV1)) { 3713 maxBlocksPerSecond = 829440; 3714 maxBlocks = 36864; 3715 maxBps = 200000; 3716 int maxDim = 512; 3717 3718 // Sample rate, Picture Size, Bit rate and luma dimension for AV1 Codec, 3719 // corresponding to the definitions in 3720 // "AV1 Bitstream & Decoding Process Specification", Annex A 3721 // found at https://aomedia.org/av1-bitstream-and-decoding-process-specification/ 3722 for (CodecProfileLevel profileLevel: profileLevels) { 3723 long SR = 0; // luma sample rate 3724 int FS = 0; // luma picture size 3725 int BR = 0; // bit rate kbps 3726 int D = 0; // luma D 3727 switch (profileLevel.level) { 3728 case CodecProfileLevel.AV1Level2: 3729 SR = 5529600; FS = 147456; BR = 1500; D = 2048; break; 3730 case CodecProfileLevel.AV1Level21: 3731 case CodecProfileLevel.AV1Level22: 3732 case CodecProfileLevel.AV1Level23: 3733 SR = 10454400; FS = 278784; BR = 3000; D = 2816; break; 3734 3735 case CodecProfileLevel.AV1Level3: 3736 SR = 24969600; FS = 665856; BR = 6000; D = 4352; break; 3737 case CodecProfileLevel.AV1Level31: 3738 case CodecProfileLevel.AV1Level32: 3739 case CodecProfileLevel.AV1Level33: 3740 SR = 39938400; FS = 1065024; BR = 10000; D = 5504; break; 3741 3742 case CodecProfileLevel.AV1Level4: 3743 SR = 77856768; FS = 2359296; BR = 12000; D = 6144; break; 3744 case CodecProfileLevel.AV1Level41: 3745 case CodecProfileLevel.AV1Level42: 3746 case CodecProfileLevel.AV1Level43: 3747 SR = 155713536; FS = 2359296; BR = 20000; D = 6144; break; 3748 3749 case CodecProfileLevel.AV1Level5: 3750 SR = 273715200; FS = 8912896; BR = 30000; D = 8192; break; 3751 case CodecProfileLevel.AV1Level51: 3752 SR = 547430400; FS = 8912896; BR = 40000; D = 8192; break; 3753 case CodecProfileLevel.AV1Level52: 3754 SR = 1094860800; FS = 8912896; BR = 60000; D = 8192; break; 3755 case CodecProfileLevel.AV1Level53: 3756 SR = 1176502272; FS = 8912896; BR = 60000; D = 8192; break; 3757 3758 case CodecProfileLevel.AV1Level6: 3759 SR = 1176502272; FS = 35651584; BR = 60000; D = 16384; break; 3760 case CodecProfileLevel.AV1Level61: 3761 SR = 2189721600L; FS = 35651584; BR = 100000; D = 16384; break; 3762 case CodecProfileLevel.AV1Level62: 3763 SR = 4379443200L; FS = 35651584; BR = 160000; D = 16384; break; 3764 case CodecProfileLevel.AV1Level63: 3765 SR = 4706009088L; FS = 35651584; BR = 160000; D = 16384; break; 3766 3767 default: 3768 Log.w(TAG, "Unrecognized level " 3769 + profileLevel.level + " for " + mime); 3770 errors |= ERROR_UNRECOGNIZED; 3771 } 3772 switch (profileLevel.profile) { 3773 case CodecProfileLevel.AV1ProfileMain8: 3774 case CodecProfileLevel.AV1ProfileMain10: 3775 case CodecProfileLevel.AV1ProfileMain10HDR10: 3776 case CodecProfileLevel.AV1ProfileMain10HDR10Plus: 3777 break; 3778 default: 3779 Log.w(TAG, "Unrecognized profile " 3780 + profileLevel.profile + " for " + mime); 3781 errors |= ERROR_UNRECOGNIZED; 3782 } 3783 errors &= ~ERROR_NONE_SUPPORTED; 3784 maxBlocksPerSecond = Math.max(SR, maxBlocksPerSecond); 3785 maxBlocks = Math.max(FS, maxBlocks); 3786 maxBps = Math.max(BR * 1000, maxBps); 3787 maxDim = Math.max(D, maxDim); 3788 } 3789 3790 final int blockSize = 8; 3791 int maxLengthInBlocks = Utils.divUp(maxDim, blockSize); 3792 maxBlocks = Utils.divUp(maxBlocks, blockSize * blockSize); 3793 maxBlocksPerSecond = Utils.divUp(maxBlocksPerSecond, blockSize * blockSize); 3794 applyMacroBlockLimits( 3795 maxLengthInBlocks, maxLengthInBlocks, 3796 maxBlocks, maxBlocksPerSecond, 3797 blockSize, 
blockSize, 3798 1 /* widthAlignment */, 1 /* heightAlignment */); 3799 } else { 3800 Log.w(TAG, "Unsupported mime " + mime); 3801 // using minimal bitrate here. should be overridden by 3802 // info from media_codecs.xml 3803 maxBps = 64000; 3804 errors |= ERROR_UNSUPPORTED; 3805 } 3806 mBitrateRange = Range.create(1, maxBps); 3807 mParent.mError |= errors; 3808 } 3809 } 3810 3811 /** 3812 * A class that supports querying the encoding capabilities of a codec. 3813 */ 3814 public static final class EncoderCapabilities { 3815 /** 3816 * Returns the supported range of quality values. 3817 * 3818 * Quality is implementation-specific. As a general rule, a higher quality 3819 * setting results in a better image quality and a lower compression ratio. 3820 */ 3821 public Range<Integer> getQualityRange() { 3822 return mQualityRange; 3823 } 3824 3825 /** 3826 * Returns the supported range of encoder complexity values. 3827 * <p> 3828 * Some codecs may support multiple complexity levels, where higher 3829 * complexity values use more encoder tools (e.g. perform more 3830 * intensive calculations) to improve the quality or the compression 3831 * ratio. Use a lower value to save power and/or time. 3832 */ 3833 public Range<Integer> getComplexityRange() { 3834 return mComplexityRange; 3835 } 3836 3837 /** Constant quality mode */ 3838 public static final int BITRATE_MODE_CQ = 0; 3839 /** Variable bitrate mode */ 3840 public static final int BITRATE_MODE_VBR = 1; 3841 /** Constant bitrate mode */ 3842 public static final int BITRATE_MODE_CBR = 2; 3843 /** Constant bitrate mode with frame drops */ 3844 public static final int BITRATE_MODE_CBR_FD = 3; 3845 3846 private static final Feature[] bitrates = new Feature[] { 3847 new Feature("VBR", BITRATE_MODE_VBR, true), 3848 new Feature("CBR", BITRATE_MODE_CBR, false), 3849 new Feature("CQ", BITRATE_MODE_CQ, false), 3850 new Feature("CBR-FD", BITRATE_MODE_CBR_FD, false) 3851 }; 3852 3853 private static int parseBitrateMode(String mode) { 3854 for (Feature feat: bitrates) { 3855 if (feat.mName.equalsIgnoreCase(mode)) { 3856 return feat.mValue; 3857 } 3858 } 3859 return 0; 3860 } 3861 3862 /** 3863 * Query whether a bitrate mode is supported.
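 * <p>For illustration, a minimal sketch of checking CBR support for an encoder; the
 * {@code codecInfo} and {@code mimeType} values are assumed to have been obtained
 * elsewhere, e.g. via {@link MediaCodecList}:
 * <pre>
 * MediaCodecInfo.EncoderCapabilities encCaps = codecInfo
 *         .getCapabilitiesForType(mimeType)
 *         .getEncoderCapabilities();
 * boolean cbrSupported = encCaps.isBitrateModeSupported(
 *         MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);</pre>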
3864 */ 3865 public boolean isBitrateModeSupported(int mode) { 3866 for (Feature feat: bitrates) { 3867 if (mode == feat.mValue) { 3868 return (mBitControl & (1 << mode)) != 0; 3869 } 3870 } 3871 return false; 3872 } 3873 3874 private Range<Integer> mQualityRange; 3875 private Range<Integer> mComplexityRange; 3876 private CodecCapabilities mParent; 3877 3878 /* no public constructor */ 3879 private EncoderCapabilities() { } 3880 3881 /** @hide */ 3882 public static EncoderCapabilities create( 3883 MediaFormat info, CodecCapabilities parent) { 3884 EncoderCapabilities caps = new EncoderCapabilities(); 3885 caps.init(info, parent); 3886 return caps; 3887 } 3888 3889 private void init(MediaFormat info, CodecCapabilities parent) { 3890 // no support for complexity or quality yet 3891 mParent = parent; 3892 mComplexityRange = Range.create(0, 0); 3893 mQualityRange = Range.create(0, 0); 3894 mBitControl = (1 << BITRATE_MODE_VBR); 3895 3896 applyLevelLimits(); 3897 parseFromInfo(info); 3898 } 3899 3900 private void applyLevelLimits() { 3901 String mime = mParent.getMimeType(); 3902 if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_FLAC)) { 3903 mComplexityRange = Range.create(0, 8); 3904 mBitControl = (1 << BITRATE_MODE_CQ); 3905 } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_NB) 3906 || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_WB) 3907 || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_ALAW) 3908 || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_MLAW) 3909 || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MSGSM)) { 3910 mBitControl = (1 << BITRATE_MODE_CBR); 3911 } 3912 } 3913 3914 private int mBitControl; 3915 private Integer mDefaultComplexity; 3916 private Integer mDefaultQuality; 3917 private String mQualityScale; 3918 3919 private void parseFromInfo(MediaFormat info) { 3920 Map<String, Object> map = info.getMap(); 3921 3922 if (info.containsKey("complexity-range")) { 3923 mComplexityRange = Utils 3924 .parseIntRange(info.getString("complexity-range"), mComplexityRange); 3925 // TODO should we limit this to level limits? 
3926 } 3927 if (info.containsKey("quality-range")) { 3928 mQualityRange = Utils 3929 .parseIntRange(info.getString("quality-range"), mQualityRange); 3930 } 3931 if (info.containsKey("feature-bitrate-modes")) { 3932 mBitControl = 0; 3933 for (String mode: info.getString("feature-bitrate-modes").split(",")) { 3934 mBitControl |= (1 << parseBitrateMode(mode)); 3935 } 3936 } 3937 3938 try { 3939 mDefaultComplexity = Integer.parseInt((String)map.get("complexity-default")); 3940 } catch (NumberFormatException e) { } 3941 3942 try { 3943 mDefaultQuality = Integer.parseInt((String)map.get("quality-default")); 3944 } catch (NumberFormatException e) { } 3945 3946 mQualityScale = (String)map.get("quality-scale"); 3947 } 3948 3949 private boolean supports( 3950 Integer complexity, Integer quality, Integer profile) { 3951 boolean ok = true; 3952 if (ok && complexity != null) { 3953 ok = mComplexityRange.contains(complexity); 3954 } 3955 if (ok && quality != null) { 3956 ok = mQualityRange.contains(quality); 3957 } 3958 if (ok && profile != null) { 3959 for (CodecProfileLevel pl: mParent.profileLevels) { 3960 if (pl.profile == profile) { 3961 profile = null; 3962 break; 3963 } 3964 } 3965 ok = profile == null; 3966 } 3967 return ok; 3968 } 3969 3970 /** @hide */ 3971 public void getDefaultFormat(MediaFormat format) { 3972 // don't list trivial quality/complexity as default for now 3973 if (!mQualityRange.getUpper().equals(mQualityRange.getLower()) 3974 && mDefaultQuality != null) { 3975 format.setInteger(MediaFormat.KEY_QUALITY, mDefaultQuality); 3976 } 3977 if (!mComplexityRange.getUpper().equals(mComplexityRange.getLower()) 3978 && mDefaultComplexity != null) { 3979 format.setInteger(MediaFormat.KEY_COMPLEXITY, mDefaultComplexity); 3980 } 3981 // bitrates are listed in order of preference 3982 for (Feature feat: bitrates) { 3983 if ((mBitControl & (1 << feat.mValue)) != 0) { 3984 format.setInteger(MediaFormat.KEY_BITRATE_MODE, feat.mValue); 3985 break; 3986 } 3987 } 3988 } 3989 3990 /** @hide */ 3991 public boolean supportsFormat(MediaFormat format) { 3992 final Map<String, Object> map = format.getMap(); 3993 final String mime = mParent.getMimeType(); 3994 3995 Integer mode = (Integer)map.get(MediaFormat.KEY_BITRATE_MODE); 3996 if (mode != null && !isBitrateModeSupported(mode)) { 3997 return false; 3998 } 3999 4000 Integer complexity = (Integer)map.get(MediaFormat.KEY_COMPLEXITY); 4001 if (MediaFormat.MIMETYPE_AUDIO_FLAC.equalsIgnoreCase(mime)) { 4002 Integer flacComplexity = 4003 (Integer)map.get(MediaFormat.KEY_FLAC_COMPRESSION_LEVEL); 4004 if (complexity == null) { 4005 complexity = flacComplexity; 4006 } else if (flacComplexity != null && !complexity.equals(flacComplexity)) { 4007 throw new IllegalArgumentException( 4008 "conflicting values for complexity and " + 4009 "flac-compression-level"); 4010 } 4011 } 4012 4013 // other audio parameters 4014 Integer profile = (Integer)map.get(MediaFormat.KEY_PROFILE); 4015 if (MediaFormat.MIMETYPE_AUDIO_AAC.equalsIgnoreCase(mime)) { 4016 Integer aacProfile = (Integer)map.get(MediaFormat.KEY_AAC_PROFILE); 4017 if (profile == null) { 4018 profile = aacProfile; 4019 } else if (aacProfile != null && !aacProfile.equals(profile)) { 4020 throw new IllegalArgumentException( 4021 "conflicting values for profile and aac-profile"); 4022 } 4023 } 4024 4025 Integer quality = (Integer)map.get(MediaFormat.KEY_QUALITY); 4026 4027 return supports(complexity, quality, profile); 4028 } 4029 }; 4030 4031 /** 4032 * Encapsulates the profiles available for a codec component. 
4033 * <p>You can get a set of {@link MediaCodecInfo.CodecProfileLevel} objects for a given 4034 * {@link MediaCodecInfo} object from the 4035 * {@link MediaCodecInfo.CodecCapabilities#profileLevels} field. 4036 */ 4037 public static final class CodecProfileLevel { 4038 // These constants were originally in-line with OMX values, but this 4039 // correspondence is no longer maintained. 4040 4041 // Profiles and levels for AVC Codec, corresponding to the definitions in 4042 // "SERIES H: AUDIOVISUAL AND MULTIMEDIA SYSTEMS, 4043 // Infrastructure of audiovisual services – Coding of moving video 4044 // Advanced video coding for generic audiovisual services" 4045 // found at 4046 // https://www.itu.int/rec/T-REC-H.264-201704-I 4047 4048 /** 4049 * AVC Baseline profile. 4050 * See definition in 4051 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4052 * Annex A. 4053 */ 4054 public static final int AVCProfileBaseline = 0x01; 4055 4056 /** 4057 * AVC Main profile. 4058 * See definition in 4059 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4060 * Annex A. 4061 */ 4062 public static final int AVCProfileMain = 0x02; 4063 4064 /** 4065 * AVC Extended profile. 4066 * See definition in 4067 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4068 * Annex A. 4069 */ 4070 public static final int AVCProfileExtended = 0x04; 4071 4072 /** 4073 * AVC High profile. 4074 * See definition in 4075 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4076 * Annex A. 4077 */ 4078 public static final int AVCProfileHigh = 0x08; 4079 4080 /** 4081 * AVC High 10 profile. 4082 * See definition in 4083 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4084 * Annex A. 4085 */ 4086 public static final int AVCProfileHigh10 = 0x10; 4087 4088 /** 4089 * AVC High 4:2:2 profile. 4090 * See definition in 4091 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4092 * Annex A. 4093 */ 4094 public static final int AVCProfileHigh422 = 0x20; 4095 4096 /** 4097 * AVC High 4:4:4 profile. 4098 * See definition in 4099 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4100 * Annex A. 4101 */ 4102 public static final int AVCProfileHigh444 = 0x40; 4103 4104 /** 4105 * AVC Constrained Baseline profile. 4106 * See definition in 4107 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4108 * Annex A. 4109 */ 4110 public static final int AVCProfileConstrainedBaseline = 0x10000; 4111 4112 /** 4113 * AVC Constrained High profile. 4114 * See definition in 4115 * <a href="https://www.itu.int/rec/T-REC-H.264-201704-I">H.264 recommendation</a>, 4116 * Annex A. 
4117 */ 4118 public static final int AVCProfileConstrainedHigh = 0x80000; 4119 4120 public static final int AVCLevel1 = 0x01; 4121 public static final int AVCLevel1b = 0x02; 4122 public static final int AVCLevel11 = 0x04; 4123 public static final int AVCLevel12 = 0x08; 4124 public static final int AVCLevel13 = 0x10; 4125 public static final int AVCLevel2 = 0x20; 4126 public static final int AVCLevel21 = 0x40; 4127 public static final int AVCLevel22 = 0x80; 4128 public static final int AVCLevel3 = 0x100; 4129 public static final int AVCLevel31 = 0x200; 4130 public static final int AVCLevel32 = 0x400; 4131 public static final int AVCLevel4 = 0x800; 4132 public static final int AVCLevel41 = 0x1000; 4133 public static final int AVCLevel42 = 0x2000; 4134 public static final int AVCLevel5 = 0x4000; 4135 public static final int AVCLevel51 = 0x8000; 4136 public static final int AVCLevel52 = 0x10000; 4137 public static final int AVCLevel6 = 0x20000; 4138 public static final int AVCLevel61 = 0x40000; 4139 public static final int AVCLevel62 = 0x80000; 4140 4141 public static final int H263ProfileBaseline = 0x01; 4142 public static final int H263ProfileH320Coding = 0x02; 4143 public static final int H263ProfileBackwardCompatible = 0x04; 4144 public static final int H263ProfileISWV2 = 0x08; 4145 public static final int H263ProfileISWV3 = 0x10; 4146 public static final int H263ProfileHighCompression = 0x20; 4147 public static final int H263ProfileInternet = 0x40; 4148 public static final int H263ProfileInterlace = 0x80; 4149 public static final int H263ProfileHighLatency = 0x100; 4150 4151 public static final int H263Level10 = 0x01; 4152 public static final int H263Level20 = 0x02; 4153 public static final int H263Level30 = 0x04; 4154 public static final int H263Level40 = 0x08; 4155 public static final int H263Level45 = 0x10; 4156 public static final int H263Level50 = 0x20; 4157 public static final int H263Level60 = 0x40; 4158 public static final int H263Level70 = 0x80; 4159 4160 public static final int MPEG4ProfileSimple = 0x01; 4161 public static final int MPEG4ProfileSimpleScalable = 0x02; 4162 public static final int MPEG4ProfileCore = 0x04; 4163 public static final int MPEG4ProfileMain = 0x08; 4164 public static final int MPEG4ProfileNbit = 0x10; 4165 public static final int MPEG4ProfileScalableTexture = 0x20; 4166 public static final int MPEG4ProfileSimpleFace = 0x40; 4167 public static final int MPEG4ProfileSimpleFBA = 0x80; 4168 public static final int MPEG4ProfileBasicAnimated = 0x100; 4169 public static final int MPEG4ProfileHybrid = 0x200; 4170 public static final int MPEG4ProfileAdvancedRealTime = 0x400; 4171 public static final int MPEG4ProfileCoreScalable = 0x800; 4172 public static final int MPEG4ProfileAdvancedCoding = 0x1000; 4173 public static final int MPEG4ProfileAdvancedCore = 0x2000; 4174 public static final int MPEG4ProfileAdvancedScalable = 0x4000; 4175 public static final int MPEG4ProfileAdvancedSimple = 0x8000; 4176 4177 public static final int MPEG4Level0 = 0x01; 4178 public static final int MPEG4Level0b = 0x02; 4179 public static final int MPEG4Level1 = 0x04; 4180 public static final int MPEG4Level2 = 0x08; 4181 public static final int MPEG4Level3 = 0x10; 4182 public static final int MPEG4Level3b = 0x18; 4183 public static final int MPEG4Level4 = 0x20; 4184 public static final int MPEG4Level4a = 0x40; 4185 public static final int MPEG4Level5 = 0x80; 4186 public static final int MPEG4Level6 = 0x100; 4187 4188 public static final int MPEG2ProfileSimple = 0x00; 4189 public static 
final int MPEG2ProfileMain = 0x01; 4190 public static final int MPEG2Profile422 = 0x02; 4191 public static final int MPEG2ProfileSNR = 0x03; 4192 public static final int MPEG2ProfileSpatial = 0x04; 4193 public static final int MPEG2ProfileHigh = 0x05; 4194 4195 public static final int MPEG2LevelLL = 0x00; 4196 public static final int MPEG2LevelML = 0x01; 4197 public static final int MPEG2LevelH14 = 0x02; 4198 public static final int MPEG2LevelHL = 0x03; 4199 public static final int MPEG2LevelHP = 0x04; 4200 4201 public static final int AACObjectMain = 1; 4202 public static final int AACObjectLC = 2; 4203 public static final int AACObjectSSR = 3; 4204 public static final int AACObjectLTP = 4; 4205 public static final int AACObjectHE = 5; 4206 public static final int AACObjectScalable = 6; 4207 public static final int AACObjectERLC = 17; 4208 public static final int AACObjectERScalable = 20; 4209 public static final int AACObjectLD = 23; 4210 public static final int AACObjectHE_PS = 29; 4211 public static final int AACObjectELD = 39; 4212 /** xHE-AAC (includes USAC) */ 4213 public static final int AACObjectXHE = 42; 4214 4215 public static final int VP8Level_Version0 = 0x01; 4216 public static final int VP8Level_Version1 = 0x02; 4217 public static final int VP8Level_Version2 = 0x04; 4218 public static final int VP8Level_Version3 = 0x08; 4219 4220 public static final int VP8ProfileMain = 0x01; 4221 4222 /** VP9 Profile 0 4:2:0 8-bit */ 4223 public static final int VP9Profile0 = 0x01; 4224 4225 /** VP9 Profile 1 4:2:2 8-bit */ 4226 public static final int VP9Profile1 = 0x02; 4227 4228 /** VP9 Profile 2 4:2:0 10-bit */ 4229 public static final int VP9Profile2 = 0x04; 4230 4231 /** VP9 Profile 3 4:2:2 10-bit */ 4232 public static final int VP9Profile3 = 0x08; 4233 4234 // HDR profiles also support passing HDR metadata 4235 /** VP9 Profile 2 4:2:0 10-bit HDR */ 4236 public static final int VP9Profile2HDR = 0x1000; 4237 4238 /** VP9 Profile 3 4:2:2 10-bit HDR */ 4239 public static final int VP9Profile3HDR = 0x2000; 4240 4241 /** VP9 Profile 2 4:2:0 10-bit HDR10Plus */ 4242 public static final int VP9Profile2HDR10Plus = 0x4000; 4243 4244 /** VP9 Profile 3 4:2:2 10-bit HDR10Plus */ 4245 public static final int VP9Profile3HDR10Plus = 0x8000; 4246 4247 public static final int VP9Level1 = 0x1; 4248 public static final int VP9Level11 = 0x2; 4249 public static final int VP9Level2 = 0x4; 4250 public static final int VP9Level21 = 0x8; 4251 public static final int VP9Level3 = 0x10; 4252 public static final int VP9Level31 = 0x20; 4253 public static final int VP9Level4 = 0x40; 4254 public static final int VP9Level41 = 0x80; 4255 public static final int VP9Level5 = 0x100; 4256 public static final int VP9Level51 = 0x200; 4257 public static final int VP9Level52 = 0x400; 4258 public static final int VP9Level6 = 0x800; 4259 public static final int VP9Level61 = 0x1000; 4260 public static final int VP9Level62 = 0x2000; 4261 4262 public static final int HEVCProfileMain = 0x01; 4263 public static final int HEVCProfileMain10 = 0x02; 4264 public static final int HEVCProfileMainStill = 0x04; 4265 public static final int HEVCProfileMain10HDR10 = 0x1000; 4266 public static final int HEVCProfileMain10HDR10Plus = 0x2000; 4267 4268 public static final int HEVCMainTierLevel1 = 0x1; 4269 public static final int HEVCHighTierLevel1 = 0x2; 4270 public static final int HEVCMainTierLevel2 = 0x4; 4271 public static final int HEVCHighTierLevel2 = 0x8; 4272 public static final int HEVCMainTierLevel21 = 0x10; 4273 public static final 
int HEVCHighTierLevel21 = 0x20; 4274 public static final int HEVCMainTierLevel3 = 0x40; 4275 public static final int HEVCHighTierLevel3 = 0x80; 4276 public static final int HEVCMainTierLevel31 = 0x100; 4277 public static final int HEVCHighTierLevel31 = 0x200; 4278 public static final int HEVCMainTierLevel4 = 0x400; 4279 public static final int HEVCHighTierLevel4 = 0x800; 4280 public static final int HEVCMainTierLevel41 = 0x1000; 4281 public static final int HEVCHighTierLevel41 = 0x2000; 4282 public static final int HEVCMainTierLevel5 = 0x4000; 4283 public static final int HEVCHighTierLevel5 = 0x8000; 4284 public static final int HEVCMainTierLevel51 = 0x10000; 4285 public static final int HEVCHighTierLevel51 = 0x20000; 4286 public static final int HEVCMainTierLevel52 = 0x40000; 4287 public static final int HEVCHighTierLevel52 = 0x80000; 4288 public static final int HEVCMainTierLevel6 = 0x100000; 4289 public static final int HEVCHighTierLevel6 = 0x200000; 4290 public static final int HEVCMainTierLevel61 = 0x400000; 4291 public static final int HEVCHighTierLevel61 = 0x800000; 4292 public static final int HEVCMainTierLevel62 = 0x1000000; 4293 public static final int HEVCHighTierLevel62 = 0x2000000; 4294 4295 private static final int HEVCHighTierLevels = 4296 HEVCHighTierLevel1 | HEVCHighTierLevel2 | HEVCHighTierLevel21 | HEVCHighTierLevel3 | 4297 HEVCHighTierLevel31 | HEVCHighTierLevel4 | HEVCHighTierLevel41 | HEVCHighTierLevel5 | 4298 HEVCHighTierLevel51 | HEVCHighTierLevel52 | HEVCHighTierLevel6 | HEVCHighTierLevel61 | 4299 HEVCHighTierLevel62; 4300 4301 public static final int DolbyVisionProfileDvavPer = 0x1; 4302 public static final int DolbyVisionProfileDvavPen = 0x2; 4303 public static final int DolbyVisionProfileDvheDer = 0x4; 4304 public static final int DolbyVisionProfileDvheDen = 0x8; 4305 public static final int DolbyVisionProfileDvheDtr = 0x10; 4306 public static final int DolbyVisionProfileDvheStn = 0x20; 4307 public static final int DolbyVisionProfileDvheDth = 0x40; 4308 public static final int DolbyVisionProfileDvheDtb = 0x80; 4309 public static final int DolbyVisionProfileDvheSt = 0x100; 4310 public static final int DolbyVisionProfileDvavSe = 0x200; 4311 /** Dolby Vision AV1 profile */ 4312 @SuppressLint("AllUpper") 4313 public static final int DolbyVisionProfileDvav110 = 0x400; 4314 4315 public static final int DolbyVisionLevelHd24 = 0x1; 4316 public static final int DolbyVisionLevelHd30 = 0x2; 4317 public static final int DolbyVisionLevelFhd24 = 0x4; 4318 public static final int DolbyVisionLevelFhd30 = 0x8; 4319 public static final int DolbyVisionLevelFhd60 = 0x10; 4320 public static final int DolbyVisionLevelUhd24 = 0x20; 4321 public static final int DolbyVisionLevelUhd30 = 0x40; 4322 public static final int DolbyVisionLevelUhd48 = 0x80; 4323 public static final int DolbyVisionLevelUhd60 = 0x100; 4324 @SuppressLint("AllUpper") 4325 public static final int DolbyVisionLevelUhd120 = 0x200; 4326 @SuppressLint("AllUpper") 4327 public static final int DolbyVisionLevel8k30 = 0x400; 4328 @SuppressLint("AllUpper") 4329 public static final int DolbyVisionLevel8k60 = 0x800; 4330 4331 // Profiles and levels for AV1 Codec, corresponding to the definitions in 4332 // "AV1 Bitstream & Decoding Process Specification", Annex A 4333 // found at https://aomedia.org/av1-bitstream-and-decoding-process-specification/ 4334 4335 /** 4336 * AV1 Main profile 4:2:0 8-bit 4337 * 4338 * See definition in 4339 * <a href="https://aomedia.org/av1-bitstream-and-decoding-process-specification/">AV1 
Specification</a> 4340 * Annex A. 4341 */ 4342 public static final int AV1ProfileMain8 = 0x1; 4343 4344 /** 4345 * AV1 Main profile 4:2:0 10-bit 4346 * 4347 * See definition in 4348 * <a href="https://aomedia.org/av1-bitstream-and-decoding-process-specification/">AV1 Specification</a> 4349 * Annex A. 4350 */ 4351 public static final int AV1ProfileMain10 = 0x2; 4352 4353 4354 /** AV1 Main profile 4:2:0 10-bit with HDR10. */ 4355 public static final int AV1ProfileMain10HDR10 = 0x1000; 4356 4357 /** AV1 Main profile 4:2:0 10-bit with HDR10Plus. */ 4358 public static final int AV1ProfileMain10HDR10Plus = 0x2000; 4359 4360 public static final int AV1Level2 = 0x1; 4361 public static final int AV1Level21 = 0x2; 4362 public static final int AV1Level22 = 0x4; 4363 public static final int AV1Level23 = 0x8; 4364 public static final int AV1Level3 = 0x10; 4365 public static final int AV1Level31 = 0x20; 4366 public static final int AV1Level32 = 0x40; 4367 public static final int AV1Level33 = 0x80; 4368 public static final int AV1Level4 = 0x100; 4369 public static final int AV1Level41 = 0x200; 4370 public static final int AV1Level42 = 0x400; 4371 public static final int AV1Level43 = 0x800; 4372 public static final int AV1Level5 = 0x1000; 4373 public static final int AV1Level51 = 0x2000; 4374 public static final int AV1Level52 = 0x4000; 4375 public static final int AV1Level53 = 0x8000; 4376 public static final int AV1Level6 = 0x10000; 4377 public static final int AV1Level61 = 0x20000; 4378 public static final int AV1Level62 = 0x40000; 4379 public static final int AV1Level63 = 0x80000; 4380 public static final int AV1Level7 = 0x100000; 4381 public static final int AV1Level71 = 0x200000; 4382 public static final int AV1Level72 = 0x400000; 4383 public static final int AV1Level73 = 0x800000; 4384 4385 /** DTS codec profile for DTS HRA. */ 4386 @SuppressLint("AllUpper") 4387 public static final int DTS_HDProfileHRA = 0x1; 4388 /** DTS codec profile for DTS Express. 

        /** DTS codec profile for DTS HRA. */
        @SuppressLint("AllUpper")
        public static final int DTS_HDProfileHRA = 0x1;
        /** DTS codec profile for DTS Express. */
        @SuppressLint("AllUpper")
        public static final int DTS_HDProfileLBR = 0x2;
        /** DTS codec profile for DTS-HD Master Audio. */
        @SuppressLint("AllUpper")
        public static final int DTS_HDProfileMA = 0x4;
        /** DTS codec profile for DTS:X Profile 1. */
        @SuppressLint("AllUpper")
        public static final int DTS_UHDProfileP1 = 0x1;
        /** DTS codec profile for DTS:X Profile 2. */
        @SuppressLint("AllUpper")
        public static final int DTS_UHDProfileP2 = 0x2;

        // Profiles and levels for AC-4 Codec, corresponding to the definitions in
        // "The MIME codecs parameter", Annex E.13
        // found at https://www.etsi.org/deliver/etsi_ts/103100_103199/10319002/01.02.01_60/ts_10319002v010201p.pdf
        // profile = ((1 << bitstream_version) << 8) | (1 << presentation_version);
        // level = 1 << mdcompat;

        @SuppressLint("AllUpper")
        private static final int AC4BitstreamVersion0 = 0x01;
        @SuppressLint("AllUpper")
        private static final int AC4BitstreamVersion1 = 0x02;
        @SuppressLint("AllUpper")
        private static final int AC4BitstreamVersion2 = 0x04;

        @SuppressLint("AllUpper")
        private static final int AC4PresentationVersion0 = 0x01;
        @SuppressLint("AllUpper")
        private static final int AC4PresentationVersion1 = 0x02;
        @SuppressLint("AllUpper")
        private static final int AC4PresentationVersion2 = 0x04;

        /**
         * AC-4 codec profile with bitstream_version 0 and presentation_version 0
         * as per ETSI TS 103 190-2 v1.2.1
         */
        @SuppressLint("AllUpper")
        public static final int AC4Profile00 = AC4BitstreamVersion0 << 8 | AC4PresentationVersion0;

        /**
         * AC-4 codec profile with bitstream_version 1 and presentation_version 0
         * as per ETSI TS 103 190-2 v1.2.1
         */
        @SuppressLint("AllUpper")
        public static final int AC4Profile10 = AC4BitstreamVersion1 << 8 | AC4PresentationVersion0;

        /**
         * AC-4 codec profile with bitstream_version 1 and presentation_version 1
         * as per ETSI TS 103 190-2 v1.2.1
         */
        @SuppressLint("AllUpper")
        public static final int AC4Profile11 = AC4BitstreamVersion1 << 8 | AC4PresentationVersion1;

        /**
         * AC-4 codec profile with bitstream_version 2 and presentation_version 1
         * as per ETSI TS 103 190-2 v1.2.1
         */
        @SuppressLint("AllUpper")
        public static final int AC4Profile21 = AC4BitstreamVersion2 << 8 | AC4PresentationVersion1;

        /**
         * AC-4 codec profile with bitstream_version 2 and presentation_version 2
         * as per ETSI TS 103 190-2 v1.2.1
         */
        @SuppressLint("AllUpper")
        public static final int AC4Profile22 = AC4BitstreamVersion2 << 8 | AC4PresentationVersion2;

        /** AC-4 codec level corresponding to mdcompat 0 as per ETSI TS 103 190-2 v1.2.1 */
        @SuppressLint("AllUpper")
        public static final int AC4Level0 = 0x01;
        /** AC-4 codec level corresponding to mdcompat 1 as per ETSI TS 103 190-2 v1.2.1 */
        @SuppressLint("AllUpper")
        public static final int AC4Level1 = 0x02;
        /** AC-4 codec level corresponding to mdcompat 2 as per ETSI TS 103 190-2 v1.2.1 */
        @SuppressLint("AllUpper")
        public static final int AC4Level2 = 0x04;
        /** AC-4 codec level corresponding to mdcompat 3 as per ETSI TS 103 190-2 v1.2.1 */
        @SuppressLint("AllUpper")
        public static final int AC4Level3 = 0x08;
        /** AC-4 codec level corresponding to mdcompat 4 as per ETSI TS 103 190-2 v1.2.1 */
        @SuppressLint("AllUpper")
        public static final int AC4Level4 = 0x10;
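
        // Illustrative sketch, not part of the platform API: per the encoding noted above,
        // profile = ((1 << bitstream_version) << 8) | (1 << presentation_version) and
        // level = 1 << mdcompat, so the raw AC-4 field values can be recovered with
        // Integer.numberOfTrailingZeros(). The helper names are assumptions for this example only.
        private static int ac4BitstreamVersion(int ac4Profile) {
            // e.g. AC4Profile21 = 0x0402 -> bitstream_version 2
            return Integer.numberOfTrailingZeros(ac4Profile >>> 8);
        }

        private static int ac4PresentationVersion(int ac4Profile) {
            // e.g. AC4Profile21 = 0x0402 -> presentation_version 1
            return Integer.numberOfTrailingZeros(ac4Profile & 0xff);
        }

        private static int ac4Mdcompat(int ac4Level) {
            // e.g. AC4Level3 = 0x08 -> mdcompat 3
            return Integer.numberOfTrailingZeros(ac4Level);
        }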

        /**
         * The profile of the media content. Depending on the type of media this can be
         * one of the profile values defined in this class.
         */
        public int profile;

        /**
         * The level of the media content. Depending on the type of media this can be
         * one of the level values defined in this class.
         *
         * Note that the VP9 decoder on platforms before {@link android.os.Build.VERSION_CODES#N}
         * may not advertise profile level support. For those VP9 decoders, please use
         * {@link VideoCapabilities} to determine the codec capabilities.
         */
        public int level;

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (obj instanceof CodecProfileLevel) {
                CodecProfileLevel other = (CodecProfileLevel) obj;
                return other.profile == profile && other.level == level;
            }
            return false;
        }

        @Override
        public int hashCode() {
            return Long.hashCode(((long) profile << Integer.SIZE) | level);
        }
    };

    /**
     * Enumerates the capabilities of the codec component. Since a single
     * component can support data of a variety of types, the type has to be
     * specified to yield a meaningful result.
     * @param type The MIME type to query
     */
    public final CodecCapabilities getCapabilitiesForType(String type) {
        CodecCapabilities caps = mCaps.get(type);
        if (caps == null) {
            throw new IllegalArgumentException("codec does not support type");
        }
        // clone writable object
        return caps.dup();
    }

    /** @hide */
    public MediaCodecInfo makeRegular() {
        ArrayList<CodecCapabilities> caps = new ArrayList<CodecCapabilities>();
        for (CodecCapabilities c : mCaps.values()) {
            if (c.isRegular()) {
                caps.add(c);
            }
        }
        if (caps.size() == 0) {
            return null;
        } else if (caps.size() == mCaps.size()) {
            return this;
        }

        return new MediaCodecInfo(
                mName, mCanonicalName, mFlags,
                caps.toArray(new CodecCapabilities[caps.size()]));
    }
}
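
// Illustrative sketch, not part of the platform sources: a defensive wrapper around
// MediaCodecInfo#getCapabilitiesForType, which throws IllegalArgumentException when the codec
// does not list the requested type. The class and method names below are assumptions made for
// this example only.
final class MediaCodecInfoExample {
    private MediaCodecInfoExample() {}

    static MediaCodecInfo.CodecCapabilities getCapabilitiesOrNull(
            MediaCodecInfo info, String mimeType) {
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)) {
                // getCapabilitiesForType() returns a writable copy, so callers may modify
                // the returned object freely.
                return info.getCapabilitiesForType(type);
            }
        }
        // The codec does not list this type; return null instead of letting
        // getCapabilitiesForType() throw.
        return null;
    }
}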