/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.CompositionTextureView;
import android.media.cts.InputSurface;
import android.media.cts.OutputSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TableLayout;
import android.widget.TableRow;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Impl class for tests using MediaCodec encoding with composition of multiple virtual displays.
 * The general flow: virtual display presentations are rendered through a GL compositor into a
 * surface backing a MediaCodec encoder, the encoded stream is fed into a decoder, and the decoded
 * frames are checked against the expected colors.
 */
public class EncodeVirtualDisplayWithCompositionTestImpl {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTestImpl";
    private static final boolean DBG = false;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 10000; // 10 seconds
    private static final long DEQUEUE_TIMEOUT_US = 3000000; // 3 seconds

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    public static final int BITRATE_1080p = 20000000;
    public static final int BITRATE_720p = 14000000;
    public static final int BITRATE_800x480 = 14000000;
    public static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** event listener for test without verifying output */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w width of the encoded video
     * @param h height of the encoded video
     * @throws Throwable
     */
    public void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows)
            throws Throwable {
        runTestRenderingInSeparateThread(
                context, mimeType, w, h, runRemotely, multipleWindows, /* degrees */ 0, null);
    }

    public void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows,
            final int degrees, final String decoderName) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(
                            context, mimeType, w, h, runRemotely, multipleWindows,
                            degrees, decoderName);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(final Context context, String mimeType, int w, int h,
            boolean runRemotely, boolean multipleWindows, int degrees,
            String decoderName) throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for type:" + mimeType + " w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            if (decoderName == null) {
                mDecoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                mDecoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            if (degrees != 0) {
                decoderFormat.setInteger(MediaFormat.KEY_ROTATION, degrees);
            }
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            // only scale to fit scaling mode is supported
            mDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(mimeType, w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex = mDecoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor(context);
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            Surface windowSurface = compositor.getWindowSurface(multipleWindows ? 1 : 0);
            if (runRemotely) {
                mRemotePresentation =
                        new RemoteVirtualDisplayPresentation(context, windowSurface, w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = (degrees == 0)
                        ? new VirtualDisplayPresentation(context, windowSurface, w, h)
                        : new RotateVirtualDisplayPresentation(context, windowSurface, w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            if (degrees == 0) {
                renderColorAndCheckResult(renderer, w, h, COLOR_RED);
                renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREY);
            } else {
                renderRotationAndCheckResult(renderer, w, h, degrees);
            }

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private void renderRotationAndCheckResult(Renderer renderer, int w, int h,
            int degrees) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(-1);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkRotatedFrameQuadrants(w, h, degrees)) {
                    Log.i(TAG, "output rotated " + degrees + " degrees");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Frame not properly rotated");
    }

    private boolean checkRotatedFrameQuadrants(int w, int h, int degrees) {
        // Read a pixel from each quadrant of the surface.
        int ww = w / 4;
        int hh = h / 4;
        // coords is ordered counter clockwise (note, gl 0,0 is bottom left)
        int[][] coords = new int[][] {{ww, hh}, {ww * 3, hh}, {ww * 3, hh * 3}, {ww, hh * 3}};
        List<Integer> expected = new ArrayList<>();
        List<Integer> colors = Arrays.asList(
                new Integer[] {COLOR_GREEN, COLOR_BLUE, COLOR_RED, COLOR_GREY});
        expected.addAll(colors);
        expected.addAll(colors);
        int offset = (degrees / 90) % 4;
        for (int i = 0; i < coords.length; i++) {
            int[] c = coords[i];
            int x = c[0];
            int y = c[1];
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
            int r = mPixelBuf.get(0) & 0xff;
            int g = mPixelBuf.get(1) & 0xff;
            int b = mPixelBuf.get(2) & 0xff;
            // adding the offset to rotate expected colors clockwise
            int color = expected.get(offset + i);
            int redExpected = (color >> 16) & 0xff;
            int greenExpected = (color >> 8) & 0xff;
            int blueExpected = color & 0xff;
            Log.i(TAG, String.format("(%d,%d) expecting %d,%d,%d saw %d,%d,%d",
                    x, y, redExpected, greenExpected, blueExpected, r, g, b));
            if (!approxEquals(redExpected, r) || !approxEquals(greenExpected, g)
                    || !approxEquals(blueExpected, b)) {
                return false;
            }
        }
        return true;
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        // allow differences between BT.601 and BT.709 conversions during encoding/decoding for now
        final int MAX_DELTA = 17;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    public void doTestVirtualDisplayRecycles(final Context context, int numDisplays)
            throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        MIME_TYPE, maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor(context);
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                                new VirtualDisplayPresentation(context,
                                        compositor.getWindowSurface(k),
                                        maxSize.getWidth() / numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
    }

    /**
     * Runs a Surface-input video encoder on its own thread and forwards codec config data and
     * encoded buffers to an {@link EncoderEventListener}.
     */
    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private String mMimeType;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);
        private Exception mEncodingError;

        Surface startEncoding(String mimeType, int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mMimeType = mimeType;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingError = null;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        // Throwing the exception here will crash the thread and subsequently the
                        // entire test process. We save it here and throw later in stopEncoding().
                        mEncodingError = e;
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() throws Exception {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
            // Throw here if any error occurred in the encoding thread.
            if (mEncodingError != null) {
                throw mEncodingError;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException("no encoder found supporting format " + format);
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTextures into a single Surface.
     */
    private static class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private final Context mContext;
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        public GlCompositor(Context context) {
            mContext = context;
        }

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }

        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType); 758 GLES20.glShaderSource(shader, source); 759 GLES20.glCompileShader(shader); 760 int[] compiled = new int[1]; 761 GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); 762 if (compiled[0] == 0) { 763 Log.e(TAG, "Could not compile shader " + shaderType + ":"); 764 Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); 765 GLES20.glDeleteShader(shader); 766 shader = 0; 767 } 768 return shader; 769 } 770 createProgram(String vertexSource, String fragmentSource)771 private int createProgram(String vertexSource, String fragmentSource) throws GlException { 772 int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 773 if (vertexShader == 0) { 774 return 0; 775 } 776 int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 777 if (pixelShader == 0) { 778 return 0; 779 } 780 781 int program = GLES20.glCreateProgram(); 782 checkGlError("glCreateProgram"); 783 if (program == 0) { 784 Log.e(TAG, "Could not create program"); 785 } 786 GLES20.glAttachShader(program, vertexShader); 787 checkGlError("glAttachShader"); 788 GLES20.glAttachShader(program, pixelShader); 789 checkGlError("glAttachShader"); 790 GLES20.glLinkProgram(program); 791 int[] linkStatus = new int[1]; 792 GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 793 if (linkStatus[0] != GLES20.GL_TRUE) { 794 Log.e(TAG, "Could not link program: "); 795 Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 796 GLES20.glDeleteProgram(program); 797 program = 0; 798 } 799 return program; 800 } 801 initGl()802 private void initGl() throws GlException { 803 mEglHelper = new InputSurface(mSurface); 804 mEglHelper.makeCurrent(); 805 mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER); 806 mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition"); 807 checkGlError("glGetAttribLocation aPosition"); 808 if (mGlaPositionHandle == -1) { 809 throw new RuntimeException("Could not get attrib location for aPosition"); 810 } 811 mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord"); 812 checkGlError("glGetAttribLocation aTextureCoord"); 813 if (mGlaTextureHandle == -1) { 814 throw new RuntimeException("Could not get attrib location for aTextureCoord"); 815 } 816 mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix"); 817 checkGlError("glGetUniformLocation uMVPMatrix"); 818 if (mGluMVPMatrixHandle == -1) { 819 throw new RuntimeException("Could not get attrib location for uMVPMatrix"); 820 } 821 mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix"); 822 checkGlError("glGetUniformLocation uSTMatrix"); 823 if (mGluSTMatrixHandle == -1) { 824 throw new RuntimeException("Could not get attrib location for uSTMatrix"); 825 } 826 Matrix.setIdentityM(mMVPMatrix, 0); 827 Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight); 828 GLES20.glViewport(0, 0, mWidth, mHeight); 829 float[] vMatrix = new float[16]; 830 float[] projMatrix = new float[16]; 831 // max window is from (0,0) to (mWidth - 1, mHeight - 1) 832 float wMid = mWidth / 2f; 833 float hMid = mHeight / 2f; 834 // look from positive z to hide windows in lower z 835 Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f); 836 Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10); 837 Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0); 838 createWindows(); 839 840 } 841 createWindows()842 private void createWindows() throws GlException { 843 mTopWindow = new GlWindow(this, 0, 0, 
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during tearing down
                }
            }
        }

        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of bottom-left point of window
             * @param blY Y coordinate of bottom-left point of window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initialize the window for composition. The counterpart is cleanup().
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * Mark the texture as updated so that it is picked up in the next rendering pass.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Update the texture for rendering if new frames have arrived.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix for texture coordinates
             *        mapping
             * @param aPositionHandle shader handle for vertex position.
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class RotateVirtualDisplayPresentation extends VirtualDisplayPresentation {

        RotateVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            super(context, surface, w, h);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TestRotatePresentation(mContext, mVirtualDisplay.getDisplay());
        }

    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

    private static class TestRotatePresentation extends TestPresentationBase {
        static final int[] kColors = new int[] {COLOR_GREY, COLOR_RED, COLOR_GREEN, COLOR_BLUE};
        private final ImageView[] mQuadrants = new ImageView[4];

        public TestRotatePresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            Context ctx = getContext();
            TableLayout table = new TableLayout(ctx);
            ViewGroup.LayoutParams fill = new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
            TableLayout.LayoutParams fillTable = new TableLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            TableRow.LayoutParams fillRow = new TableRow.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            table.setLayoutParams(fill);
            table.setStretchAllColumns(true);
            TableRow rows[] = new TableRow[] {new TableRow(ctx), new TableRow(ctx)};
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i] = new ImageView(ctx);
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
                rows[i / 2].addView(mQuadrants[i], fillRow);
            }
            for (TableRow row: rows) {
                table.addView(row, fillTable);
            }
            setContentView(table);
            Log.v(TAG, "setContentView(table)");
        }

        @Override
        public void doRendering(int color) {
            Log.v(TAG, "doRendering: ignoring color: " + Integer.toHexString(color));
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
            }
        }

    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }

        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.codec.cts",
                    "android.media.codec.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }

        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
            new Size(1920, 1080),
            new Size(1280, 720),
            new Size(720, 480),
            new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG, "format = " + format.toString());
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Check the maximum concurrent encoding / decoding resolution allowed.
     * Some H/Ws cannot support the maximum resolution reported by the encoder while a decoder
     * is running at the same time.
     * The check is done for 4 different levels: 1080p, 720p, 800x480, 480p
     * (The last one is required by CDD.)
     */
    public Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    public boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate) {
        return isConcurrentEncodingDecodingSupported(mimeType, w, h, bitRate, null);
    }

    public boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate, String decoderName) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(mimeType, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            if (decoderName == null) {
                decoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                decoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(mimeType);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}