/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.encoder.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.cts.CodecUtils;
import android.media.cts.InputSurface;
import android.media.cts.MediaHeavyPresubmitTest;
import android.media.cts.MediaTestBase;
import android.media.cts.OutputSurface;
import android.media.cts.TestArgs;
import android.media.cts.TestUtils;
import android.net.Uri;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;
import android.view.Surface;

import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.CddTest;
import com.android.compatibility.common.util.MediaUtils;
import com.android.compatibility.common.util.Preconditions;

import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;

@MediaHeavyPresubmitTest
@AppModeFull(reason = "TODO: evaluate and port to instant")
@RunWith(Parameterized.class)
public class VideoEncoderTest extends MediaTestBase {
    private static final int MAX_SAMPLE_SIZE = 256 * 1024;
    private static final String TAG = "VideoEncoderTest";
    private static final long FRAME_TIMEOUT_MS = 1000;
    // use larger delay before we get first frame, some encoders may need more time
    private static final long INIT_TIMEOUT_MS = 2000;

    static final String mInpPrefix = WorkDir.getMediaDirString();
    private static final String SOURCE_URL =
            mInpPrefix + "video_480x360_mp4_h264_871kbps_30fps.mp4";

    private final Encoder mEncHandle;
    private final int mWidth;
    private final int mHeight;
    private final boolean mFlexYuv;
    private final TestMode mMode;
    private final boolean DEBUG = false;

    enum TestMode {
        TEST_MODE_SPECIFIC, // test basic encoding for given configuration
        TEST_MODE_DETAILED, // test detailed encoding for given configuration
        TEST_MODE_INTRAREFRESH // test intra refresh
    }

    @Before
    @Override
    public void setUp() throws Throwable {
        super.setUp();
    }

    @After
    @Override
    public void tearDown() {
        super.tearDown();
    }


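    // Collects the encoded bitstream produced by a test run so that it can later be decoded
    // and played back through every suitable decoder (see playAll()).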
    class VideoStorage {
        private LinkedList<Pair<ByteBuffer, BufferInfo>> mStream;
        private MediaFormat mFormat;
        private int mInputBufferSize;
        // Media buffers (no CSD, no EOS) enqueued.
        private int mMediaBuffersEnqueuedCount;
        // Media buffers decoded.
        private int mMediaBuffersDecodedCount;
        private final AtomicReference<String> errorMsg = new AtomicReference<>(null);

        public VideoStorage() {
            mStream = new LinkedList<Pair<ByteBuffer, BufferInfo>>();
        }

        public void setFormat(MediaFormat format) {
            mFormat = format;
        }

        public void addBuffer(ByteBuffer buffer, BufferInfo info) {
            ByteBuffer savedBuffer = ByteBuffer.allocate(info.size);
            savedBuffer.put(buffer);
            if (info.size > mInputBufferSize) {
                mInputBufferSize = info.size;
            }
            BufferInfo savedInfo = new BufferInfo();
            savedInfo.set(0, savedBuffer.position(), info.presentationTimeUs, info.flags);
            mStream.addLast(Pair.create(savedBuffer, savedInfo));
            if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                ++mMediaBuffersEnqueuedCount;
            }
        }

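        // Decodes the stored stream asynchronously on the given decoder and blocks until EOS
        // (or an error) is reported, then verifies that every enqueued media buffer was decoded.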
        private void play(MediaCodec decoder, Surface surface) {
            decoder.reset();
            final Object condition = new Object();
            final Iterator<Pair<ByteBuffer, BufferInfo>> it = mStream.iterator();
            decoder.setCallback(new MediaCodec.Callback() {
                public void onOutputBufferAvailable(MediaCodec codec, int ix, BufferInfo info) {
                    if (info.size > 0) {
                        ++mMediaBuffersDecodedCount;
                    }
                    codec.releaseOutputBuffer(ix, info.size > 0);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        synchronized (condition) {
                            condition.notifyAll();
                        }
                    }
                }
                public void onInputBufferAvailable(MediaCodec codec, int ix) {
                    if (it.hasNext()) {
                        try {
                            Pair<ByteBuffer, BufferInfo> el = it.next();
                            el.first.clear();
                            try {
                                codec.getInputBuffer(ix).put(el.first);
                            } catch (java.nio.BufferOverflowException e) {
                                String diagnostic = "cannot fit " + el.first.limit()
                                        + "-byte encoded buffer into "
                                        + codec.getInputBuffer(ix).remaining()
                                        + "-byte input buffer of " + codec.getName()
                                        + " configured for " + codec.getInputFormat();
                                Log.e(TAG, diagnostic);
                                errorMsg.set(diagnostic + e);
                                synchronized (condition) {
                                    condition.notifyAll();
                                }
                                // no sense trying to enqueue the failed buffer
                                return;
                            }
                            BufferInfo info = el.second;
                            codec.queueInputBuffer(
                                    ix, 0, info.size, info.presentationTimeUs, info.flags);
                        } catch (Throwable t) {
                            errorMsg.set("exception in onInputBufferAvailable( "
                                    + codec.getName() + "," + ix
                                    + "): " + t);
                            synchronized (condition) {
                                condition.notifyAll();
                            }
                        }
                    }
                }
                public void onError(MediaCodec codec, MediaCodec.CodecException e) {
                    Log.i(TAG, "got codec exception", e);
                    errorMsg.set("received codec error during decode" + e);
                    synchronized (condition) {
                        condition.notifyAll();
                    }
                }
                public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                    Log.i(TAG, "got output format " + format);
                }
            });
            mFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mInputBufferSize);
            decoder.configure(mFormat, surface, null /* crypto */, 0 /* flags */);
            decoder.start();
            synchronized (condition) {
                try {
                    condition.wait();
                } catch (InterruptedException e) {
                    fail("playback interrupted");
                }
            }
            decoder.stop();
            assertNull(errorMsg.get(), errorMsg.get());
            // All enqueued media data buffers should have been decoded.
            if (mMediaBuffersEnqueuedCount != mMediaBuffersDecodedCount) {
                Log.i(TAG, "mMediaBuffersEnqueuedCount:" + mMediaBuffersEnqueuedCount);
                Log.i(TAG, "mMediaBuffersDecodedCount:" + mMediaBuffersDecodedCount);
                fail("not all enqueued encoded media buffers were decoded");
            }
            mMediaBuffersDecodedCount = 0;
        }

        public boolean playAll(Surface surface) {
            boolean skipped = true;
            if (mFormat == null) {
                Log.i(TAG, "no stream to play");
                return !skipped;
            }
            String mime = mFormat.getString(MediaFormat.KEY_MIME);
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            for (MediaCodecInfo info : mcl.getCodecInfos()) {
                if (info.isEncoder() || info.isAlias()) {
                    continue;
                }
                MediaCodec codec = null;
                try {
                    CodecCapabilities caps = info.getCapabilitiesForType(mime);
                    if (!caps.isFormatSupported(mFormat)) {
                        continue;
                    }
                    codec = MediaCodec.createByCodecName(info.getName());
                } catch (IllegalArgumentException | IOException e) {
                    continue;
                }
                play(codec, surface);
                codec.release();
                skipped = false;
            }
            return !skipped;
        }
    }

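    // Common decode-and-re-encode harness driven by MediaCodec callbacks: the input clip is
    // decoded and the frames are fed back into the encoder under test. Subclasses decide how
    // frames travel from decoder to encoder (flexible YUV image copies vs. a Surface).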
    abstract class VideoProcessorBase extends MediaCodec.Callback {
        private static final String TAG = "VideoProcessorBase";

        /*
         * Set this to true to save the encoding results to /data/local/tmp
         * You will need to make /data/local/tmp writeable, run "setenforce 0",
         * and remove files left from a previous run.
         */
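        // For example, one possible (untested) shell sequence to prepare the device:
        //   adb root
        //   adb shell setenforce 0
        //   adb shell "chmod 777 /data/local/tmp && rm -f /data/local/tmp/VideoProcessor*"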
        private boolean mSaveResults = false;
        private static final String FILE_DIR = "/data/local/tmp";
        protected int mMuxIndex = -1;

        protected String mProcessorName = "VideoProcessor";
        private MediaExtractor mExtractor;
        protected MediaMuxer mMuxer;
        private ByteBuffer mBuffer = ByteBuffer.allocate(MAX_SAMPLE_SIZE);
        protected int mTrackIndex = -1;
        private boolean mSignaledDecoderEOS;

        protected boolean mCompleted;
        protected boolean mEncoderIsActive;
        protected boolean mEncodeOutputFormatUpdated;
        protected final Object mCondition = new Object();
        protected final Object mCodecLock = new Object();

        protected MediaFormat mDecFormat;
        protected MediaCodec mDecoder, mEncoder;

        private VideoStorage mEncodedStream;
        protected int mFrameRate = 0;
        protected int mBitRate = 0;

        protected Function<MediaFormat, Boolean> mUpdateConfigFormatHook;
        protected Function<MediaFormat, Boolean> mCheckOutputFormatHook;

        public void setProcessorName(String name) {
            mProcessorName = name;
        }

        public void setUpdateConfigHook(Function<MediaFormat, Boolean> hook) {
            mUpdateConfigFormatHook = hook;
        }

        public void setCheckOutputFormatHook(Function<MediaFormat, Boolean> hook) {
            mCheckOutputFormatHook = hook;
        }

        protected void open(String path) throws IOException {
            mExtractor = new MediaExtractor();
            if (path.startsWith("android.resource://")) {
                mExtractor.setDataSource(mContext, Uri.parse(path), null);
            } else {
                mExtractor.setDataSource(path);
            }

            for (int i = 0; i < mExtractor.getTrackCount(); i++) {
                MediaFormat fmt = mExtractor.getTrackFormat(i);
                String mime = fmt.getString(MediaFormat.KEY_MIME).toLowerCase();
                if (mime.startsWith("video/")) {
                    mTrackIndex = i;
                    mDecFormat = fmt;
                    mExtractor.selectTrack(i);
                    break;
                }
            }
            mEncodedStream = new VideoStorage();
            assertTrue("file " + path + " has no video", mTrackIndex >= 0);
        }

        // returns true if encoder supports the size
        protected boolean initCodecsAndConfigureEncoder(
                String videoEncName, String outMime, int width, int height,
                int colorFormat) throws IOException {
            mDecFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String videoDecName = mcl.findDecoderForFormat(mDecFormat);
            Log.i(TAG, "decoder for " + mDecFormat + " is " + videoDecName);
            mDecoder = MediaCodec.createByCodecName(videoDecName);
            mEncoder = MediaCodec.createByCodecName(videoEncName);

            mDecoder.setCallback(this);
            mEncoder.setCallback(this);

            VideoCapabilities encCaps =
                    mEncoder.getCodecInfo().getCapabilitiesForType(outMime).getVideoCapabilities();
            if (!encCaps.isSizeSupported(width, height)) {
                Log.i(TAG, videoEncName + " does not support size: " + width + "x" + height);
                return false;
            }

            MediaFormat outFmt = MediaFormat.createVideoFormat(outMime, width, height);
            int bitRate = 0;
            MediaUtils.setMaxEncoderFrameAndBitrates(encCaps, outFmt, 30);
            if (mFrameRate > 0) {
                outFmt.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
            }
            if (mBitRate > 0) {
                outFmt.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            }
            outFmt.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            outFmt.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            // Some extra configure before starting the encoder.
            if (mUpdateConfigFormatHook != null) {
                if (!mUpdateConfigFormatHook.apply(outFmt)) {
                    return false;
                }
            }
            mEncoder.configure(outFmt, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            Log.i(TAG, "encoder input format " + mEncoder.getInputFormat() + " from " + outFmt);
            if (mSaveResults) {
                try {
                    String outFileName =
                            FILE_DIR + "/" + mProcessorName + "_" + bitRate + "bps";
                    if (outMime.equals(MediaFormat.MIMETYPE_VIDEO_VP8) ||
                            outMime.equals(MediaFormat.MIMETYPE_VIDEO_VP9)) {
                        mMuxer = new MediaMuxer(
                                outFileName + ".webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM);
                    } else {
                        mMuxer = new MediaMuxer(
                                outFileName + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                    }
                    // The track can't be added until we have the codec specific data
                } catch (Exception e) {
                    Log.i(TAG, "couldn't create muxer: " + e);
                }
            }
            return true;
        }

        protected void close() {
            synchronized (mCodecLock) {
                if (mDecoder != null) {
                    mDecoder.release();
                    mDecoder = null;
                }
                if (mEncoder != null) {
                    mEncoder.release();
                    mEncoder = null;
                }
            }
            if (mExtractor != null) {
                mExtractor.release();
                mExtractor = null;
            }
            if (mMuxer != null) {
                mMuxer.stop();
                mMuxer.release();
                mMuxer = null;
            }
        }

        // returns true if filled buffer
        protected boolean fillDecoderInputBuffer(int ix) {
            if (DEBUG) Log.v(TAG, "decoder received input #" + ix);
            while (!mSignaledDecoderEOS) {
                int track = mExtractor.getSampleTrackIndex();
                if (track >= 0 && track != mTrackIndex) {
                    mExtractor.advance();
                    continue;
                }
                int size = mExtractor.readSampleData(mBuffer, 0);
                if (size < 0) {
                    // queue decoder input EOS
                    if (DEBUG) Log.v(TAG, "queuing decoder EOS");
                    mDecoder.queueInputBuffer(
                            ix, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    mSignaledDecoderEOS = true;
                } else {
                    mBuffer.limit(size);
                    mBuffer.position(0);
                    BufferInfo info = new BufferInfo();
                    info.set(
                            0, mBuffer.limit(), mExtractor.getSampleTime(),
                            mExtractor.getSampleFlags());
                    mDecoder.getInputBuffer(ix).put(mBuffer);
                    if (DEBUG) Log.v(TAG, "queuing input #" + ix + " for decoder with timestamp "
                            + info.presentationTimeUs);
                    mDecoder.queueInputBuffer(
                            ix, 0, mBuffer.limit(), info.presentationTimeUs, 0);
                }
                mExtractor.advance();
                return true;
            }
            return false;
        }

        protected void emptyEncoderOutputBuffer(int ix, BufferInfo info) {
            if (DEBUG) Log.v(TAG, "encoder received output #" + ix
                    + " (sz=" + info.size + ", f=" + info.flags
                    + ", ts=" + info.presentationTimeUs + ")");
            ByteBuffer outputBuffer = mEncoder.getOutputBuffer(ix);
            mEncodedStream.addBuffer(outputBuffer, info);

            if (mMuxer != null) {
                // reset position as addBuffer() modifies it
                outputBuffer.position(info.offset);
                outputBuffer.limit(info.offset + info.size);
                mMuxer.writeSampleData(mMuxIndex, outputBuffer, info);
            }

            if (!mCompleted) {
                mEncoder.releaseOutputBuffer(ix, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "encoder received output EOS");
                    synchronized (mCondition) {
                        mCompleted = true;
                        mCondition.notifyAll(); // condition is always satisfied
                    }
                } else {
                    synchronized (mCondition) {
                        mEncoderIsActive = true;
                    }
                }
            }
        }

        @SuppressWarnings("ReturnValueIgnored") // TODO: mCheckOutputFormatHook should be a Consumer
        protected void saveEncoderFormat(MediaFormat format) {
            mEncodedStream.setFormat(format);
            if (mCheckOutputFormatHook != null) {
                mCheckOutputFormatHook.apply(format);
            }
            if (mMuxer != null) {
                if (mMuxIndex < 0) {
                    mMuxIndex = mMuxer.addTrack(format);
                    mMuxer.start();
                }
            }
        }

        public boolean playBack(Surface surface) { return mEncodedStream.playAll(surface); }

        public void setFrameAndBitRates(int frameRate, int bitRate) {
            mFrameRate = frameRate;
            mBitRate = bitRate;
        }

        @Override
        public void onInputBufferAvailable(MediaCodec mediaCodec, int ix) {
            synchronized (mCodecLock) {
                if (mEncoder != null && mDecoder != null) {
                    onInputBufferAvailableLocked(mediaCodec, ix);
                }
            }
        }

        @Override
        public void onOutputBufferAvailable(
                MediaCodec mediaCodec, int ix, BufferInfo info) {
            synchronized (mCodecLock) {
                if (mEncoder != null && mDecoder != null) {
                    onOutputBufferAvailableLocked(mediaCodec, ix, info);
                }
            }
        }

        public abstract boolean processLoop(
                String path, String outMime, String videoEncName,
                int width, int height, boolean optional);
        protected abstract void onInputBufferAvailableLocked(
                MediaCodec mediaCodec, int ix);
        protected abstract void onOutputBufferAvailableLocked(
                MediaCodec mediaCodec, int ix, BufferInfo info);
    }

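    // ByteBuffer/Image variant: each decoded YUV_420_888 image is copied into an encoder input
    // image (COLOR_FormatYUV420Flexible) on the main thread.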
    class VideoProcessor extends VideoProcessorBase {
        private static final String TAG = "VideoProcessor";
        private boolean mWorkInProgress;
        private boolean mGotDecoderEOS;
        private boolean mSignaledEncoderEOS;

        private LinkedList<Pair<Integer, BufferInfo>> mBuffersToRender =
                new LinkedList<Pair<Integer, BufferInfo>>();
        private LinkedList<Integer> mEncInputBuffers = new LinkedList<Integer>();

        private int mEncInputBufferSize = -1;
        private final AtomicReference<String> errorMsg = new AtomicReference<>(null);

        @Override
        public boolean processLoop(
                String path, String outMime, String videoEncName,
                int width, int height, boolean optional) {
            boolean skipped = true;
            try {
                open(path);
                if (!initCodecsAndConfigureEncoder(
                        videoEncName, outMime, width, height,
                        CodecCapabilities.COLOR_FormatYUV420Flexible)) {
                    assertTrue("could not configure encoder for supported size", optional);
                    return !skipped;
                }
                skipped = false;

                mDecoder.configure(mDecFormat, null /* surface */, null /* crypto */, 0);

                mDecoder.start();
                mEncoder.start();

                // main loop - process GL ops as only main thread has GL context
                while (!mCompleted && errorMsg.get() == null) {
                    Pair<Integer, BufferInfo> decBuffer = null;
                    int encBuffer = -1;
                    synchronized (mCondition) {
                        try {
                            // wait for an encoder input buffer and a decoder output buffer
                            // Use a timeout to avoid stalling the test if it doesn't arrive.
                            if (!haveBuffers() && !mCompleted) {
                                mCondition.wait(mEncodeOutputFormatUpdated ?
                                        FRAME_TIMEOUT_MS : INIT_TIMEOUT_MS);
                            }
                        } catch (InterruptedException ie) {
                            fail("wait interrupted");  // shouldn't happen
                        }
                        if (mCompleted) {
                            break;
                        }
                        if (!haveBuffers()) {
                            if (mEncoderIsActive) {
                                mEncoderIsActive = false;
                                Log.d(TAG, "No more input but still getting output from encoder.");
                                continue;
                            }
                            fail("timed out after " + mBuffersToRender.size()
                                    + " decoder output and " + mEncInputBuffers.size()
                                    + " encoder input buffers");
                        }

                        if (DEBUG) Log.v(TAG, "got image");
                        decBuffer = mBuffersToRender.removeFirst();
                        encBuffer = mEncInputBuffers.removeFirst();
                        if (isEOSOnlyBuffer(decBuffer)) {
                            queueEncoderEOS(decBuffer, encBuffer);
                            continue;
                        }
                        mWorkInProgress = true;
                    }

                    if (mWorkInProgress) {
                        renderDecodedBuffer(decBuffer, encBuffer);
                        synchronized (mCondition) {
                            mWorkInProgress = false;
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
                fail("received exception " + e);
            } finally {
                close();
            }
            assertNull(errorMsg.get(), errorMsg.get());
            return !skipped;
        }

        @Override
        public void onInputBufferAvailableLocked(MediaCodec mediaCodec, int ix) {
            if (mediaCodec == mDecoder) {
                // fill input buffer from extractor
                fillDecoderInputBuffer(ix);
            } else if (mediaCodec == mEncoder) {
                synchronized (mCondition) {
                    mEncInputBuffers.addLast(ix);
                    tryToPropagateEOS();
                    if (haveBuffers()) {
                        mCondition.notifyAll();
                    }
                }
            } else {
                fail("received input buffer on " + mediaCodec.getName());
            }
        }

        @Override
        public void onOutputBufferAvailableLocked(
                MediaCodec mediaCodec, int ix, BufferInfo info) {
            if (mediaCodec == mDecoder) {
                if (DEBUG) Log.v(TAG, "decoder received output #" + ix
                        + " (sz=" + info.size + ", f=" + info.flags
                        + ", ts=" + info.presentationTimeUs + ")");
                // render output buffer from decoder
                if (!mGotDecoderEOS) {
                    boolean eos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    // can release empty buffers now
                    if (info.size == 0) {
                        mDecoder.releaseOutputBuffer(ix, false /* render */);
                        ix = -1; // fake index used by render to not render
                    }
                    synchronized (mCondition) {
                        if (ix < 0 && eos && mBuffersToRender.size() > 0) {
                            // move lone EOS flag to last buffer to be rendered
                            mBuffersToRender.peekLast().second.flags |=
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                        } else if (ix >= 0 || eos) {
                            mBuffersToRender.addLast(Pair.create(ix, info));
                        }
                        if (eos) {
                            tryToPropagateEOS();
                            mGotDecoderEOS = true;
                        }
                        if (haveBuffers()) {
                            mCondition.notifyAll();
                        }
                    }
                }
            } else if (mediaCodec == mEncoder) {
                emptyEncoderOutputBuffer(ix, info);
            } else {
                fail("received output buffer on " + mediaCodec.getName());
            }
        }

        private void renderDecodedBuffer(Pair<Integer, BufferInfo> decBuffer, int encBuffer) {
            // process heavyweight actions under instance lock
            Image encImage = mEncoder.getInputImage(encBuffer);
            Image decImage = mDecoder.getOutputImage(decBuffer.first);
            assertNotNull("could not get encoder image for " + mEncoder.getInputFormat(), encImage);
            assertNotNull("could not get decoder image for " + mDecoder.getInputFormat(), decImage);
            assertEquals("incorrect decoder format", ImageFormat.YUV_420_888, decImage.getFormat());
            assertEquals("incorrect encoder format", ImageFormat.YUV_420_888, encImage.getFormat());

            CodecUtils.copyFlexYUVImage(encImage, decImage);

            // TRICKY: need this for queueBuffer
            if (mEncInputBufferSize < 0) {
                mEncInputBufferSize = mEncoder.getInputBuffer(encBuffer).capacity();
            }
            Log.d(TAG, "queuing input #" + encBuffer + " for encoder (sz="
                    + mEncInputBufferSize + ", f=" + decBuffer.second.flags
                    + ", ts=" + decBuffer.second.presentationTimeUs + ")");
            mEncoder.queueInputBuffer(
                    encBuffer, 0, mEncInputBufferSize, decBuffer.second.presentationTimeUs,
                    decBuffer.second.flags);
            if ((decBuffer.second.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                mSignaledEncoderEOS = true;
            }
            mDecoder.releaseOutputBuffer(decBuffer.first, false /* render */);
        }

        @Override
        public void onError(MediaCodec mediaCodec, MediaCodec.CodecException e) {
            String codecName = null;
            try {
                codecName = mediaCodec.getName();
            } catch (Exception ex) {
                codecName = "(error getting codec name)";
            }
            errorMsg.set("received error on " + codecName + ": " + e);
        }

        @Override
        public void onOutputFormatChanged(MediaCodec mediaCodec, MediaFormat mediaFormat) {
            Log.i(TAG, mediaCodec.getName() + " got new output format " + mediaFormat);
            if (mediaCodec == mEncoder) {
                mEncodeOutputFormatUpdated = true;
                saveEncoderFormat(mediaFormat);
            }
        }

        // next methods are synchronized on mCondition
        private boolean haveBuffers() {
            return mEncInputBuffers.size() > 0 && mBuffersToRender.size() > 0
                    && !mSignaledEncoderEOS;
        }

        private boolean isEOSOnlyBuffer(Pair<Integer, BufferInfo> decBuffer) {
            return decBuffer.first < 0 || decBuffer.second.size == 0;
        }

        protected void tryToPropagateEOS() {
            if (!mWorkInProgress && haveBuffers() && isEOSOnlyBuffer(mBuffersToRender.getFirst())) {
                Pair<Integer, BufferInfo> decBuffer = mBuffersToRender.removeFirst();
                int encBuffer = mEncInputBuffers.removeFirst();
                queueEncoderEOS(decBuffer, encBuffer);
            }
        }

        void queueEncoderEOS(Pair<Integer, BufferInfo> decBuffer, int encBuffer) {
            Log.d(TAG, "signaling encoder EOS");
            mEncoder.queueInputBuffer(encBuffer, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            mSignaledEncoderEOS = true;
            if (decBuffer.first >= 0) {
                mDecoder.releaseOutputBuffer(decBuffer.first, false /* render */);
            }
        }
    }


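    // Surface variant: the decoder renders onto a SurfaceTexture (OutputSurface) and each frame
    // is drawn via GL into the encoder's input surface (InputSurface), avoiding buffer copies
    // in Java.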
    class SurfaceVideoProcessor extends VideoProcessorBase
            implements SurfaceTexture.OnFrameAvailableListener {
        private static final String TAG = "SurfaceVideoProcessor";
        private boolean mFrameAvailable;
        private boolean mGotDecoderEOS;
        private boolean mSignaledEncoderEOS;

        private InputSurface mEncSurface;
        private OutputSurface mDecSurface;
        private BufferInfo mInfoOnSurface;

        private LinkedList<Pair<Integer, BufferInfo>> mBuffersToRender =
                new LinkedList<Pair<Integer, BufferInfo>>();

        private final AtomicReference<String> errorMsg = new AtomicReference<>(null);

        @Override
        public boolean processLoop(
                String path, String outMime, String videoEncName,
                int width, int height, boolean optional) {
            boolean skipped = true;
            try {
                open(path);
                if (!initCodecsAndConfigureEncoder(
                        videoEncName, outMime, width, height,
                        CodecCapabilities.COLOR_FormatSurface)) {
                    assertTrue("could not configure encoder for supported size", optional);
                    return !skipped;
                }
                skipped = false;

                mEncSurface = new InputSurface(mEncoder.createInputSurface());
                mEncSurface.makeCurrent();

                mDecSurface = new OutputSurface(this);
                //mDecSurface.changeFragmentShader(FRAGMENT_SHADER);
                mDecoder.configure(mDecFormat, mDecSurface.getSurface(), null /* crypto */, 0);

                mDecoder.start();
                mEncoder.start();

                // main loop - process GL ops as only main thread has GL context
                while (!mCompleted && errorMsg.get() == null) {
                    BufferInfo info = null;
                    synchronized (mCondition) {
                        try {
                            // wait for mFrameAvailable, which is set by onFrameAvailable().
                            // Use a timeout to avoid stalling the test if it doesn't arrive.
                            if (!mFrameAvailable && !mCompleted && !mEncoderIsActive) {
                                mCondition.wait(mEncodeOutputFormatUpdated ?
                                        FRAME_TIMEOUT_MS : INIT_TIMEOUT_MS);
                            }
                        } catch (InterruptedException ie) {
                            fail("wait interrupted");  // shouldn't happen
                        }
                        if (mCompleted) {
                            break;
                        }
                        if (mEncoderIsActive) {
                            mEncoderIsActive = false;
                            if (DEBUG) Log.d(TAG, "encoder is still active, continue");
                            continue;
                        }
                        assertTrue("still waiting for image", mFrameAvailable);
                        if (DEBUG) Log.v(TAG, "got image");
                        info = mInfoOnSurface;
                    }
                    if (info == null) {
                        continue;
                    }
                    if (info.size > 0) {
                        mDecSurface.latchImage();
                        if (DEBUG) Log.v(TAG, "latched image");
                        mFrameAvailable = false;

                        mDecSurface.drawImage();
                        Log.d(TAG, "encoding frame at " + info.presentationTimeUs * 1000);

                        mEncSurface.setPresentationTime(info.presentationTimeUs * 1000);
                        mEncSurface.swapBuffers();
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        mSignaledEncoderEOS = true;
                        Log.d(TAG, "signaling encoder EOS");
                        mEncoder.signalEndOfInputStream();
                    }

                    synchronized (mCondition) {
                        mInfoOnSurface = null;
                        if (mBuffersToRender.size() > 0 && mInfoOnSurface == null) {
                            if (DEBUG) Log.v(TAG, "handling postponed frame");
                            Pair<Integer, BufferInfo> nextBuffer = mBuffersToRender.removeFirst();
                            renderDecodedBuffer(nextBuffer.first, nextBuffer.second);
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
                fail("received exception " + e);
            } finally {
                close();
                if (mEncSurface != null) {
                    mEncSurface.release();
                    mEncSurface = null;
                }
                if (mDecSurface != null) {
                    mDecSurface.release();
                    mDecSurface = null;
                }
            }
            assertNull(errorMsg.get(), errorMsg.get());
            return !skipped;
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            if (DEBUG) Log.v(TAG, "new frame available");
            synchronized (mCondition) {
                assertFalse("mFrameAvailable already set, frame could be dropped", mFrameAvailable);
                mFrameAvailable = true;
                mCondition.notifyAll();
            }
        }

        @Override
        public void onInputBufferAvailableLocked(MediaCodec mediaCodec, int ix) {
            if (mediaCodec == mDecoder) {
                // fill input buffer from extractor
                fillDecoderInputBuffer(ix);
            } else {
                fail("received input buffer on " + mediaCodec.getName());
            }
        }

        @Override
        public void onOutputBufferAvailableLocked(
                MediaCodec mediaCodec, int ix, BufferInfo info) {
            if (mediaCodec == mDecoder) {
                if (DEBUG) Log.v(TAG, "decoder received output #" + ix
                        + " (sz=" + info.size + ", f=" + info.flags
                        + ", ts=" + info.presentationTimeUs + ")");
                // render output buffer from decoder
                if (!mGotDecoderEOS) {
                    boolean eos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    if (eos) {
                        mGotDecoderEOS = true;
                    }
                    // can release empty buffers now
                    if (info.size == 0) {
                        mDecoder.releaseOutputBuffer(ix, false /* render */);
                        ix = -1; // fake index used by render to not render
                    }
                    if (eos || info.size > 0) {
                        synchronized (mCondition) {
                            if (mInfoOnSurface != null || mBuffersToRender.size() > 0) {
                                if (DEBUG) Log.v(TAG, "postponing render, surface busy");
                                mBuffersToRender.addLast(Pair.create(ix, info));
                            } else {
                                renderDecodedBuffer(ix, info);
                            }
                        }
                    }
                }
            } else if (mediaCodec == mEncoder) {
                emptyEncoderOutputBuffer(ix, info);
                synchronized (mCondition) {
                    if (!mCompleted) {
                        mEncoderIsActive = true;
                        mCondition.notifyAll();
                    }
                }
            } else {
                fail("received output buffer on " + mediaCodec.getName());
            }
        }

        private void renderDecodedBuffer(int ix, BufferInfo info) {
            boolean eos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
            mInfoOnSurface = info;
            if (info.size > 0) {
                Log.d(TAG, "rendering frame #" + ix + " at " + info.presentationTimeUs * 1000
                        + (eos ? " with EOS" : ""));
                mDecoder.releaseOutputBuffer(ix, info.presentationTimeUs * 1000);
            }

            if (eos && info.size == 0) {
                if (DEBUG) Log.v(TAG, "decoder output EOS available");
                mFrameAvailable = true;
                mCondition.notifyAll();
            }
        }

        @Override
        public void onError(MediaCodec mediaCodec, MediaCodec.CodecException e) {
            String codecName = null;
            try {
                codecName = mediaCodec.getName();
            } catch (Exception ex) {
                codecName = "(error getting codec name)";
            }
            errorMsg.set("received error on " + codecName + ": " + e);
        }

        @Override
        public void onOutputFormatChanged(MediaCodec mediaCodec, MediaFormat mediaFormat) {
            Log.i(TAG, mediaCodec.getName() + " got new output format " + mediaFormat);
            if (mediaCodec == mEncoder) {
                mEncodeOutputFormatUpdated = true;
                saveEncoderFormat(mediaFormat);
            }
        }
    }

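    // Derives interesting test resolutions from an encoder's VideoCapabilities: the min/max
    // extremes, sizes near those extremes, and a handful of arbitrary in-range sizes.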
    static class EncoderSize {
        private final boolean DEBUG = false;
        private static final String TAG = "EncoderSize";
        final private String mName;
        final private String mMime;
        final private CodecCapabilities mCaps;
        final private VideoCapabilities mVideoCaps;

        final public Map<Size, Set<Size>> mMinMax;     // extreme sizes
        final public Map<Size, Set<Size>> mNearMinMax; // sizes near extreme
        final public Set<Size> mArbitraryW;            // arbitrary widths in the middle
        final public Set<Size> mArbitraryH;            // arbitrary heights in the middle
        final public Set<Size> mSizes;                 // all non-specifically tested sizes

        final private int xAlign;
        final private int yAlign;

        EncoderSize(String name, String mime, CodecCapabilities caps) {
            mName = name;
            mMime = mime;
            mCaps = caps;
            mVideoCaps = caps.getVideoCapabilities();

            /* calculate min/max sizes */
            mMinMax = new HashMap<Size, Set<Size>>();
            mNearMinMax = new HashMap<Size, Set<Size>>();
            mArbitraryW = new HashSet<Size>();
            mArbitraryH = new HashSet<Size>();
            mSizes = new HashSet<Size>();

            xAlign = mVideoCaps.getWidthAlignment();
            yAlign = mVideoCaps.getHeightAlignment();

            initializeSizes();
        }

        private void initializeSizes() {
            for (int x = 0; x < 2; ++x) {
                for (int y = 0; y < 2; ++y) {
                    addExtremeSizesFor(x, y);
                }
            }

            // initialize arbitrary sizes
            for (int i = 1; i <= 7; ++i) {
                int j = ((7 * i) % 11) + 1;
                int width, height;
                try {
                    width = alignedPointInRange(i * 0.125, xAlign, mVideoCaps.getSupportedWidths());
                    height = alignedPointInRange(j * 0.077, yAlign,
                            mVideoCaps.getSupportedHeightsFor(width));
                    mArbitraryW.add(new Size(width, height));
                } catch (IllegalArgumentException e) {
                }

                try {
                    height = alignedPointInRange(i * 0.125, yAlign,
                            mVideoCaps.getSupportedHeights());
                    width = alignedPointInRange(j * 0.077, xAlign,
                            mVideoCaps.getSupportedWidthsFor(height));
                    mArbitraryH.add(new Size(width, height));
                } catch (IllegalArgumentException e) {
                }
            }
            mArbitraryW.removeAll(mArbitraryH);
            mArbitraryW.removeAll(mSizes);
            mSizes.addAll(mArbitraryW);
            mArbitraryH.removeAll(mSizes);
            mSizes.addAll(mArbitraryH);
            if (DEBUG) Log.i(TAG, "arbitrary=" + mArbitraryW + "/" + mArbitraryH);
        }

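        // x and y select which extreme of the width/height ranges to probe (0 = minimum,
        // 1 = maximum); sizes nudged inward by one alignment step go into the "near" buckets.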
        private void addExtremeSizesFor(int x, int y) {
            Set<Size> minMax = new HashSet<Size>();
            Set<Size> nearMinMax = new HashSet<Size>();

            for (int dx = 0; dx <= xAlign; dx += xAlign) {
                for (int dy = 0; dy <= yAlign; dy += yAlign) {
                    Set<Size> bucket = (dx + dy == 0) ? minMax : nearMinMax;
                    try {
                        int width = getExtreme(mVideoCaps.getSupportedWidths(), x, dx);
                        int height = getExtreme(mVideoCaps.getSupportedHeightsFor(width), y, dy);
                        bucket.add(new Size(width, height));

                        // try max max with more reasonable ratio if too skewed
                        if (x + y == 2 && width >= 4 * height) {
                            Size wideScreen = getLargestSizeForRatio(16, 9);
                            width = getExtreme(
                                    mVideoCaps.getSupportedWidths()
                                            .intersect(0, wideScreen.getWidth()), x, dx);
                            height = getExtreme(mVideoCaps.getSupportedHeightsFor(width), y, 0);
                            bucket.add(new Size(width, height));
                        }
                    } catch (IllegalArgumentException e) {
                    }

                    try {
                        int height = getExtreme(mVideoCaps.getSupportedHeights(), y, dy);
                        int width = getExtreme(mVideoCaps.getSupportedWidthsFor(height), x, dx);
                        bucket.add(new Size(width, height));

                        // try max max with more reasonable ratio if too skewed
                        if (x + y == 2 && height >= 4 * width) {
                            Size wideScreen = getLargestSizeForRatio(9, 16);
                            height = getExtreme(
                                    mVideoCaps.getSupportedHeights()
                                            .intersect(0, wideScreen.getHeight()), y, dy);
                            width = getExtreme(mVideoCaps.getSupportedWidthsFor(height), x, dx);
                            bucket.add(new Size(width, height));
                        }
                    } catch (IllegalArgumentException e) {
                    }
                }
            }

            // keep unique sizes
            minMax.removeAll(mSizes);
            mSizes.addAll(minMax);
            nearMinMax.removeAll(mSizes);
            mSizes.addAll(nearMinMax);

            mMinMax.put(new Size(x, y), minMax);
            mNearMinMax.put(new Size(x, y), nearMinMax);
            if (DEBUG) Log.i(TAG, x + "x" + y + ": minMax=" + mMinMax + ", near=" + mNearMinMax);
        }

        private int alignInRange(double value, int align, Range<Integer> range) {
            return range.clamp(align * (int) Math.round(value / align));
        }

        /* point should be between 0. and 1. */
        private int alignedPointInRange(double point, int align, Range<Integer> range) {
            return alignInRange(
                    range.getLower() + point * (range.getUpper() - range.getLower()), align, range);
        }

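        // i selects the end of the range (1 = upper, 0 = lower); delta moves the value inward,
        // and an IllegalArgumentException is thrown if the result is no longer strictly inside
        // the range.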
        private int getExtreme(Range<Integer> range, int i, int delta) {
            int dim = i == 1 ? range.getUpper() - delta : range.getLower() + delta;
            if (delta == 0
                    || (dim > range.getLower() && dim < range.getUpper())) {
                return dim;
            }
            throw new IllegalArgumentException();
        }

        private Size getLargestSizeForRatio(int x, int y) {
            Range<Integer> widthRange = mVideoCaps.getSupportedWidths();
            Range<Integer> heightRange = mVideoCaps.getSupportedHeightsFor(widthRange.getUpper());
            final int xAlign = mVideoCaps.getWidthAlignment();
            final int yAlign = mVideoCaps.getHeightAlignment();

            // scale by alignment
            int width = alignInRange(
                    Math.sqrt(widthRange.getUpper() * heightRange.getUpper() * (double) x / y),
                    xAlign, widthRange);
            int height = alignInRange(
                    width * (double) y / x, yAlign, mVideoCaps.getSupportedHeightsFor(width));
            return new Size(width, height);
        }
    }

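    // Wraps a single (codec name, MIME type) pair and runs the actual encode tests (specific
    // size, detailed, intra refresh) using the video processors above.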
1126     class Encoder {
1127         final private String mName;
1128         final private String mMime;
1129         final private CodecCapabilities mCaps;
1130         final private VideoCapabilities mVideoCaps;
1131 
1132 
Encoder(String name, String mime, CodecCapabilities caps)1133         Encoder(String name, String mime, CodecCapabilities caps) {
1134             mName = name;
1135             mMime = mime;
1136             mCaps = caps;
1137             mVideoCaps = caps.getVideoCapabilities();
1138         }
1139 
testSpecific(int width, int height, boolean flexYUV)1140         public boolean testSpecific(int width, int height, boolean flexYUV) {
1141             return test(width, height, true /* optional */, flexYUV);
1142         }

        public boolean testIntraRefresh(int width, int height) {
            if (!mCaps.isFeatureSupported(CodecCapabilities.FEATURE_IntraRefresh)) {
                return false;
            }

            final int[] refreshPeriod = new int[] {10, 13, 17, 22, 29, 38, 50, 60};

            // Test the support of refresh periods in the range of 10 - 60 frames
            for (int period : refreshPeriod) {
                Function<MediaFormat, Boolean> updateConfigFormatHook =
                        new Function<MediaFormat, Boolean>() {
                    public Boolean apply(MediaFormat fmt) {
                        // Set i-frame-interval to 10000 so the encoded video has only one I-frame.
                        fmt.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10000);
                        fmt.setInteger(MediaFormat.KEY_INTRA_REFRESH_PERIOD, period);
                        return true;
                    }
                };

                Function<MediaFormat, Boolean> checkOutputFormatHook =
                        new Function<MediaFormat, Boolean>() {
                    public Boolean apply(MediaFormat fmt) {
                        int intraPeriod = fmt.getInteger(MediaFormat.KEY_INTRA_REFRESH_PERIOD);
                        // The intra refresh period must be carried in the output format and
                        // must stay within 20% of the requested refresh period.
                        if (intraPeriod > 1.2 * period || intraPeriod < 0.8 * period) {
                            throw new RuntimeException("Intra period mismatch");
                        }
                        return true;
                    }
                };

                String testName =
                        mName + '_' + width + "x" + height + '_' + "flexYUV_intraRefresh";

                Consumer<VideoProcessorBase> configureVideoProcessor =
                        new Consumer<VideoProcessorBase>() {
                    public void accept(VideoProcessorBase processor) {
                        processor.setProcessorName(testName);
                        processor.setUpdateConfigHook(updateConfigFormatHook);
                        processor.setCheckOutputFormatHook(checkOutputFormatHook);
                    }
                };

                if (!test(width, height, 0 /* frameRate */, 0 /* bitRate */, true /* optional */,
                        true /* flex */, configureVideoProcessor)) {
                    return false;
                }
            }

            return true;
        }

        public boolean testDetailed(
                int width, int height, int frameRate, int bitRate, boolean flexYUV) {
            String testName =
                    mName + '_' + width + "x" + height + '_' + (flexYUV ? "flexYUV" : "surface");
            Consumer<VideoProcessorBase> configureVideoProcessor =
                    new Consumer<VideoProcessorBase>() {
                public void accept(VideoProcessorBase processor) {
                    processor.setProcessorName(testName);
                }
            };
            return test(width, height, frameRate, bitRate, true /* optional */, flexYUV,
                    configureVideoProcessor);
        }

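        // For example, testSupport(1920, 1080, 30, 10_000_000) is true only when the
        // encoder advertises 1080p at 30 fps and a bit rate range containing 10 Mbps.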
        public boolean testSupport(int width, int height, int frameRate, int bitRate) {
            return mVideoCaps.areSizeAndRateSupported(width, height, frameRate) &&
                    mVideoCaps.getBitrateRange().contains(bitRate);
        }

        private boolean test(
                int width, int height, boolean optional, boolean flexYUV) {
            String testName =
                    mName + '_' + width + "x" + height + '_' + (flexYUV ? "flexYUV" : "surface");
            Consumer<VideoProcessorBase> configureVideoProcessor =
                    new Consumer<VideoProcessorBase>() {
                public void accept(VideoProcessorBase processor) {
                    processor.setProcessorName(testName);
                }
            };
            return test(width, height, 0 /* frameRate */, 0 /* bitRate */,
                    optional, flexYUV, configureVideoProcessor);
        }

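        // Core test body: runs a VideoProcessor (flexible YUV input) or a
        // SurfaceVideoProcessor (surface input) over the source clip at the requested
        // size and rates, then plays the encoded result back on success.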
        private boolean test(
                int width, int height, int frameRate, int bitRate, boolean optional,
                boolean flexYUV, Consumer<VideoProcessorBase> configureVideoProcessor) {
            Log.i(TAG, "testing " + mMime + " on " + mName + " for " + width + "x" + height
                    + (flexYUV ? " flexYUV" : " surface"));

            Preconditions.assertTestFileExists(SOURCE_URL);

            VideoProcessorBase processor =
                    flexYUV ? new VideoProcessor() : new SurfaceVideoProcessor();

            processor.setFrameAndBitRates(frameRate, bitRate);
            configureVideoProcessor.accept(processor);

            // Process the bundled source clip at the requested resolution.
            boolean success = processor.processLoop(
                    SOURCE_URL, mMime, mName, width, height, optional);
            if (success) {
                success = processor.playBack(getActivity().getSurfaceHolder().getSurface());
            }
            return success;
        }
    }

    private static CodecCapabilities getCodecCapabities(String encoderName, String mime,
                                                        boolean isEncoder) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
            if (isEncoder != codecInfo.isEncoder()) {
                continue;
            }
            if (encoderName.equals(codecInfo.getName())) {
                return codecInfo.getCapabilitiesForType(mime);
            }
        }
        return null;
    }
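    // e.g. getCodecCapabities("c2.android.avc.encoder", MediaFormat.MIMETYPE_VIDEO_AVC, true)
    // returns that component's AVC capabilities if such an encoder is present, or null
    // otherwise; the component name here is only an illustration.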

    private Encoder getEncHandle(String encodername, String mime) {
        CodecCapabilities caps = getCodecCapabities(encodername, mime, true);
        assertNotNull(caps);
        Encoder encoder = new Encoder(encodername, mime, caps);
        return encoder;
    }

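    // Each argument tuple is {encoderName, mediaType, width, height, flexYuv, mode},
    // matching the "{index}_{0}_{1}_{2}x{3}_{4}_{5}" test name pattern below.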
    @Parameterized.Parameters(name = "{index}_{0}_{1}_{2}x{3}_{4}_{5}")
    public static Collection<Object[]> input() {
        final String[] mediaTypesList = new String[] {
                MediaFormat.MIMETYPE_VIDEO_AVC,
                MediaFormat.MIMETYPE_VIDEO_H263,
                MediaFormat.MIMETYPE_VIDEO_HEVC,
                MediaFormat.MIMETYPE_VIDEO_MPEG4,
                MediaFormat.MIMETYPE_VIDEO_VP8,
                MediaFormat.MIMETYPE_VIDEO_VP9,
                MediaFormat.MIMETYPE_VIDEO_AV1,
        };
        final List<Object[]> argsList = new ArrayList<>();
        for (String mediaType : mediaTypesList) {
            if (TestArgs.shouldSkipMediaType(mediaType)) {
                continue;
            }
            String[] encoders = MediaUtils.getEncoderNamesForMime(mediaType);
            for (String encoder : encoders) {
                if (TestArgs.shouldSkipCodec(encoder)) {
                    continue;
                }
                if (!TestUtils.isTestableCodecInCurrentMode(encoder)) {
                    Log.d(TAG, "Skipping tests for codec: " + encoder);
                    continue;
                }
                CodecCapabilities caps = getCodecCapabities(encoder, mediaType, true);
                assertNotNull(caps);
                EncoderSize encoderSize = new EncoderSize(encoder, mediaType, caps);
                final Set<Size> sizes = new HashSet<Size>();
                for (boolean near : new boolean[] {false, true}) {
                    Map<Size, Set<Size>> testSizes =
                            near ? encoderSize.mNearMinMax : encoderSize.mMinMax;
                    for (int x = 0; x < 2; x++) {
                        for (int y = 0; y < 2; y++) {
                            for (Size s : testSizes.get(new Size(x, y))) {
                                sizes.add(new Size(s.getWidth(), s.getHeight()));
                            }
                        }
                    }
                }
                for (boolean widths : new boolean[] {false, true}) {
                    for (Size s : (widths ? encoderSize.mArbitraryW : encoderSize.mArbitraryH)) {
                        sizes.add(new Size(s.getWidth(), s.getHeight()));
                    }
                }
                final Set<Size> specificSizes = new HashSet<Size>();
                specificSizes.add(new Size(176, 144));
                specificSizes.add(new Size(320, 180));
                specificSizes.add(new Size(320, 240));
                specificSizes.add(new Size(720, 480));
                specificSizes.add(new Size(1280, 720));
                specificSizes.add(new Size(1920, 1080));

                for (boolean flexYuv : new boolean[] {false, true}) {
                    for (Size s : specificSizes) {
                        argsList.add(new Object[]{encoder, mediaType, s.getWidth(), s.getHeight(),
                                flexYuv, TestMode.TEST_MODE_DETAILED});
                    }
                }

                argsList.add(new Object[]{encoder, mediaType, 480, 360, true,
                        TestMode.TEST_MODE_INTRAREFRESH});
                sizes.removeAll(specificSizes);
                specificSizes.addAll(sizes);
                for (boolean flexYuv : new boolean[] {false, true}) {
                    for (Size s : specificSizes) {
                        argsList.add(new Object[]{encoder, mediaType, s.getWidth(), s.getHeight(),
                                flexYuv, TestMode.TEST_MODE_SPECIFIC});
                    }
                }
            }
        }
        return argsList;
    }

    public VideoEncoderTest(String encoderName, String mime, int width, int height,
                            boolean flexYuv, TestMode mode) {
        mEncHandle = getEncHandle(encoderName, mime);
        mWidth = width;
        mHeight = height;
        mFlexYuv = flexYuv;
        mMode = mode;
    }

    @CddTest(requirements = {"5.1.7/C-3-1"})
    @ApiTest(apis = {"MediaCodecInfo.CodecCapabilities#FEATURE_IntraRefresh",
            "android.media.MediaFormat#KEY_WIDTH",
            "android.media.MediaFormat#KEY_HEIGHT",
            "android.media.MediaFormat#KEY_FRAME_RATE",
            "android.media.MediaFormat#KEY_BIT_RATE",
            "android.media.MediaFormat#KEY_I_FRAME_INTERVAL",
            "android.media.MediaFormat#KEY_INTRA_REFRESH_PERIOD",
            "android.media.MediaFormat#KEY_MAX_INPUT_SIZE",
            "MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Flexible",
            "MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface"})
    @Test
    public void testEncode() {
        int frameRate = 30;
        int bitRate;
        int lumaSamples = mWidth * mHeight;
        if (lumaSamples <= 320 * 240) {
            bitRate = 384 * 1000;
        } else if (lumaSamples <= 720 * 480) {
            bitRate = 2 * 1000000;
        } else if (lumaSamples <= 1280 * 720) {
            bitRate = 4 * 1000000;
        } else {
            bitRate = 10 * 1000000;
        }
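        // The branches above pick a resolution-based default bit rate (384 kbps up to
        // QVGA, then 2/4/10 Mbps); dispatch to the selected test mode at 30 fps.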
        switch (mMode) {
            case TEST_MODE_SPECIFIC:
                specific(new Encoder[]{mEncHandle}, mWidth, mHeight, mFlexYuv);
                break;
            case TEST_MODE_DETAILED:
                detailed(new Encoder[]{mEncHandle}, mWidth, mHeight, frameRate, bitRate, mFlexYuv);
                break;
            case TEST_MODE_INTRAREFRESH:
                intraRefresh(new Encoder[]{mEncHandle}, mWidth, mHeight);
                break;
        }
    }

    /* test specific size */
    private void specific(Encoder[] encoders, int width, int height, boolean flexYUV) {
        boolean skipped = true;
        if (encoders.length == 0) {
            MediaUtils.skipTest("no such encoder present");
            return;
        }
        for (Encoder encoder : encoders) {
            if (encoder.testSpecific(width, height, flexYUV)) {
                skipped = false;
            }
        }
        if (skipped) {
            MediaUtils.skipTest("duplicate or unsupported resolution");
        }
    }

    /* test intra refresh with flexYUV */
    private void intraRefresh(Encoder[] encoders, int width, int height) {
        boolean skipped = true;
        if (encoders.length == 0) {
            MediaUtils.skipTest("no such encoder present");
            return;
        }
        for (Encoder encoder : encoders) {
            if (encoder.testIntraRefresh(width, height)) {
                skipped = false;
            }
        }
        if (skipped) {
            MediaUtils.skipTest("intra-refresh unsupported");
        }
    }

    /* test size, frame rate and bit rate */
    private void detailed(
            Encoder[] encoders, int width, int height, int frameRate, int bitRate,
            boolean flexYUV) {
        Assume.assumeTrue("Test is currently enabled only for avc and vp8 encoders",
                mEncHandle.mMime.equals(MediaFormat.MIMETYPE_VIDEO_AVC) ||
                        mEncHandle.mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP8));
        if (encoders.length == 0) {
            MediaUtils.skipTest("no such encoder present");
            return;
        }
        boolean skipped = true;
        for (Encoder encoder : encoders) {
            if (encoder.testSupport(width, height, frameRate, bitRate)) {
                skipped = false;
                encoder.testDetailed(width, height, frameRate, bitRate, flexYUV);
            }
        }
        if (skipped) {
            MediaUtils.skipTest("unsupported resolution and rate");
        }
    }
}