/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.SESSION_READY_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
import static android.hardware.camera2.cts.CameraTestUtils.getUnavailablePhysicalCameras;
import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;

import static com.google.common.truth.Truth.assertWithMessage;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import static junit.framework.Assert.fail;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorSpace;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.DataSpace;
import android.hardware.HardwareBuffer;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.ImageDropperListener;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.os.SystemProperties;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import com.android.compatibility.common.util.PropertyUtil;
import com.android.ex.camera2.blocking.BlockingSessionCallback;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * <p>Basic test for ImageReader APIs. It uses CameraDevice as the producer; the camera
 * sends data to the Surface provided by the ImageReader. The following image formats
 * are tested:</p>
 *
 * <p>YUV_420_888: flexible YUV420, a mandatory format for camera. </p>
 * <p>JPEG: used for JPEG still capture, also a mandatory format. </p>
 * <p>Some invalid access tests. </p>
 * <p>TODO: Add more format tests? </p>
 */
@RunWith(Parameterized.class)
public class ImageReaderTest extends Camera2AndroidTestCase {
    private static final String TAG = "ImageReaderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);

    // Number of frames (for streaming requests) to be verified.
    private static final int NUM_FRAME_VERIFIED = 2;
    // Number of frames (for streaming requests) to be verified with long processing time.
    private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
    // The time to hold each image to simulate long processing time.
    private static final int LONG_PROCESS_TIME_MS = 300;
    // Max number of images that can be accessed simultaneously from ImageReader.
    private static final int MAX_NUM_IMAGES = 5;
    // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
    // generous to avoid false positives due to punch/saturation operations vendors apply to the
    // JPEG outputs.
    private static final double IMAGE_DIFFERENCE_TOLERANCE = 40;
    // Legacy level devices need an even larger tolerance because JPEG and YUV are not captured
    // from the same frame in legacy mode.
    private static final double IMAGE_DIFFERENCE_TOLERANCE_LEGACY = 60;

    private SimpleImageListener mListener;

    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

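    /**
     * Basic repeating-capture test of the mandatory flexible YUV_420_888 format for every
     * camera under test.
     */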
    @Test
    public void testFlexibleYuv() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.YUV_420_888, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

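    /**
     * Repeating-capture test of the DEPTH16 format for every camera under test.
     */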
    @Test
    public void testDepth16() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.DEPTH16, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

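    /**
     * Repeating-capture test of the DEPTH_POINT_CLOUD format for every camera under test.
     */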
    @Test
    public void testDepthPointCloud() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

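    /**
     * Repeating-capture test of the DEPTH_JPEG (dynamic depth) format, with session support
     * checked as part of the test.
     */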
    @Test
    public void testDynamicDepth() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.DEPTH_JPEG, /*repeating*/true);
                params.mCheckSession = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

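    /**
     * Repeating-capture test of the Y8 format for every camera under test.
     */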
    @Test
    public void testY8() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.Y8, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

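    /**
     * Single-capture test of the JPEG format for every camera under test.
     */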
    @Test
    public void testJpeg() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing jpeg capture for Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/false);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

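    /**
     * Single-capture test of the RAW_SENSOR format for every camera under test.
     */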
    @Test
    public void testRaw() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.RAW_SENSOR, /*repeating*/false);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

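    /**
     * Single-capture test of the RAW_PRIVATE format for every camera under test.
     */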
    @Test
    public void testRawPrivate() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.RAW_PRIVATE, /*repeating*/false);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

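    /**
     * Single-capture test of the YCBCR_P010 format with the HLG10 dynamic range profile, for
     * cameras that advertise the 10-bit dynamic range capability.
     */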
    @Test
    public void testP010() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing YUV P010 capture for Camera " + id);
                openDevice(id);
                if (!mStaticInfo.isCapabilitySupported(CameraCharacteristics.
                            REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
                    continue;
                }
                Set<Long> availableProfiles =
                    mStaticInfo.getAvailableDynamicRangeProfilesChecked();
                assertFalse("Absent dynamic range profiles", availableProfiles.isEmpty());
                assertTrue("HLG10 not present in the available dynamic range profiles",
                        availableProfiles.contains(DynamicRangeProfiles.HLG10));

                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.YCBCR_P010, /*repeating*/false);
                params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

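    /**
     * Single-capture test of YUV_420_888 in the Display P3 color space, for cameras that
     * advertise color space profiles.
     */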
    @Test
    public void testDisplayP3Yuv() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
                                ImageFormat.YUV_420_888);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing Display P3 Yuv capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.YUV_420_888, /*repeating*/false);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testDisplayP3YuvRepeating() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
                                ImageFormat.YUV_420_888);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing repeating Display P3 Yuv capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.YUV_420_888, /*repeating*/true);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testDisplayP3Heic() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing Display P3 HEIC capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.HEIC, /*repeating*/false);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testDisplayP3HeicRepeating() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing repeating Display P3 HEIC capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.HEIC, /*repeating*/true);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testDisplayP3Jpeg() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing Display P3 JPEG capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/false);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testDisplayP3JpegRepeating() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing repeating Display P3 JPEG capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/true);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testSRGBJpeg() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);

                if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing sRGB JPEG capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/false);
                params.mColorSpace = ColorSpace.Named.SRGB;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testSRGBJpegRepeating() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);

                if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
                    continue;
                }

                openDevice(id);
                Log.v(TAG, "Testing repeating sRGB JPEG capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/true);
                params.mColorSpace = ColorSpace.Named.SRGB;
                params.mUseColorSpace = true;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

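    /**
     * Single-capture test of the JPEG_R format, for cameras that advertise Jpeg/R support.
     */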
    @Test
    public void testJpegR() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isJpegRSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
                    continue;
                }
                Log.v(TAG, "Testing Jpeg/R capture for Camera " + id);

                assertTrue(mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                        .REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT));

                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG_R, /*repeating*/false);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testJpegRDisplayP3() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isJpegRSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
                    continue;
                }

                if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
                        .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
                    continue;
                }
                Set<ColorSpace.Named> availableColorSpaces =
                        mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
                                ImageFormat.JPEG_R);

                if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    continue;
                }
                openDevice(id);
                Log.v(TAG, "Testing Display P3 Jpeg/R capture for Camera " + id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG_R, /*repeating*/false);
                params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
                params.mUseColorSpace = true;
                params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testHeic() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing heic capture for Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.HEIC, /*repeating*/false);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testRepeatingJpeg() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.JPEG, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testRepeatingRaw() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.RAW_SENSOR, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testRepeatingRawPrivate() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.RAW_PRIVATE, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testRepeatingHeic() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing repeating heic capture for Camera " + id);
                openDevice(id);
                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.HEIC, /*repeating*/true);
                bufferFormatTestByCamera(params);
            } finally {
                closeDevice(id);
            }
        }
    }

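    /**
     * Repeating YUV_420_888 capture exercising each supported OutputConfiguration timestamp
     * base; image data itself is not validated.
     */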
    @Test
    public void testFlexibleYuvWithTimestampBase() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);

                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.YUV_420_888, /*repeating*/true);
                params.mValidateImageData = false;
                int[] timeBases = {OutputConfiguration.TIMESTAMP_BASE_SENSOR,
                        OutputConfiguration.TIMESTAMP_BASE_MONOTONIC,
                        OutputConfiguration.TIMESTAMP_BASE_REALTIME,
                        OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED};
                for (int timeBase : timeBases) {
                    params.mTimestampBase = timeBase;
                    bufferFormatTestByCamera(params);
                }
            } finally {
                closeDevice(id);
            }
        }
    }

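    /**
     * Repeating RAW_SENSOR capture while holding each image to simulate long processing time,
     * for cameras with the MANUAL_SENSOR capability.
     */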
    @Test
    public void testLongProcessingRepeatingRaw() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing long processing on repeating raw for camera " + id);

                if (!mAllStaticInfo.get(id).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }
                openDevice(id);

                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    @Test
    public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);

                if (!mAllStaticInfo.get(id).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(id);
                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test invalid access of an image after it has been closed; any further access
     * of the image should throw an IllegalStateException. The basic assumption of
     * this test is that the ImageReader always provides a direct byte buffer, which is always
     * true for the camera case. If the produced image byte buffer is not a direct byte buffer,
     * there is no guarantee that an ISE is thrown for this invalid access case.
     */
    @Test
    public void testInvalidAccessTest() throws Exception {
        // Byte buffer access after an image is released should throw an ISE.
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing invalid image access for Camera " + id);
                openDevice(id);
                invalidAccessTestAfterClose();
            } finally {
                closeDevice(id);
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and JPEG) captured using ImageReader.
     *
     * <p>Both stream formats are mandatory for the Camera2 API.</p>
     */
    @Test
    public void testYuvAndJpeg() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "YUV and JPEG testing for camera " + id);
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and JPEG) captured using an ImageReader created with
     * the factory method that takes a usage flag argument.
     *
     * <p>Both stream formats are mandatory for the Camera2 API.</p>
     */
    @Test
    public void testYuvAndJpegWithUsageFlag() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "YUV and JPEG testing for camera " + id);
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                bufferFormatWithYuvTestByCamera(ImageFormat.JPEG, true);
            } finally {
                closeDevice(id);
            }
        }
    }

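    /**
     * Verify that ImageReader.Builder propagates max images, usage, hardware buffer format and
     * dataspace to the created reader and to images dequeued from an attached ImageWriter.
     */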
    @Test
    public void testImageReaderBuilderSetHardwareBufferFormatAndDataSpace() throws Exception {
        long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
        try (
            ImageReader reader = new ImageReader
                .Builder(20, 45)
                .setMaxImages(2)
                .setDefaultHardwareBufferFormat(HardwareBuffer.RGBA_8888)
                .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
                .setUsage(usage)
                .build();
            ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
            Image outputImage = writer.dequeueInputImage()
        ) {
            assertEquals(2, reader.getMaxImages());
            assertEquals(usage, reader.getUsage());
            assertEquals(HardwareBuffer.RGBA_8888, reader.getHardwareBufferFormat());

            assertEquals(20, outputImage.getWidth());
            assertEquals(45, outputImage.getHeight());
            assertEquals(HardwareBuffer.RGBA_8888, outputImage.getFormat());
        }
    }

    @Test
    public void testImageReaderBuilderWithBLOBAndHEIF() throws Exception {
        long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
        try (
            ImageReader reader = new ImageReader
                .Builder(20, 45)
                .setMaxImages(2)
                .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
                .setDefaultDataSpace(DataSpace.DATASPACE_HEIF)
                .setUsage(usage)
                .build();
            ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
        ) {
            assertEquals(2, reader.getMaxImages());
            assertEquals(usage, reader.getUsage());
            assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_HEIF, reader.getDataSpace());
            // The writer should have the same dataspace and hardware buffer format as the reader.
            assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_HEIF, writer.getDataSpace());
            // HEIC is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_HEIF.
            assertEquals(ImageFormat.HEIC, writer.getFormat());
        }
    }

    @Test
    public void testImageReaderBuilderWithBLOBAndJpegR() throws Exception {
        long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
        try (
                ImageReader reader = new ImageReader
                        .Builder(20, 45)
                        .setMaxImages(2)
                        .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
                        .setDefaultDataSpace(DataSpace.DATASPACE_JPEG_R)
                        .setUsage(usage)
                        .build();
                ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
        ) {
            assertEquals(2, reader.getMaxImages());
            assertEquals(usage, reader.getUsage());
            assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_JPEG_R, reader.getDataSpace());
            // The writer should have the same dataspace and hardware buffer format as the reader.
            assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_JPEG_R, writer.getDataSpace());
            // Jpeg/R is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_JPEG_R.
            assertEquals(ImageFormat.JPEG_R, writer.getFormat());
        }
    }

    @Test
    public void testImageReaderBuilderWithBLOBAndJFIF() throws Exception {
        long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
        try (
            ImageReader reader = new ImageReader
                .Builder(20, 45)
                .setMaxImages(2)
                .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
                .setDefaultDataSpace(DataSpace.DATASPACE_JFIF)
                .setUsage(usage)
                .build();
            ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
        ) {
            assertEquals(2, reader.getMaxImages());
            assertEquals(usage, reader.getUsage());
            assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());
            // The writer should have the same dataspace and hardware buffer format as the reader.
            assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
            // JPEG is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_JFIF.
            assertEquals(ImageFormat.JPEG, writer.getFormat());
        }
    }

    @Test
    public void testImageReaderBuilderImageFormatOverride() throws Exception {
        try (
            ImageReader reader = new ImageReader
                .Builder(20, 45)
                .setImageFormat(ImageFormat.HEIC)
                .setDefaultHardwareBufferFormat(HardwareBuffer.RGB_888)
                .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
                .build();
            ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
            Image outputImage = writer.dequeueInputImage()
        ) {
            assertEquals(1, reader.getMaxImages());
            assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
            assertEquals(HardwareBuffer.RGB_888, reader.getHardwareBufferFormat());
            assertEquals(DataSpace.DATASPACE_BT709, reader.getDataSpace());

            assertEquals(20, outputImage.getWidth());
            assertEquals(45, outputImage.getHeight());
            assertEquals(HardwareBuffer.RGB_888, outputImage.getFormat());
        }
    }

    @Test
    public void testImageReaderBuilderSetImageFormat() throws Exception {
        try (
            ImageReader reader = new ImageReader
                .Builder(20, 45)
                .setMaxImages(2)
                .setImageFormat(ImageFormat.YUV_420_888)
                .build();
            ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
            Image outputImage = writer.dequeueInputImage()
        ) {
            assertEquals(2, reader.getMaxImages());
            assertEquals(ImageFormat.YUV_420_888, reader.getImageFormat());
            assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
            // The HAL dataspace for ImageFormat.YUV_420_888 is DATASPACE_JFIF.
            assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());

            // The writer should retrieve all info from the reader's surface.
            assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
            assertEquals(HardwareBuffer.YCBCR_420_888, writer.getHardwareBufferFormat());

            assertEquals(20, outputImage.getWidth());
            assertEquals(45, outputImage.getHeight());
            assertEquals(ImageFormat.YUV_420_888, outputImage.getFormat());
        }
    }

    /**
     * Test two image streams (YUV_420_888 and RAW_SENSOR) captured using ImageReader.
     */
    @Test
    public void testImageReaderYuvAndRaw() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "YUV and RAW testing for camera " + id);
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and PRIVATE) captured using ImageReader.
     */
    @Test
    public void testImageReaderYuvAndPrivate() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "YUV and PRIVATE testing for camera " + id);
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id
                            + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                // YUV PREVIEW + PRIVATE PREVIEW is a mandatory legacy stream combination.
                bufferFormatWithYuvTestByCamera(ImageFormat.PRIVATE,
                        /*setUsageFlag*/false, /*useYuvSize*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * If the camera device advertises the SECURE_IMAGE_DATA capability, test
     * ImageFormat.PRIVATE + PROTECTED usage capture using an ImageReader created with the
     * factory method that takes a usage flag argument, passing a custom usage flag.
     */
    @Test
    public void testImageReaderPrivateWithProtectedUsageFlag() throws Exception {
        Set<Pair<String, String>> unavailablePhysicalCameras = getUnavailablePhysicalCameras(
                mCameraManager, mHandler);
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Private format and protected usage testing for camera " + id);
                List<String> testCameraIds = new ArrayList<>();

                if (mAllStaticInfo.get(id).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
                    // Test the camera id without using physical camera
                    testCameraIds.add(null);
                }

                if (mAllStaticInfo.get(id).isLogicalMultiCamera()) {
                    Set<String> physicalIdsSet =
                            mAllStaticInfo.get(id).getCharacteristics().getPhysicalCameraIds();
                    for (String physicalId : physicalIdsSet) {
                        StaticMetadata phyInfo = mAllStaticInfo.get(physicalId);
                        boolean isUnavailable =
                                unavailablePhysicalCameras.contains(new Pair<>(id, physicalId));
                        if (phyInfo.isCapabilitySupported(CameraCharacteristics
                                .REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)
                                && !isUnavailable) {
                            testCameraIds.add(physicalId);
                        }
                    }
                }

                if (testCameraIds.isEmpty()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support secure image data capability, skipping");
                    continue;
                }
                openDevice(id);

                BufferFormatTestParam params = new BufferFormatTestParam(
                        ImageFormat.PRIVATE, /*repeating*/true);
                params.mSetUsageFlag = true;
                params.mUsageFlag = HardwareBuffer.USAGE_PROTECTED_CONTENT;
                params.mRepeating = true;
                params.mCheckSession = true;
                params.mValidateImageData = false;
                for (String testCameraId : testCameraIds) {
                    params.mPhysicalId = testCameraId;
                    bufferFormatTestByCamera(params);
                }
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test two image streams (YUV_420_888 and RAW_SENSOR) captured using an ImageReader created
     * with the factory method that takes a usage flag argument.
     */
    @Test
    public void testImageReaderYuvAndRawWithUsageFlag() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "YUV and RAW testing for camera " + id);
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR, true);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Check that the center patches for YUV and JPEG outputs for the same frame match for each YUV
     * resolution and format supported.
     */
    @Test
    public void testAllOutputYUVResolutions() throws Exception {
        Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
                BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
        for (String id : getCameraIdsUnderTest()) {
            try {
                Log.v(TAG, "Testing all YUV image resolutions for camera " + id);

                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                openDevice(id);
                // Only do warmup captures on LEGACY devices; skip warmup otherwise.
                int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
                        MAX_NUM_IMAGES - 1 : 0;
1066 
1067                 // NV21 isn't supported by ImageReader.
1068                 final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};
1069 
1070                 CameraCharacteristics.Key<StreamConfigurationMap> key =
1071                         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
1072                 StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
1073                 int[] supportedFormats = config.getOutputFormats();
1074                 List<Integer> supportedYUVFormats = new ArrayList<>();
1075                 for (int format : YUVFormats) {
1076                     if (CameraTestUtils.contains(supportedFormats, format)) {
1077                         supportedYUVFormats.add(format);
1078                     }
1079                 }
1080 
1081                 Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
1082                         StaticMetadata.StreamDirection.Output);
1083                 assertFalse("JPEG output not supported for camera " + id +
1084                         ", at least one JPEG output is required.", jpegSizes.length == 0);
1085 
1086                 Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
1087                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
1088                 Size QCIF = new Size(176, 144);
1089                 Size FULL_HD = new Size(1920, 1080);
1090                 for (int format : supportedYUVFormats) {
1091                     Size[] targetCaptureSizes =
1092                             mStaticInfo.getAvailableSizesForFormatChecked(format,
1093                             StaticMetadata.StreamDirection.Output);
1094 
1095                     for (Size captureSz : targetCaptureSizes) {
1096                         if (VERBOSE) {
1097                             Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
1098                                     + maxJpegSize + " for camera " + mCamera.getId());
1099                         }
1100 
1101                         ImageReader jpegReader = null;
1102                         ImageReader yuvReader = null;
1103                         try {
1104                             // Create YUV image reader
1105                             SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
1106                             yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1107                                     yuvListener);
1108                             Surface yuvSurface = yuvReader.getSurface();
1109 
1110                             // Create JPEG image reader
1111                             SimpleImageReaderListener jpegListener =
1112                                     new SimpleImageReaderListener();
1113                             jpegReader = createImageReader(maxJpegSize,
1114                                     ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
1115                             Surface jpegSurface = jpegReader.getSurface();
1116 
1117                             // Setup session
1118                             List<Surface> outputSurfaces = new ArrayList<Surface>();
1119                             outputSurfaces.add(yuvSurface);
1120                             outputSurfaces.add(jpegSurface);
1121                             createSession(outputSurfaces);
1122 
1123                             int state = mCameraSessionListener.getStateWaiter().waitForAnyOfStates(
1124                                         Arrays.asList(sessionStates),
1125                                         CameraTestUtils.SESSION_CONFIGURE_TIMEOUT_MS);
1126 
1127                             if (state == BlockingSessionCallback.SESSION_CONFIGURE_FAILED) {
1128                                 if (captureSz.getWidth() > maxPreviewSize.getWidth() ||
1129                                         captureSz.getHeight() > maxPreviewSize.getHeight()) {
1130                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1131                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1132                                             + mCamera.getId() +
1133                                             " because full size jpeg + yuv larger than "
1134                                             + "max preview size (" + maxPreviewSize
1135                                             + ") is not supported");
1136                                     continue;
1137                                 } else if (captureSz.equals(QCIF) &&
1138                                         ((maxJpegSize.getWidth() > FULL_HD.getWidth()) ||
1139                                          (maxJpegSize.getHeight() > FULL_HD.getHeight()))) {
1140                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1141                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1142                                             + mCamera.getId() +
1143                                             " because QCIF + >Full_HD size is not supported");
1144                                     continue;
1145                                 } else {
1146                                     fail("Camera " + mCamera.getId() +
1147                                             ":session configuration failed for {jpeg: " +
1148                                             maxJpegSize + ", yuv: " + captureSz + "}");
1149                                 }
1150                             }
1151 
1152                             // Warm up camera preview (mainly to give legacy devices time to do 3A).
1153                             CaptureRequest.Builder warmupRequest =
1154                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1155                             warmupRequest.addTarget(yuvSurface);
1156                             assertNotNull("Fail to get CaptureRequest.Builder", warmupRequest);
1157                             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1158 
1159                             for (int i = 0; i < warmupCaptureNumber; i++) {
1160                                 startCapture(warmupRequest.build(), /*repeating*/false,
1161                                         resultListener, mHandler);
1162                             }
1163                             for (int i = 0; i < warmupCaptureNumber; i++) {
1164                                 resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1165                                 Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1166                                 image.close();
1167                             }
1168 
1169                             // Capture image.
1170                             CaptureRequest.Builder mainRequest =
1171                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1172                             for (Surface s : outputSurfaces) {
1173                                 mainRequest.addTarget(s);
1174                             }
1175 
1176                             startCapture(mainRequest.build(), /*repeating*/false, resultListener,
1177                                     mHandler);
1178 
1179                             // Verify capture result and images
1180                             resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1181 
1182                             Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1183                             Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1184 
1185                             //Validate captured images.
1186                             CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
1187                                     captureSz.getHeight(), format, /*filePath*/null);
1188                             CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
1189                                     maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);
1190 
1191                             // Compare the image centers.
1192                             RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
1193                                     jpegImage.getHeight());
1194                             RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
1195                                     yuvImage.getHeight());
1196 
1197                             // Find scale difference between YUV and JPEG output
1198                             Matrix m = new Matrix();
1199                             m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
1200                             RectF scaledYuv = new RectF();
1201                             m.mapRect(scaledYuv, yuvDimens);
1202                             float scale = scaledYuv.width() / yuvDimens.width();
1203 
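                                 // Compare small patches centered in both outputs rather than the
                                 // full frames: the two streams may differ in resolution and aspect
                                 // ratio, but their centers should show the same scene content for
                                 // the same frame.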
1204                             final int PATCH_DIMEN = 40; // pixels in YUV
1205 
1206                             // Find matching square patch of pixels in YUV and JPEG output
1207                             RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
1208                             tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
1209                                     yuvDimens.centerY() - tempPatch.centerY());
1210                             Rect yuvPatch = new Rect();
1211                             tempPatch.roundOut(yuvPatch);
1212 
1213                             tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
1214                             tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
1215                                     jpegDimens.centerY() - tempPatch.centerY());
1216                             Rect jpegPatch = new Rect();
1217                             tempPatch.roundOut(jpegPatch);
1218 
1219                             // Decode center patches
1220                             int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
1221                                     yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
1222                             Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
1223                                     yuvPatch.height(), Bitmap.Config.ARGB_8888);
1224 
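                                 // Region-decode only the matching patch of the JPEG (avoiding a
                                 // full-resolution decode), then scale it down to the YUV patch
                                 // size so the two patches can be compared pixel by pixel.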
1225                             byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
1226                             BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
1227                                     compressedJpegData, /*offset*/0, compressedJpegData.length,
1228                                     /*isShareable*/true);
1229                             BitmapFactory.Options opt = new BitmapFactory.Options();
1230                             opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
1231                             Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
1232                             Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
1233                                     yuvPatch.width(), yuvPatch.height(), /*filter*/true);
1234 
1235                             // Compare two patches using average of per-pixel differences
1236                             double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
1237                             double tolerance = IMAGE_DIFFERENCE_TOLERANCE;
1238                             if (mStaticInfo.isHardwareLevelLegacy()) {
1239                                 tolerance = IMAGE_DIFFERENCE_TOLERANCE_LEGACY;
1240                             }
1241                             Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
1242                                     difference);
1243                             if (difference > tolerance) {
1244                                 // Dump files if running in debug mode
1245                                 if (DEBUG) {
1246                                     String jpegFileName = mDebugFileNameBase + "/" + captureSz +
1247                                             "_jpeg.jpg";
1248                                     dumpFile(jpegFileName, jpegBmap);
1249                                     String fullSizeJpegFileName = mDebugFileNameBase + "/" +
1250                                             captureSz + "_full_jpeg.jpg";
1251                                     dumpFile(fullSizeJpegFileName, compressedJpegData);
1252                                     String yuvFileName = mDebugFileNameBase + "/" + captureSz +
1253                                             "_yuv.jpg";
1254                                     dumpFile(yuvFileName, yuvBmap);
1255                                     String fullSizeYuvFileName = mDebugFileNameBase + "/" +
1256                                             captureSz + "_full_yuv.jpg";
1257                                     int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
1258                                             yuvImage.getHeight(), 0, 0, yuvImage);
1259                                     Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
1260                                             yuvImage.getWidth(), yuvImage.getHeight(),
1261                                             Bitmap.Config.ARGB_8888);
1262                                     dumpFile(fullSizeYuvFileName, fullYUVBmap);
1263                                 }
1264                                 fail("Camera " + mCamera.getId() + ": YUV image at capture size "
1265                                         + captureSz + " and JPEG image at capture size "
1266                                         + maxJpegSize + " for the same frame are not similar,"
1267                                         + " center patches have difference metric of "
1268                                         + difference + ", tolerance is " + tolerance);
1269                             }
1270 
1271                             // Stop capture, delete the streams.
1272                             stopCapture(/*fast*/false);
1273                             yuvImage.close();
1274                             jpegImage.close();
1275                             yuvListener.drain();
1276                             jpegListener.drain();
1277                         } finally {
1278                             closeImageReader(jpegReader);
1279                             jpegReader = null;
1280                             closeImageReader(yuvReader);
1281                             yuvReader = null;
1282                         }
1283                     }
1284                 }
1285 
1286             } finally {
1287                 closeDevice(id);
1288             }
1289         }
1290     }
1291 
1292     /**
1293      * Test that images captured after discarding free buffers are valid.
1294      */
1295     @Test
1296     public void testDiscardFreeBuffers() throws Exception {
1297         for (String id : getCameraIdsUnderTest()) {
1298             try {
1299                 Log.v(TAG, "Testing discardFreeBuffers for Camera " + id);
1300                 openDevice(id);
1301                 discardFreeBuffersTestByCamera();
1302             } finally {
1303                 closeDevice(id);
1304             }
1305         }
1306     }
1307 
1308     /** Tests that usage bits are preserved */
1309     @Test
1310     public void testUsageRespected() throws Exception {
1311         final long REQUESTED_USAGE_BITS =
1312                 HardwareBuffer.USAGE_GPU_COLOR_OUTPUT | HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE;
1313         ImageReader reader = ImageReader.newInstance(1, 1, PixelFormat.RGBA_8888, 1,
1314                 REQUESTED_USAGE_BITS);
1315         Surface surface = reader.getSurface();
1316         Canvas canvas = surface.lockHardwareCanvas();
1317         canvas.drawColor(Color.RED);
1318         surface.unlockCanvasAndPost(canvas);
1319         Image image = null;
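             // Posting the canvas is asynchronous, so poll briefly until the rendered buffer
             // reaches the ImageReader.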
1320         for (int i = 0; i < 100; i++) {
1321             image = reader.acquireNextImage();
1322             if (image != null) break;
1323             Thread.sleep(10);
1324         }
1325         assertNotNull(image);
1326         HardwareBuffer buffer = image.getHardwareBuffer();
1327         assertNotNull(buffer);
1328         // Mask off the upper vendor bits
1329         int myBits = (int) (buffer.getUsage() & 0xFFFFFFF);
1330         assertWithMessage("Usage bits %s did not contain requested usage bits %s", myBits,
1331                 REQUESTED_USAGE_BITS).that(myBits & REQUESTED_USAGE_BITS)
1332                         .isEqualTo(REQUESTED_USAGE_BITS);
1333     }
1334 
1335     private void testLandscapeToPortraitOverride(boolean overrideToPortrait) throws Exception {
1336         if (!SystemProperties.getBoolean(CameraManager.LANDSCAPE_TO_PORTRAIT_PROP, false)) {
1337             Log.i(TAG, "Landscape to portrait override not supported, skipping test");
1338             return;
1339         }
1340 
1341         for (String id : getCameraIdsUnderTest()) {
1342             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(
1343                     id, /*overrideToPortrait*/false);
1344             int[] modes = c.get(CameraCharacteristics.SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
1345             boolean supportsRotateAndCrop = false;
1346             for (int mode : modes) {
1347                 if (mode == CameraMetadata.SCALER_ROTATE_AND_CROP_90
1348                         || mode == CameraMetadata.SCALER_ROTATE_AND_CROP_270) {
1349                     supportsRotateAndCrop = true;
1350                     break;
1351                 }
1352             }
1353 
1354             if (!supportsRotateAndCrop) {
1355                 Log.i(TAG, "Skipping non-rotate-and-crop cameraId " + id);
1356                 continue;
1357             }
1358 
1359             int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1360             if (sensorOrientation != 0 && sensorOrientation != 180) {
1361                 Log.i(TAG, "Skipping portrait orientation sensor cameraId " + id);
1362                 continue;
1363             }
1364 
1365             Log.i(TAG, "Testing overrideToPortrait " + overrideToPortrait
1366                     + " for Camera " + id);
1367 
1368             if (overrideToPortrait) {
1369                 c = mCameraManager.getCameraCharacteristics(id, overrideToPortrait);
1370                 sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1371                 assertTrue("SENSOR_ORIENTATION should imply portrait sensor.",
1372                         sensorOrientation == 90 || sensorOrientation == 270);
1373             }
1374 
1375             BufferFormatTestParam params = new BufferFormatTestParam(
1376                     ImageFormat.JPEG, /*repeating*/false);
1377             params.mValidateImageData = true;
1378 
1379             try {
1380                 openDevice(id, overrideToPortrait);
1381                 bufferFormatTestByCamera(params);
1382             } finally {
1383                 closeDevice(id);
1384             }
1385         }
1386     }
1387 
1388     @Test
1389     public void testLandscapeToPortraitOverrideEnabled() throws Exception {
1390         testLandscapeToPortraitOverride(true);
1391     }
1392 
1393     @Test
1394     public void testLandscapeToPortraitOverrideDisabled() throws Exception {
1395         testLandscapeToPortraitOverride(false);
1396     }
1397 
1398     /**
1399      * Convert a rectangular patch in a YUV image to an ARGB color array.
1400      *
1401      * @param w width of the patch.
1402      * @param h height of the patch.
1403      * @param wOffset offset of the left side of the patch.
1404      * @param hOffset offset of the top of the patch.
1405      * @param yuvImage a YUV image to select a patch from.
1406      * @return the image patch converted to RGB as an ARGB color array.
1407      */
1408     private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
1409                                                Image yuvImage) {
1410         final int CHANNELS = 3; // yuv
1411         final float COLOR_RANGE = 255f;
1412 
1413         assertTrue("Invalid argument to convertPixelYuvToRgba",
1414                 w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
1415         assertNotNull(yuvImage);
1416 
1417         int imageFormat = yuvImage.getFormat();
1418         assertTrue("YUV image must have YUV-type format",
1419                 imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
1420                         imageFormat == ImageFormat.NV21);
1421 
1422         int height = yuvImage.getHeight();
1423         int width = yuvImage.getWidth();
1424 
1425         Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
1426         Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
1427                 /*bottom*/hOffset + h);
1428         assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
1429                 imageBounds.contains(crop));
1430         Image.Plane[] planes = yuvImage.getPlanes();
1431 
1432         Image.Plane yPlane = planes[0];
1433         Image.Plane cbPlane = planes[1];
1434         Image.Plane crPlane = planes[2];
1435 
1436         ByteBuffer yBuf = yPlane.getBuffer();
1437         int yPixStride = yPlane.getPixelStride();
1438         int yRowStride = yPlane.getRowStride();
1439         ByteBuffer cbBuf = cbPlane.getBuffer();
1440         int cbPixStride = cbPlane.getPixelStride();
1441         int cbRowStride = cbPlane.getRowStride();
1442         ByteBuffer crBuf = crPlane.getBuffer();
1443         int crPixStride = crPlane.getPixelStride();
1444         int crRowStride = crPlane.getRowStride();
1445 
1446         int[] output = new int[w * h];
1447 
1448         // TODO: Optimize this with renderscript intrinsics
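             // Each temporary row buffer is sized to hold exactly w luma samples (or w / 2
             // chroma samples) spaced pixelStride bytes apart.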
1449         byte[] yRow = new byte[yPixStride * (w - 1) + 1];
1450         byte[] cbRow = new byte[cbPixStride * (w / 2 - 1) + 1];
1451         byte[] crRow = new byte[crPixStride * (w / 2 - 1) + 1];
1452         yBuf.mark();
1453         cbBuf.mark();
1454         crBuf.mark();
1455         int initialYPos = yBuf.position();
1456         int initialCbPos = cbBuf.position();
1457         int initialCrPos = crBuf.position();
1458         int outputPos = 0;
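             // Walk the cropped region row by row. Luma is read for every row, while chroma rows
             // are re-read only on every other row since the supported formats are 4:2:0
             // subsampled (one chroma sample per 2x2 block of luma samples).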
1459         for (int i = hOffset; i < hOffset + h; i++) {
1460             yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
1461             yBuf.get(yRow);
1462             if ((i & 1) == (hOffset & 1)) {
1463                 cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
1464                 cbBuf.get(cbRow);
1465                 crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
1466                 crBuf.get(crRow);
1467             }
1468             for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
1469                 float y = yRow[yPix] & 0xFF;
1470                 float cb = cbRow[cbPix] & 0xFF;
1471                 float cr = crRow[crPix] & 0xFF;
1472 
1473                 // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
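                     //   R = Y + 1.402 * (Cr - 128)
                     //   G = Y - 0.34414 * (Cb - 128) - 0.71414 * (Cr - 128)
                     //   B = Y + 1.772 * (Cb - 128)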
1474                 int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
1475                 int g = (int) Math.max(0.0f,
1476                         Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
1477                 int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));
1478 
1479                 // Convert to ARGB pixel color (use opaque alpha)
1480                 output[outputPos++] = Color.rgb(r, g, b);
1481 
1482                 if ((j & 1) == 1) {
1483                     crPix += crPixStride;
1484                     cbPix += cbPixStride;
1485                 }
1486             }
1487         }
1488         yBuf.rewind();
1489         cbBuf.rewind();
1490         crBuf.rewind();
1491 
1492         return output;
1493     }
1494 
1495     /**
1496      * Test capturing a stream of the given format simultaneously with a YUV stream.
1497      *
1498      * <p>Uses a fixed YUV size and varies the target format capture size. Single captures are tested.</p>
1499      *
1500      * @param format The capture format to be tested along with yuv format.
1501      */
1502     private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
1503         bufferFormatWithYuvTestByCamera(format, false);
1504     }
1505 
1506     /**
1507      * Test capturing a stream of the given format simultaneously with a YUV stream.
1508      *
1509      * <p>Uses a fixed YUV size and varies the target format capture size. Single captures are tested.</p>
1510      *
1511      * @param format The capture format to be tested along with yuv format.
1512      * @param setUsageFlag Whether to create the ImageReader with an explicit usage flag
1513      *                     (HardwareBuffer.USAGE_CPU_READ_OFTEN)
1514      */
1515     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag)
1516             throws Exception {
1517         bufferFormatWithYuvTestByCamera(format, setUsageFlag, /*useYuvSize*/false);
1518     }
1519 
1520     /**
1521      * Test capturing a stream of the given format simultaneously with a YUV stream.
1522      *
1523      * <p>Compared to bufferFormatWithYuvTestByCamera(int, boolean), this overload provides
1524      * an option to use the same size for the two streams.</p>
1525      *
1526      * @param format The capture format to be tested along with yuv format.
1527      * @param setUsageFlag Whether to create the ImageReader with an explicit usage flag
1528      *                     (HardwareBuffer.USAGE_CPU_READ_OFTEN)
1529      * @param useYuvSize Whether the capture size should be the same as the YUV size
1530      */
1531     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag,
1532             boolean useYuvSize) throws Exception {
1533         if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
1534                 && format != ImageFormat.PRIVATE
1535                 && format != ImageFormat.YUV_420_888) {
1536             throw new IllegalArgumentException("Unsupported format: " + format);
1537         }
1538 
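             // Issue one capture fewer than the reader depth so that neither reader runs out of
             // free buffers before the captured images are drained and validated below.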
1539         final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
1540         Size maxYuvSz = mOrderedPreviewSizes.get(0);
1541         Size[] targetCaptureSizes = useYuvSize ? new Size[]{maxYuvSz} :
1542                 mStaticInfo.getAvailableSizesForFormatChecked(format,
1543                 StaticMetadata.StreamDirection.Output);
1544 
1545         for (Size captureSz : targetCaptureSizes) {
1546             if (VERBOSE) {
1547                 Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
1548                         + captureSz.toString() + " for camera " + mCamera.getId());
1549             }
1550 
1551             ImageReader captureReader = null;
1552             ImageReader yuvReader = null;
1553             try {
1554                 // Create YUV image reader
1555                 SimpleImageReaderListener yuvListener  = new SimpleImageReaderListener();
1556                 if (setUsageFlag) {
1557                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1558                             HardwareBuffer.USAGE_CPU_READ_OFTEN, yuvListener);
1559                 } else {
1560                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1561                             yuvListener);
1562                 }
1563 
1564                 Surface yuvSurface = yuvReader.getSurface();
1565 
1566                 // Create capture image reader
1567                 SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
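                     // PRIVATE buffers use an opaque, implementation-defined layout and are
                     // generally not CPU-readable, so request a composer-overlay usage for them
                     // instead of CPU_READ_OFTEN.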
1568                 boolean isPrivateFormat = (format == ImageFormat.PRIVATE);
1569                 long usage = isPrivateFormat ? HardwareBuffer.USAGE_COMPOSER_OVERLAY :
1570                         HardwareBuffer.USAGE_CPU_READ_OFTEN;
1571                 if (setUsageFlag || isPrivateFormat) {
1572                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1573                             usage, captureListener);
1574                 } else {
1575                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1576                             captureListener);
1577                 }
1578                 Surface captureSurface = captureReader.getSurface();
1579 
1580                 // Capture images.
1581                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1582                 outputSurfaces.add(yuvSurface);
1583                 outputSurfaces.add(captureSurface);
1584                 CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
1585                         CameraDevice.TEMPLATE_PREVIEW);
1586                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1587 
1588                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1589                     startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
1590                 }
1591 
1592                 // Verify capture result and images
1593                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1594                     resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1595                     if (VERBOSE) {
1596                         Log.v(TAG, " Got the capture result back for " + i + "th capture");
1597                     }
1598 
1599                     Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1600                     if (VERBOSE) {
1601                         Log.v(TAG, " Got the yuv image back for " + i + "th capture");
1602                     }
1603 
1604                     Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1605                     if (VERBOSE) {
1606                         Log.v(TAG, " Got the capture image back for " + i + "th capture");
1607                     }
1608 
1609                     // Validate captured images.
1610                     CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
1611                             maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
1612                     CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
1613                             captureSz.getHeight(), format, /*filePath*/null);
1614                     yuvImage.close();
1615                     captureImage.close();
1616                 }
1617 
1618                 // Stop capture, delete the streams.
1619                 stopCapture(/*fast*/false);
1620             } finally {
1621                 closeImageReader(captureReader);
1622                 captureReader = null;
1623                 closeImageReader(yuvReader);
1624                 yuvReader = null;
1625             }
1626         }
1627     }
1628 
1629     private void invalidAccessTestAfterClose() throws Exception {
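             // Depth-only cameras have no color (YUV) output, so fall back to DEPTH16 for them.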
1630         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1631             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1632 
1633         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1634                 StaticMetadata.StreamDirection.Output);
1635         Image img = null;
1636         // Create ImageReader.
1637         mListener = new SimpleImageListener();
1638         createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);
1639 
1640         // Start capture.
1641         CaptureRequest request = prepareCaptureRequest();
1642         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1643         startCapture(request, /* repeating */false, listener, mHandler);
1644 
1645         mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1646         img = mReader.acquireNextImage();
1647         Plane firstPlane = img.getPlanes()[0];
1648         ByteBuffer buffer = firstPlane.getBuffer();
1649         img.close();
1650 
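             // The image, its first plane, and the plane's buffer were all obtained before
             // close(); the shared helper verifies that accessing them now fails as expected.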
1651         imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
1652     }
1653 
1654     /**
1655      * Test that images captured after discarding free buffers are valid.
1656      */
1657     private void discardFreeBuffersTestByCamera() throws Exception {
1658         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1659             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1660 
1661         final Size SIZE = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1662                 StaticMetadata.StreamDirection.Output)[0];
1663         // Create ImageReader.
1664         mListener = new SimpleImageListener();
1665         createDefaultImageReader(SIZE, FORMAT, MAX_NUM_IMAGES, mListener);
1666 
1667         // Start capture.
1668         final boolean REPEATING = true;
1669         final boolean SINGLE = false;
1670         CaptureRequest request = prepareCaptureRequest();
1671         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1672         startCapture(request, REPEATING, listener, mHandler);
1673 
1674         // Validate images and capture results.
1675         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1676         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1677 
1678         // Discard free buffers.
1679         mReader.discardFreeBuffers();
1680 
1681         // Validate images and capture results again.
1682         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1683         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1684 
1685         // Stop repeating request in preparation for discardFreeBuffers
1686         mCameraSession.stopRepeating();
1687         mCameraSessionListener.getStateWaiter().waitForState(
1688                 BlockingSessionCallback.SESSION_READY, SESSION_READY_TIMEOUT_MS);
1689 
1690         // Drain the reader queue and discard free buffers from the reader.
1691         Image img = mReader.acquireLatestImage();
1692         if (img != null) {
1693             img.close();
1694         }
1695         mReader.discardFreeBuffers();
1696 
1697         // Do a single capture for camera device to reallocate buffers
1698         mListener.reset();
1699         startCapture(request, SINGLE, listener, mHandler);
1700         validateImage(SIZE, FORMAT, /*captureCount*/ 1, SINGLE, /*colorSpace*/ null);
1701     }
1702 
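         /**
          * Parameter holder for {@link #bufferFormatTestByCamera}. Only the format and the
          * repeating flag are required; the remaining fields default to image data validation
          * enabled, the STANDARD dynamic range profile, and the DEFAULT timestamp base.
          */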
1703     private class BufferFormatTestParam {
1704         public int mFormat;
1705         public boolean mRepeating;
1706         public boolean mSetUsageFlag = false;
1707         public long mUsageFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN;
1708         public boolean mCheckSession = false;
1709         public boolean mValidateImageData = true;
1710         public String mPhysicalId = null;
1711         public long mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
1712         public ColorSpace.Named mColorSpace;
1713         public boolean mUseColorSpace = false;
1714         public int mTimestampBase = OutputConfiguration.TIMESTAMP_BASE_DEFAULT;
1715 
1716         BufferFormatTestParam(int format, boolean repeating) {
1717             mFormat = format;
1718             mRepeating = repeating;
1719         }
1720     };
1721 
1722     private void bufferFormatTestByCamera(BufferFormatTestParam params)
1723             throws Exception {
1724         int format = params.mFormat;
1725         boolean setUsageFlag = params.mSetUsageFlag;
1726         long usageFlag = params.mUsageFlag;
1727         boolean repeating = params.mRepeating;
1728         boolean validateImageData = params.mValidateImageData;
1729         int timestampBase = params.mTimestampBase;
1730 
1731         String physicalId = params.mPhysicalId;
1732         StaticMetadata staticInfo;
1733         if (physicalId == null) {
1734             staticInfo = mStaticInfo;
1735         } else {
1736             staticInfo = mAllStaticInfo.get(physicalId);
1737         }
1738 
1739         Size[] availableSizes = staticInfo.getAvailableSizesForFormatChecked(format,
1740                 StaticMetadata.StreamDirection.Output);
1741 
1742         boolean secureTest = setUsageFlag &&
1743                 ((usageFlag & HardwareBuffer.USAGE_PROTECTED_CONTENT) != 0);
1744         Size secureDataSize = null;
1745         if (secureTest) {
1746             secureDataSize = staticInfo.getCharacteristics().get(
1747                     CameraCharacteristics.SCALER_DEFAULT_SECURE_IMAGE_SIZE);
1748         }
1749 
1750         boolean validateTimestampBase = (timestampBase
1751                 != OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1752         Integer deviceTimestampSource = staticInfo.getCharacteristics().get(
1753                 CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
1754         // for each resolution, test imageReader:
1755         for (Size sz : availableSizes) {
1756             try {
1757                 // For secure mode test only test default secure data size if HAL advertises one.
1758                 if (secureDataSize != null && !secureDataSize.equals(sz)) {
1759                     continue;
1760                 }
1761 
1762                 if (VERBOSE) {
1763                     Log.v(TAG, "Testing size " + sz.toString() + " format " + format
1764                             + " for camera " + mCamera.getId());
1765                 }
1766 
1767                 // Create ImageReader.
1768                 mListener  = new SimpleImageListener();
1769                 if (setUsageFlag) {
1770                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, usageFlag, mListener);
1771                 } else {
1772                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1773                 }
1774 
1775                 // Don't queue up images if we won't validate them
1776                 if (!validateImageData && !validateTimestampBase) {
1777                     ImageDropperListener imageDropperListener = new ImageDropperListener();
1778                     mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1779                 }
1780 
1781                 if (params.mCheckSession) {
1782                     checkImageReaderSessionConfiguration(
1783                             "Camera capture session validation for format: " + format + " failed",
1784                             physicalId);
1785                 }
1786 
1787                 ArrayList<OutputConfiguration> outputConfigs = new ArrayList<>();
1788                 OutputConfiguration config = new OutputConfiguration(mReader.getSurface());
1789                 assertTrue("Default timestamp source must be DEFAULT",
1790                         config.getTimestampBase() == OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1791                 assertTrue("Default mirroring mode must be AUTO",
1792                         config.getMirrorMode() == OutputConfiguration.MIRROR_MODE_AUTO);
1793                 if (physicalId != null) {
1794                     config.setPhysicalCameraId(physicalId);
1795                 }
1796                 config.setDynamicRangeProfile(params.mDynamicRangeProfile);
1797                 config.setTimestampBase(params.mTimestampBase);
1798                 outputConfigs.add(config);
1799 
1800                 CaptureRequest request;
1801                 if (params.mUseColorSpace) {
1802                     request = prepareCaptureRequestForColorSpace(
1803                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW, params.mColorSpace)
1804                         .build();
1805                 } else {
1806                     request = prepareCaptureRequestForConfigs(
1807                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW).build();
1808                 }
1809 
1810                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1811                 startCapture(request, repeating, listener, mHandler);
1812 
1813                 int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;
1814 
1815                 if (validateTimestampBase) {
1816                     validateTimestamps(deviceTimestampSource, timestampBase, numFrameVerified,
1817                             listener, repeating);
1818                 }
1819 
1820                 if (validateImageData) {
1821                     // Validate images.
1822                     ColorSpace colorSpace = null;
1823                     if (params.mUseColorSpace) {
1824                         colorSpace = ColorSpace.get(params.mColorSpace);
1825                     }
1826                     validateImage(sz, format, numFrameVerified, repeating, colorSpace);
1827                 }
1828 
1829                 // Validate capture result.
1830                 validateCaptureResult(format, sz, listener, numFrameVerified);
1831 
1832                 // stop capture.
1833                 stopCapture(/*fast*/false);
1834             } finally {
1835                 closeDefaultImageReader();
1836             }
1837 
1838             // Only test one size for non-default timestamp base.
1839             if (timestampBase != OutputConfiguration.TIMESTAMP_BASE_DEFAULT) break;
1840         }
1841     }
1842 
1843     private void bufferFormatLongProcessingTimeTestByCamera(int format)
1844             throws Exception {
1845 
1846         final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
1847         final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
1848         final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;
1849 
1850         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
1851                 StaticMetadata.StreamDirection.Output);
1852 
1853         Size[] testSizes = getMinAndMaxSizes(availableSizes);
1854 
1855         // for each resolution, test imageReader:
1856         for (Size sz : testSizes) {
1857             Log.v(TAG, "testing size " + sz.toString());
1858             try {
1859                 if (VERBOSE) {
1860                     Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
1861                             format + " for camera " + mCamera.getId());
1862                 }
1863 
1864                 // Create ImageReader.
1865                 mListener  = new SimpleImageListener();
1866                 createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1867 
1868                 // Setting manual controls
1869                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1870                 outputSurfaces.add(mReader.getSurface());
1871                 CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
1872                         outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);
1873                 // Need to consume the SESSION_READY state because stopCapture() waits
1874                 // on an additional SESSION_READY state.
1875                 mCameraSessionListener.getStateWaiter().
1876                     waitForState(BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS);
1877 
1878                 requestBuilder.set(
1879                         CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
1880                 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1881                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
1882                 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
1883                         CaptureRequest.CONTROL_AE_MODE_OFF);
1884                 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1885                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1886                 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
1887                 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);
1888 
1889                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1890                 startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);
1891 
1892                 for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
1893                     mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1894 
1895                     // Verify image.
1896                     Image img = mReader.acquireNextImage();
1897                     assertNotNull("Unable to acquire next image", img);
1898                     CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
1899                             mDebugFileNameBase);
1900 
1901                     // Verify the exposure time and iso match the requested values.
1902                     CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1903 
1904                     long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
1905                             getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1906                     int sensitivityDiff = TEST_SENSITIVITY_VALUE -
1907                             getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1908 
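                         // The reported exposure time may be quantized slightly below the
                         // requested value, but must not exceed it and must stay within the
                         // error margin.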
1909                     mCollector.expectTrue(
1910                             String.format("Long processing frame %d format %d size %s " +
1911                                     "exposure time was %d expecting %d.", i, format, sz.toString(),
1912                                     getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
1913                                     TEST_EXPOSURE_TIME_NS),
1914                             exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
1915                             exposureTimeDiff >= 0);
1916 
1917                     mCollector.expectTrue(
1918                             String.format("Long processing frame %d format %d size %s " +
1919                                     "sensitivity was %d expecting %d.", i, format, sz.toString(),
1920                                     getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
1921                                     TEST_SENSITIVITY_VALUE),
1922                             sensitivityDiff >= 0);
1923 
1924                     // Sleep to simulate long processing before closing the image.
1925                     Thread.sleep(LONG_PROCESS_TIME_MS);
1926                     img.close();
1927                 }
1928                 // Stop capture.
1929                 // Drain the reader queue in case the full queue blocks
1930                 // HAL from delivering new results
1931                 ImageDropperListener imageDropperListener = new ImageDropperListener();
1932                 mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1933                 Image img = mReader.acquireLatestImage();
1934                 if (img != null) {
1935                     img.close();
1936                 }
1937                 stopCapture(/*fast*/true);
1938             } finally {
1939                 closeDefaultImageReader();
1940             }
1941         }
1942     }
1943 
1944     /**
1945      * Validate capture results.
1946      *
1947      * @param format The format of this capture.
1948      * @param size The capture size.
1949      * @param listener The capture listener to get capture result callbacks.
1950      */
1951     private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
1952             int numFrameVerified) {
1953         for (int i = 0; i < numFrameVerified; i++) {
1954             CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1955 
1956             // TODO: Update this to use availableResultKeys once shim supports this.
1957             if (mStaticInfo.isCapabilitySupported(
1958                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
1959                 StaticMetadata staticInfo = mStaticInfo;
1960                 boolean supportActivePhysicalIdConsistency =
1961                         PropertyUtil.getFirstApiLevel() >= Build.VERSION_CODES.S;
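                     // On logical multi-camera devices, validate the 3A result values against the
                     // ranges of the physical camera that actually produced this frame.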
1962                 if (mStaticInfo.isLogicalMultiCamera() && supportActivePhysicalIdConsistency
1963                         && mStaticInfo.isActivePhysicalCameraIdSupported()) {
1964                     String activePhysicalId =
1965                             result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
1966                     staticInfo = mAllStaticInfo.get(activePhysicalId);
1967                 }
1968 
1969                 Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1970                 Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1971                 mCollector.expectInRange(
1972                         String.format(
1973                                 "Capture for format %d, size %s exposure time is invalid.",
1974                                 format, size.toString()),
1975                         exposureTime,
1976                         staticInfo.getExposureMinimumOrDefault(),
1977                         staticInfo.getExposureMaximumOrDefault()
1978                 );
1979                 mCollector.expectInRange(
1980                         String.format("Capture for format %d, size %s sensitivity is invalid.",
1981                                 format, size.toString()),
1982                         sensitivity,
1983                         staticInfo.getSensitivityMinimumOrDefault(),
1984                         staticInfo.getSensitivityMaximumOrDefault()
1985                 );
1986             }
1987             // TODO: add more key validations.
1988         }
1989     }
1990 
1991     private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
1992         private final ConditionVariable imageAvailable = new ConditionVariable();
1993         @Override
1994         public void onImageAvailable(ImageReader reader) {
1995             if (mReader != reader) {
1996                 return;
1997             }
1998 
1999             if (VERBOSE) Log.v(TAG, "new image available");
2000             imageAvailable.open();
2001         }
2002 
2003         public void waitForAnyImageAvailable(long timeout) {
2004             if (imageAvailable.block(timeout)) {
2005                 imageAvailable.close();
2006             } else {
2007                 fail("wait for image available timed out after " + timeout + "ms");
2008             }
2009         }
2010 
2011         public void closePendingImages() {
2012             Image image = mReader.acquireLatestImage();
2013             if (image != null) {
2014                 image.close();
2015             }
2016         }
2017 
2018         public void reset() {
2019             imageAvailable.close();
2020         }
2021     }
2022 
2023     private void validateImage(Size sz, int format, int captureCount, boolean repeating,
2024             ColorSpace colorSpace) throws Exception {
2025         // TODO: Add more format here, and wrap each one as a function.
2026         Image img;
2027         final int MAX_RETRY_COUNT = 20;
2028         int numImageVerified = 0;
2029         int reTryCount = 0;
2030         while (numImageVerified < captureCount) {
2031             assertNotNull("Image listener is null", mListener);
2032             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
2033             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
2034             if (repeating) {
2035                 /**
2036                  * Acquire the latest image in case the validation is slower than
2037                  * the image producing rate.
2038                  */
2039                 img = mReader.acquireLatestImage();
2040                 /**
2041                  * Sometimes, if multiple onImageAvailable callbacks are queued,
2042                  * acquireLatestImage may clear all buffers before the corresponding callback
2043                  * is executed. Wait for a new frame in that case.
2044                  */
2045                 if (img == null && reTryCount < MAX_RETRY_COUNT) {
2046                     reTryCount++;
2047                     continue;
2048                 }
2049             } else {
2050                 img = mReader.acquireNextImage();
2051             }
2052             assertNotNull("Unable to acquire the latest image", img);
2053             if (VERBOSE) Log.v(TAG, "Got the latest image");
2054             CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
2055                     mDebugFileNameBase, colorSpace);
2056             HardwareBuffer hwb = img.getHardwareBuffer();
2057             assertNotNull("Unable to retrieve the Image's HardwareBuffer", hwb);
2058             if (format == ImageFormat.DEPTH_JPEG) {
2059                 byte [] dynamicDepthBuffer = CameraTestUtils.getDataFromImage(img);
2060                 assertTrue("Dynamic depth validation failed!",
2061                         validateDynamicDepthNative(dynamicDepthBuffer));
2062             }
2063             if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
2064             img.close();
2065             numImageVerified++;
2066             reTryCount = 0;
2067         }
2068 
2069         // Return all pending images to the ImageReader as the validateImage may
2070         // take a while to return and there could be many images pending.
2071         mListener.closePendingImages();
2072     }
2073 
2074     private void validateTimestamps(Integer deviceTimestampSource, int timestampBase,
2075             int captureCount, SimpleCaptureCallback listener, boolean repeating) throws Exception {
2076         Image img;
2077         final int MAX_RETRY_COUNT = 20;
2078         int numImageVerified = 0;
2079         int retryCount = 0;
2080         List<Long> imageTimestamps = new ArrayList<Long>();
2081         assertNotNull("Image listener is null", mListener);
2082         while (numImageVerified < captureCount) {
2083             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
2084             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
2085             if (repeating) {
2086                 img = mReader.acquireNextImage();
2087                 if (img == null && retryCount < MAX_RETRY_COUNT) {
2088                     retryCount++;
2089                     continue;
2090                 }
2091             } else {
2092                 img = mReader.acquireNextImage();
2093             }
2094             assertNotNull("Unable to acquire the latest image", img);
2095             if (VERBOSE) {
2096                 Log.v(TAG, "Got the latest image with timestamp " + img.getTimestamp());
2097             }
2098             imageTimestamps.add(img.getTimestamp());
2099             img.close();
2100             numImageVerified++;
2101             retryCount = 0;
2102         }
2103 
2104         List<Long> captureStartTimestamps = listener.getCaptureStartTimestamps(captureCount);
2105         if (VERBOSE) {
2106             Log.v(TAG, "deviceTimestampSource: " + deviceTimestampSource
2107                     + ", timestampBase: " + timestampBase + ", captureStartTimestamps: "
2108                     + captureStartTimestamps + ", imageTimestamps: " + imageTimestamps);
2109         }
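             // When the requested timestamp base is SENSOR, or when it already matches the
             // device's timestamp source, no conversion is expected, so every image timestamp
             // must match one of the onCaptureStarted timestamps.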
2110         if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_SENSOR
2111                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_MONOTONIC
2112                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
2113                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_REALTIME
2114                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME)) {
2115             // Makes sure image timestamps match capture started timestamp
2116             for (Long timestamp : imageTimestamps) {
2117                 mCollector.expectTrue("Image timestamp " + timestamp
2118                         + " should match one of onCaptureStarted timestamps "
2119                         + captureStartTimestamps,
2120                         captureStartTimestamps.contains(timestamp));
2121             }
2122         } else if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
2123             // Make sure that timestamp base is MONOTONIC. Do not strictly check against
2124             // choreographer callback because there are cases camera framework doesn't use
2125             // choreographer timestamp (when consumer is slower than camera for example).
2126             final int TIMESTAMP_THRESHOLD_MILLIS = 3000; // 3 seconds
2127             long monotonicTime = SystemClock.uptimeMillis();
2128             for (Long timestamp : imageTimestamps) {
2129                 long timestampMs = TimeUnit.NANOSECONDS.toMillis(timestamp);
2130                 mCollector.expectTrue("Image timestamp " + timestampMs + " ms should be in the "
2131                         + "same timebase as SystemClock.uptimeMillis " + monotonicTime
2132                         + " ms when timestamp base is set to CHOREOGRAPHER synced",
2133                         Math.abs(timestampMs - monotonicTime) < TIMESTAMP_THRESHOLD_MILLIS);
2134             }
2135         }
2136 
2137         // Return all pending images to the ImageReader as the validateImage may
2138         // take a while to return and there could be many images pending.
2139         mListener.closePendingImages();
2140     }
2141 
2142     /**
2143      * Gets the list of test sizes to run the test on, given the array of available sizes.
2144      * For ImageReaderTest, where the sizes are not the most relevant, it is sufficient to test with
2145      * just the min and max size, which helps reduce test time significantly.
2146      */
2147     private Size[] getMinAndMaxSizes(Size[] availableSizes) {
2148         if (availableSizes.length <= 2) {
2149             return availableSizes;
2150         }
2151 
2152         Size[] testSizes = new Size[2];
2153         Size maxSize = availableSizes[0];
2154         Size minSize = availableSizes[1];
2155 
2156         for (Size size : availableSizes) {
2157             if (size.getWidth() * size.getHeight() > maxSize.getWidth() * maxSize.getHeight()) {
2158                 maxSize = size;
2159             }
2160 
2161             if (size.getWidth() * size.getHeight() < minSize.getWidth() * minSize.getHeight()) {
2162                 minSize = size;
2163             }
2164         }
2165 
2166         testSizes[0] = minSize;
2167         testSizes[1] = maxSize;
2168 
2169         return testSizes;
2170     }
2171 
2172     /** Load dynamic depth validation jni on initialization */
2173     static {
2174         System.loadLibrary("ctscamera2_jni");
2175     }
2176     /**
2177      * Use the dynamic depth SDK to validate a dynamic depth file stored in the buffer.
2178      *
2179      * Returns false if the dynamic depth has validation errors. Validation warnings/errors
2180      * will be printed to logcat.
2181      */
2182     private static native boolean validateDynamicDepthNative(byte[] dynamicDepthBuffer);
2183 }
2184