/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.virtualdevice.cts.camera;

import static android.Manifest.permission.CAMERA;
import static android.graphics.ImageFormat.JPEG;
import static android.graphics.ImageFormat.YUV_420_888;
import static android.media.MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START;
import static android.opengl.EGL14.EGL_ALPHA_SIZE;
import static android.opengl.EGL14.EGL_BLUE_SIZE;
import static android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION;
import static android.opengl.EGL14.EGL_DEFAULT_DISPLAY;
import static android.opengl.EGL14.EGL_GREEN_SIZE;
import static android.opengl.EGL14.EGL_NONE;
import static android.opengl.EGL14.EGL_NO_CONTEXT;
import static android.opengl.EGL14.EGL_NO_DISPLAY;
import static android.opengl.EGL14.EGL_NO_SURFACE;
import static android.opengl.EGL14.EGL_RED_SIZE;
import static android.opengl.EGL14.eglChooseConfig;
import static android.opengl.EGL14.eglCreateContext;
import static android.opengl.EGL14.eglDestroyContext;
import static android.opengl.EGL14.eglGetDisplay;
import static android.opengl.EGL14.eglInitialize;
import static android.opengl.EGL14.eglMakeCurrent;
import static android.opengl.GLES20.GL_MAX_TEXTURE_SIZE;
import static android.opengl.GLES20.glGetIntegerv;

import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;

import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;

import static java.lang.Byte.toUnsignedInt;

import android.companion.virtual.camera.VirtualCameraCallback;
import android.companion.virtual.camera.VirtualCameraConfig;
import android.companion.virtual.camera.VirtualCameraStreamConfig;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageDecoder;
import android.graphics.PixelFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.media.Image;
import android.media.MediaPlayer;
import android.net.Uri;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.os.UserHandle;
import android.util.Log;
import android.view.Surface;

import androidx.annotation.ColorInt;

import com.google.common.collect.Iterables;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

public final class VirtualCameraUtils {
    static final String BACK_CAMERA_ID = "0";
    static final String FRONT_CAMERA_ID = "1";
    static final CameraCharacteristics.Key<Integer> INFO_DEVICE_ID =
            new CameraCharacteristics.Key<Integer>("android.info.deviceId", int.class);
    private static final long TIMEOUT_MILLIS = 2000L;
    private static final float EPSILON = 0.3f;
    // Maximum allowed difference between two bitmaps, computed as the average of per-pixel
    // differences.
    private static final double BITMAP_MAX_DIFF = 1.5;
    private static final String TAG = "VirtualCameraUtils";

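    /**
     * Builds a single-stream {@link VirtualCameraConfig} with the given parameters.
     *
     * <p>A minimal usage sketch with hypothetical values, assuming the calling test supplies its
     * own {@link VirtualCameraCallback}: a 640x480 YUV_420_888 stream at 30 fps, sensor
     * orientation 0, back facing, with callbacks delivered on a direct executor:
     * <pre>{@code
     * VirtualCameraConfig config = createVirtualCameraConfig(640, 480, YUV_420_888, 30, 0,
     *         CameraCharacteristics.LENS_FACING_BACK, "my virtual camera", Runnable::run,
     *         callback);
     * }</pre>
     */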
    static VirtualCameraConfig createVirtualCameraConfig(
            int width, int height, int format, int maximumFramesPerSecond, int sensorOrientation,
            int lensFacing, String name, Executor executor, VirtualCameraCallback callback) {
        return new VirtualCameraConfig.Builder(name)
                .addStreamConfig(width, height, format, maximumFramesPerSecond)
                .setVirtualCameraCallback(executor, callback)
                .setSensorOrientation(sensorOrientation)
                .setLensFacing(lensFacing)
                .build();
    }

    static void assertVirtualCameraConfig(VirtualCameraConfig config, int width, int height,
            int format, int maximumFramesPerSecond, int sensorOrientation, int lensFacing,
            String name) {
        assertThat(config.getName()).isEqualTo(name);
        assertThat(config.getStreamConfigs()).hasSize(1);
        VirtualCameraStreamConfig streamConfig =
                Iterables.getOnlyElement(config.getStreamConfigs());
        assertThat(streamConfig.getWidth()).isEqualTo(width);
        assertThat(streamConfig.getHeight()).isEqualTo(height);
        assertThat(streamConfig.getFormat()).isEqualTo(format);
        assertThat(streamConfig.getMaximumFramesPerSecond()).isEqualTo(maximumFramesPerSecond);
        assertThat(config.getSensorOrientation()).isEqualTo(sensorOrientation);
        assertThat(config.getLensFacing()).isEqualTo(lensFacing);
    }

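    // Fills the whole surface with the given solid color by drawing into a software Canvas.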
    static void paintSurface(Surface surface, @ColorInt int color) {
        Canvas canvas = surface.lockCanvas(null);
        canvas.drawColor(color);
        surface.unlockCanvasAndPost(canvas);
    }

    static void paintSurfaceRed(Surface surface) {
        paintSurface(surface, Color.RED);
    }

    // Converts YUV to ARGB int representation,
    // using BT601 full-range matrix.
    // See https://en.wikipedia.org/wiki/YCbCr#JPEG_conversion
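    // For example, full-range red (255, 0, 0) corresponds to roughly (Y, U, V) = (76, 85, 255),
    // and this function maps (76, 85, 255) back to approximately 0xffff0000.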
    private static int yuv2rgb(int y, int u, int v) {
        int r = (int) (y + 1.402f * (v - 128f));
        int g = (int) (y - 0.344136f * (u - 128f) - 0.714136 * (v - 128f));
        int b = (int) (y + 1.772 * (u - 128f));
        // Clamp to [0, 255] so that out-of-range results don't bleed into neighboring channels
        // when packed into the ARGB int.
        r = Math.max(0, Math.min(255, r));
        g = Math.max(0, Math.min(255, g));
        b = Math.max(0, Math.min(255, b));
        return 0xff000000 | (r << 16) | (g << 8) | b;
    }

    // Compares two ARGB colors and returns true if they are approximately
    // the same color.
    private static boolean areColorsAlmostIdentical(int colorA, int colorB) {
        float a1 = ((colorA >> 24) & 0xff) / 255f;
        float r1 = ((colorA >> 16) & 0xff) / 255f;
        float g1 = ((colorA >> 8) & 0xff) / 255f;
        float b1 = (colorA & 0xff) / 255f;

        float a2 = ((colorB >> 24) & 0xff) / 255f;
        float r2 = ((colorB >> 16) & 0xff) / 255f;
        float g2 = ((colorB >> 8) & 0xff) / 255f;
        float b2 = (colorB & 0xff) / 255f;

        float mse = ((a1 - a2) * (a1 - a2)
                + (r1 - r2) * (r1 - r2)
                + (g1 - g2) * (g1 - g2)
                + (b1 - b2) * (b1 - b2)) / 4;

        return mse < EPSILON;
    }

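    // Returns true if every pixel of the YUV_420_888 image is approximately the given ARGB color.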
    private static boolean yuv420ImageHasColor(Image image, int color) {
        final int width = image.getWidth();
        final int height = image.getHeight();
        final Image.Plane[] planes = image.getPlanes();
        for (int j = 0; j < height; ++j) {
            int jChroma = j / 2;
            for (int i = 0; i < width; ++i) {
                int iChroma = i / 2;
                int y = toUnsignedInt(planes[0].getBuffer().get(
                        j * planes[0].getRowStride() + i * planes[0].getPixelStride()));
                int u = toUnsignedInt(planes[1].getBuffer().get(
                        jChroma * planes[1].getRowStride() + iChroma * planes[1].getPixelStride()));
                int v = toUnsignedInt(planes[2].getBuffer().get(
                        jChroma * planes[2].getRowStride() + iChroma * planes[2].getPixelStride()));
                int argb = yuv2rgb(y, u, v);
                if (!areColorsAlmostIdentical(argb, color)) {
                    return false;
                }
            }
        }
        return true;
    }

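    // Returns true if every pixel of the JPEG image is approximately the given ARGB color.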
    private static boolean jpegImageHasColor(Image image, int color) throws IOException {
        Bitmap bitmap = ImageDecoder.decodeBitmap(
                ImageDecoder.createSource(image.getPlanes()[0].getBuffer())).copy(
                Bitmap.Config.ARGB_8888, false);
        final int width = bitmap.getWidth();
        final int height = bitmap.getHeight();
        for (int j = 0; j < height; ++j) {
            for (int i = 0; i < width; ++i) {
                if (!areColorsAlmostIdentical(bitmap.getColor(i, j).toArgb(), color)) {
                    return false;
                }
            }
        }
        return true;
    }

    // TODO(b/316326725) Turn this into a proper custom matcher.
    static boolean imageHasColor(Image image, int color) throws IOException {
        return switch (image.getFormat()) {
            case YUV_420_888 -> yuv420ImageHasColor(image, color);
            case JPEG -> jpegImageHasColor(image, color);
            default -> {
                fail("Encountered unsupported image format: " + image.getFormat());
                yield false;
            }
        };
    }

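    // Maps a pixel format name used in test arguments to the corresponding
    // ImageFormat/PixelFormat constant, failing the test for unknown names.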
    static int toFormat(String str) {
        if (str.equals("YUV_420_888")) {
            return YUV_420_888;
        }
        if (str.equals("RGBA_888")) {
            return PixelFormat.RGBA_8888;
        }
        if (str.equals("JPEG")) {
            return JPEG;
        }

        fail("Unknown pixel format string: " + str);
        return PixelFormat.UNKNOWN;
    }

    /**
     * Writes the image to disk so it can be pulled by the collector in case of error.
     *
     * @see com.android.tradefed.device.metric.FilePullerLogCollector
     */
    private static void writeImageToDisk(String imageName, Bitmap bitmap) {
        File dir = getApplicationContext().getFilesDir();
        // The FilePullerLogCollector only pulls images in PNG format.
        File imageFile = new File(dir, imageName + ".png");
        try {
            Log.i(TAG, "Saving image to disk at " + imageFile.getAbsolutePath());
            bitmap.compress(Bitmap.CompressFormat.PNG, 80, new FileOutputStream(imageFile));
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @param generated Bitmap generated from the test.
     * @param golden    Golden bitmap to compare to.
     * @param prefix    Prefix for the image file generated in case of error.
     */
    static void assertImagesSimilar(Bitmap generated, Bitmap golden, String prefix) {
        assertImagesSimilar(generated, golden, prefix, BITMAP_MAX_DIFF);
    }

    /**
     * Asserts that the generated bitmap is similar to the golden bitmap, writing both images to
     * disk if the assertion fails.
     *
     * @param generated Bitmap generated from the test.
     * @param golden    Golden bitmap to compare to.
     * @param prefix    Prefix for the image file generated in case of error.
     * @param maxDiff   Maximum allowed difference, as the average of per-pixel differences.
     */
    static void assertImagesSimilar(Bitmap generated, Bitmap golden, String prefix,
            double maxDiff) {
        boolean assertionPassed = false;
        try {
            double actual = BitmapUtils.calcDifferenceMetric(generated, golden);
            assertWithMessage("Generated image does not match golden. "
                    + "Images have been saved to disk.").that(actual).isAtMost(maxDiff);
            assertionPassed = true;
        } finally {
            if (!assertionPassed) {
                writeImageToDisk(prefix + "_generated", generated);
                writeImageToDisk(prefix + "_golden", golden);
            }
        }
    }

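    /**
     * Plays the given raw video resource into the {@link Surface} passed to {@link #accept},
     * for example a surface delivered through the {@link VirtualCameraCallback} when a stream is
     * configured, and blocks until the media player reports that the first video frame has been
     * rendered.
     */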
    static class VideoRenderer implements Consumer<Surface> {
        private final MediaPlayer mPlayer;
        private final CountDownLatch mLatch;

        VideoRenderer(int resId) {
            String path =
                    "android.resource://" + getApplicationContext().getPackageName() + "/" + resId;
            mPlayer = MediaPlayer.create(getApplicationContext(), Uri.parse(path));
            mLatch = new CountDownLatch(1);

            mPlayer.setOnInfoListener((mp, what, extra) -> {
                if (what == MEDIA_INFO_VIDEO_RENDERING_START) {
                    mLatch.countDown();
                    return true;
                }
                return false;
            });
        }

        @Override
        public void accept(Surface surface) {
            mPlayer.setSurface(surface);
            mPlayer.seekTo(1000);
            mPlayer.start();
            try {
                // Block until media player has drawn the first video frame
                assertWithMessage("Media player did not notify first frame on time")
                        .that(mLatch.await(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS))
                        .isTrue();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        }
    }

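    // Decodes a bitmap from a raw resource without density scaling, so its pixel size matches
    // the resource exactly.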
    static Bitmap loadBitmapFromRaw(int rawResId) {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inScaled = false;
        return BitmapFactory.decodeResource(getApplicationContext().getResources(),
                rawResId, options);
    }

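    // Decodes the JPEG payload of the image into an ARGB_8888 bitmap.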
    static Bitmap jpegImageToBitmap(Image image) throws IOException {
        assertThat(image.getFormat()).isEqualTo(JPEG);
        return ImageDecoder.decodeBitmap(
                ImageDecoder.createSource(image.getPlanes()[0].getBuffer())).copy(
                Bitmap.Config.ARGB_8888, false);
    }

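    // Grants the CAMERA runtime permission to this CTS package in the context of the given
    // virtual device.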
    static void grantCameraPermission(int deviceId) {
        Context deviceContext = getInstrumentation().getTargetContext()
                .createDeviceContext(deviceId);
        deviceContext.getPackageManager().grantRuntimePermission("android.virtualdevice.cts.camera",
                CAMERA, UserHandle.of(deviceContext.getUserId()));
    }

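    // Queries GL_MAX_TEXTURE_SIZE using a temporary GLES 2.0 context on the default EGL display,
    // returning 0 if no suitable EGL config is found.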
    static int getMaximumTextureSize() {
        EGLDisplay eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
        assumeFalse(eglDisplay.equals(EGL_NO_DISPLAY));
        int[] version = new int[2];
        eglInitialize(eglDisplay, version, 0, version, 1);

        int[] attribList = {EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
                EGL_ALPHA_SIZE, 8, EGL_NONE};

        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!eglChooseConfig(
                eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
            return 0;
        }

        int[] attrib2_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
        EGLContext eglContext = eglCreateContext(eglDisplay, configs[0], EGL_NO_CONTEXT,
                attrib2_list, 0);
        eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglContext);

        int[] maxSize = new int[1];
        glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxSize, 0);

        eglDestroyContext(eglDisplay, eglContext);

        return maxSize[0];
    }

    private VirtualCameraUtils() {}
}