/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNotSame;
import static junit.framework.Assert.assertTrue;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.media.CamcorderProfile;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
/**
 * Basic test for applying graphics manipulation to a captured video, using OpenGL ES for
 * graphics rendering and MediaCodec for video encoding.
 */
@RunWith(Parameterized.class)
public class CameraGPURecordingTest extends Camera2AndroidTestCase {
    private static final String TAG = "CameraGPURecordingTest";
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final int IFRAME_INTERVAL = 5; // seconds between I-frames
    private static final long DURATION_SEC = 8; // total recording duration, in seconds
    private static final String SWAPPED_FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n"
                    + "precision mediump float;\n"
                    + "varying vec2 vTextureCoord;\n"
                    + "uniform samplerExternalOES sTexture;\n"
                    + "void main() {\n"
                    + "  gl_FragColor = texture2D(sTexture, vTextureCoord).gbra;\n"
                    + "}\n";
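    // The ".gbra" swizzle in the shader above reorders the sampled color channels, so frames
    // rendered with it come out visibly color-shifted relative to the default pass-through shader.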

    private MediaCodec mEncoder;
    private CodecInputSurface mInputSurface;
    private MediaMuxer mMuxer;
    private int mTrackIndex;
    private boolean mMuxerStarted;
    private SurfaceTextureHolder mSurfaceTextureHolder;
    private MediaCodec.BufferInfo mBufferInfo;

    /*
     * Tests the basic camera -> GPU -> encoder path. Applies a fragment shader every other frame to
     * perform a color tweak.
     */
    @Test
    public void testCameraGpuEncoderPath() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            if (!mAllStaticInfo.get(id).isCapabilitySupported(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE)) {
                Log.i(TAG, "Camera " + id + " is not BACKWARD_COMPATIBLE and does not support "
                        + "TEMPLATE_RECORD for createCaptureRequest");
                continue;
            }
            if (mAllStaticInfo.get(id).isExternalCamera()) {
                Log.i(TAG, "Camera " + id + " does not support CamcorderProfile, skipping");
                continue;
            }
            Log.i(TAG, "Testing Camera " + id + " without video snapshot");
            captureGpuEncoderPath(id, false /*enableVideoSnapshot*/);

            Log.i(TAG, "Testing Camera " + id + " with video snapshot");
            captureGpuEncoderPath(id, true /*enableVideoSnapshot*/);
        }
    }

    private void captureGpuEncoderPath(String id, boolean enableVideoSnapshot) throws Exception {

        CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
        // Downgrade if 720p isn't available.
        if (profile == null) {
            profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
        }
        // Skip the test if neither the 720p nor the 480p camcorder profile is available.
        if (profile == null) {
            Log.i(TAG, "Camcorder profile not available for camera " + id);
            return;
        }

        try {
            openDevice(id);
            prepareEncoder(profile);
            mInputSurface.makeCurrent();

            mSurfaceTextureHolder = new SurfaceTextureHolder();
            SurfaceTexture surfaceTexture = mSurfaceTextureHolder.getSurfaceTexture();
            CameraTestUtils.SimpleImageReaderListener imageListener =
                    new CameraTestUtils.SimpleImageReaderListener();
            Size snapshotSize = mOrderedStillSizes.get(0);
            if (enableVideoSnapshot) {
                createDefaultImageReader(snapshotSize, ImageFormat.JPEG, 1, imageListener);
            }
            CaptureRequest.Builder previewRequest = createSessionAndCaptureRequest(surfaceTexture,
                    enableVideoSnapshot);
            CameraTestUtils.SimpleCaptureCallback previewListener =
                    new CameraTestUtils.SimpleCaptureCallback();

            mCameraSession.setRepeatingRequest(previewRequest.build(),
                    previewListener, mHandler);

            long startWhen = System.nanoTime();
            long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
            int frameCount = 0;

            while (System.nanoTime() < desiredEnd) {
                // Feed any pending encoder output into the muxer.
                drainEncoder(/*endOfStream=*/ false);

                String fragmentShader = null;
                if ((frameCount % 2) != 0) {
                    fragmentShader = SWAPPED_FRAGMENT_SHADER;
                }
                mSurfaceTextureHolder.changeFragmentShader(fragmentShader);

                // Acquire a new frame of input, and render it to the Surface.  If we had a
                // GLSurfaceView we could switch EGL contexts and call drawImage() a second
                // time to render it on screen.  The texture can be shared between contexts by
                // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
                // argument.
                mSurfaceTextureHolder.awaitNewImage();
                mSurfaceTextureHolder.drawImage();

                frameCount++;

                // Set the presentation time stamp from the SurfaceTexture's time stamp.  This
                // will be used by MediaMuxer to set the PTS in the video.
                Log.v(TAG, "present: "
                        + ((surfaceTexture.getTimestamp() - startWhen) / 1000000.0)
                        + "ms");
                mInputSurface.setPresentationTime(surfaceTexture.getTimestamp());

                // Submit it to the encoder.  The eglSwapBuffers call will block if the input
                // is full, which would be bad if it stayed full until we dequeued an output
                // buffer (which we can't do, since we're stuck here).  So long as we fully
                // drain the encoder before supplying additional input, the system guarantees
                // that we can supply another frame without blocking.
                Log.v(TAG, "sending frame to encoder");
                mInputSurface.swapBuffers();
            }

            if (enableVideoSnapshot) {
                CaptureRequest.Builder snapshotBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
                snapshotBuilder.addTarget(mReaderSurface);
                mCameraSession.capture(snapshotBuilder.build(),
                        new CameraTestUtils.SimpleCaptureCallback(), mHandler);

                Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
                CameraTestUtils.validateImage(image, snapshotSize.getWidth(),
                        snapshotSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);
                image.close();
            }

            mCameraSession.stopRepeating();
            previewListener.drain();
            // send end-of-stream to encoder, and drain remaining output
            drainEncoder(true);
        } finally {
            closeDevice(id);
            releaseEncoder();
            releaseSurfaceTexture();
            closeDefaultImageReader();
        }
    }

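    /**
     * Configures the video encoder and muxer state, and prepares the encoder's input Surface.
     * Initializes mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
     */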
    private void prepareEncoder(CamcorderProfile profile) throws Exception {
        mBufferInfo = new MediaCodec.BufferInfo();
        assertNotNull("Camcorder profile should not be null", profile);
        int width = profile.videoFrameWidth;
        int height = profile.videoFrameHeight;
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
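        // COLOR_FormatSurface tells the codec that input frames arrive via the input Surface
        // created below, rather than through ByteBuffers.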
        format.setInteger(
                MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, profile.videoBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, profile.videoFrameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        Log.v(TAG, "format: " + format);

        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = new CodecInputSurface(mEncoder.createInputSurface());
        mEncoder.start();

        File filesDir = mContext.getPackageManager().isInstantApp()
                ? mContext.getFilesDir()
                : mContext.getExternalFilesDir(null);
        long timestamp = System.currentTimeMillis();
        String outputPath =
                new File(filesDir.getPath(), "test-" + timestamp + "." + width + "x" + height
                        + ".mp4").toString();
        Log.i(TAG, "Output file is " + outputPath);
        try {
            mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }

        mTrackIndex = -1;
        mMuxerStarted = false;
    }

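    /**
     * Creates a capture session whose outputs are the preview SurfaceTexture (and, optionally,
     * the still-capture reader), and returns a TEMPLATE_RECORD request builder targeting the
     * preview surface.
     */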
    private CaptureRequest.Builder createSessionAndCaptureRequest(SurfaceTexture preview,
            boolean enableSnapshot) throws Exception {
        Surface previewSurface = new Surface(preview);
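        // 640x480 (VGA) is part of the mandatory stream configurations for BACKWARD_COMPATIBLE
        // devices, so every camera exercised by this test can produce buffers at this size.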
        preview.setDefaultBufferSize(640, 480);

        ArrayList<Surface> sessionOutputs = new ArrayList<>();
        sessionOutputs.add(previewSurface);
        if (enableSnapshot && mReaderSurface != null) {
            sessionOutputs.add(mReaderSurface);
        }

        createSession(sessionOutputs);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);

        previewRequest.addTarget(previewSurface);

        return previewRequest;
    }

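    /** Releases the encoder, its input surface, and the muxer, in that order. */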
    private void releaseEncoder() {
        Log.v(TAG, "releasing encoder objects");
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mInputSurface != null) {
            mInputSurface.release();
            mInputSurface = null;
        }
        if (mMuxer != null) {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }

    private void releaseSurfaceTexture() {
        if (mSurfaceTextureHolder != null) {
            mSurfaceTextureHolder.release();
            mSurfaceTextureHolder = null;
        }
    }

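    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     *
     * <p>If endOfStream is false, this returns as soon as there is no more output available. If
     * it is true, we first signal EOS to the encoder and then keep draining until the output
     * buffer carrying BUFFER_FLAG_END_OF_STREAM is seen.
     */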
    private void drainEncoder(boolean endOfStream) {
        Log.v(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            Log.v(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, /*timeoutUs=*/ 1000);
            assertTrue(String.format("Unexpected result from encoder.dequeueOutputBuffer: %d",
                    encoderStatus),
                    encoderStatus >= 0
                        || (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
                        || (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED));
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;
                } else {
                    Log.v(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                assertFalse("Format changed twice", mMuxerStarted);
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);

                // now that we have the encoder's output format, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else {
                ByteBuffer encodedData = mEncoder.getOutputBuffer(encoderStatus);
                assertNotNull(String.format("EncoderOutputBuffer %d was null", encoderStatus),
                        encodedData);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                    Log.v(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    assertTrue("Muxer hasn't started", mMuxerStarted);

                    // Adjust the ByteBuffer position/limit to match BufferInfo before handing
                    // the data to the muxer.
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    Log.v(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        Log.v(TAG, "end of stream reached");
                    }
                    break;
                }
            }
        }
    }

    /**
     * Holds state associated with a Surface used for MediaCodec encoder input.
     *
     * <p>The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses
     * that to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be
     * sent to the video encoder.
     *
     * <p>This object owns the Surface -- releasing this will release the Surface too.
     */
    private static class CodecInputSurface {
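        // Token from the EGL_ANDROID_recordable extension (also exposed as
        // EGLExt.EGL_RECORDABLE_ANDROID); requests a config whose surfaces can be handed to a
        // video encoder.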
        private static final int EGL_RECORDABLE_ANDROID = 0x3142;

        private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
        private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
        private Surface mSurface;

        /** Creates a CodecInputSurface from a Surface. */
        CodecInputSurface(Surface surface) {
            assertNotNull("CodecInputSurface is NULL", surface);
            mSurface = surface;
            eglSetup();
        }

        /** Prepares EGL. We want a GLES 2.0 context and a surface that supports recording. */
        private void eglSetup() {
            mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
            assertNotSame("Unable to get EGL14 display", mEGLDisplay, EGL14.EGL_NO_DISPLAY);

            int[] version = new int[2];
            assertTrue("Unable to initialize EGL14", EGL14.eglInitialize(mEGLDisplay, version, 0,
                    version, 1));

            // Configure EGL for recording and OpenGL ES 2.0.
            int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            EGL14.eglChooseConfig(
                    mEGLDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0);
            checkEglError("eglChooseConfig RGB888+recordable ES2");
            assertTrue("No matching EGL config found", numConfigs[0] > 0);

            // Configure context for OpenGL ES 2.0.
            int[] attrib_list = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
            mEGLContext =
                    EGL14.eglCreateContext(
                            mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, attrib_list, 0);
            checkEglError("eglCreateContext");

            // Create a window surface, and attach it to the Surface we received.
            int[] surfaceAttribs = {EGL14.EGL_NONE};
            mEGLSurface =
                    EGL14.eglCreateWindowSurface(
                            mEGLDisplay, configs[0], mSurface, surfaceAttribs, 0);
            checkEglError("eglCreateWindowSurface");
        }

        /**
         * Discards all resources held by this class, notably the EGL context. Also releases the
         * Surface that was passed to our constructor.
         */
        public void release() {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                EGL14.eglMakeCurrent(
                        mEGLDisplay,
                        EGL14.EGL_NO_SURFACE,
                        EGL14.EGL_NO_SURFACE,
                        EGL14.EGL_NO_CONTEXT);
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                EGL14.eglReleaseThread();
                EGL14.eglTerminate(mEGLDisplay);
            }
            mSurface.release();

            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            mEGLContext = EGL14.EGL_NO_CONTEXT;
            mEGLSurface = EGL14.EGL_NO_SURFACE;

            mSurface = null;
        }

        /** Makes our EGL context and surface current. */
        public void makeCurrent() {
            EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
            checkEglError("eglMakeCurrent");
        }

        /** Calls eglSwapBuffers. Use this to "publish" the current frame. */
        public void swapBuffers() {
            EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
            checkEglError("eglSwapBuffers");
        }

        /** Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. */
        public void setPresentationTime(long nsecs) {
            EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
            checkEglError("eglPresentationTimeANDROID");
        }

        /** Checks for EGL errors. */
        private void checkEglError(String msg) {
            int error = EGL14.eglGetError();
            assertEquals(String.format("%s : EGL error: 0x%s", msg, Integer.toHexString(error)),
                    EGL14.EGL_SUCCESS, error);
        }
    }

    /**
     * Manages a SurfaceTexture. Creates SurfaceTexture and SurfaceTextureRender objects, and
     * provides functions that wait for frames and render them to the current EGL surface.
     *
     * <p>The SurfaceTexture can be wrapped in a Surface and added as a camera2 capture session
     * output to receive camera frames.
     */
    private static class SurfaceTextureHolder implements SurfaceTexture.OnFrameAvailableListener {
        private SurfaceTexture mSurfaceTexture;
        private SurfaceTextureRender mTextureRender;

        private final Object mFrameSyncObject = new Object(); // guards mFrameAvailable
        private volatile boolean mFrameAvailable;

        /** Creates instances of SurfaceTextureRender and SurfaceTexture. */
        SurfaceTextureHolder() {
            mTextureRender = new SurfaceTextureRender();
            Log.v(TAG, "textureID=" + mTextureRender.getTextureId());
            mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
            mSurfaceTexture.setOnFrameAvailableListener(this);
        }

        public void release() {
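            // Note: we only drop our references here; the GL texture backing the SurfaceTexture
            // is destroyed along with the EGL context in CodecInputSurface.release().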
            mTextureRender = null;
            mSurfaceTexture = null;
        }

        /** Returns the SurfaceTexture. */
        public SurfaceTexture getSurfaceTexture() {
            return mSurfaceTexture;
        }

        /** Replaces the fragment shader. */
        public void changeFragmentShader(String fragmentShader) {
            mTextureRender.changeFragmentShader(fragmentShader);
        }

        /**
         * Latches the next buffer into the texture. Must be called from the thread that created
         * the SurfaceTextureHolder object, with the EGL context current.
         */
        public void awaitNewImage() {
            synchronized (mFrameSyncObject) {
                try {
                    // Wait for onFrameAvailable() to signal us. Use a timeout to avoid stalling
                    // the test if the frame never arrives. The loop guards against spurious
                    // wakeups: after one, we keep waiting until either mFrameAvailable is set
                    // or the full 2500 ms budget has elapsed.
                    long expiry = System.currentTimeMillis() + 2500L;
                    while (!mFrameAvailable && System.currentTimeMillis() < expiry) {
                        // Never pass 0 or a negative timeout: 0 waits indefinitely and negative
                        // values throw IllegalArgumentException.
                        mFrameSyncObject.wait(Math.max(expiry - System.currentTimeMillis(), 1));
                    }
                    assertTrue("Camera frame wait timed out", mFrameAvailable);
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
                mFrameAvailable = false;
            }

            // Latch the data.
            mTextureRender.checkGlError("before updateTexImage");
            mSurfaceTexture.updateTexImage();
        }

        /** Draws the data from SurfaceTexture onto the current EGL surface. */
        public void drawImage() {
            mTextureRender.drawFrame(mSurfaceTexture);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            Log.v(TAG, "new frame available");
            synchronized (mFrameSyncObject) {
                assertFalse("mFrameAvailable already set, frame could be dropped", mFrameAvailable);
                mFrameAvailable = true;
                mFrameSyncObject.notifyAll();
            }
        }
    }

    /** Code for rendering a texture onto a surface using OpenGL ES 2.0. */
    private static class SurfaceTextureRender {
        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; // X, Y, Z at floats 0-2
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; // U, V at floats 3-4

        private final FloatBuffer mTriangleVertices;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n"
                        + "uniform mat4 uSTMatrix;\n"
                        + "attribute vec4 aPosition;\n"
                        + "attribute vec4 aTextureCoord;\n"
                        + "varying vec2 vTextureCoord;\n"
                        + "void main() {\n"
                        + "    gl_Position = uMVPMatrix * aPosition;\n"
                        + "    vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n"
                        + "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                        + "precision mediump float;\n"
                        + "varying vec2 vTextureCoord;\n"
                        + "uniform samplerExternalOES sTexture;\n"
                        + "void main() {\n"
                        + "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                        + "}\n";

        private final float[] mMVPMatrix = new float[16];
        private final float[] mSTMatrix = new float[16];

        private int mProgram;
        private int mTextureID = -12345; // sentinel; replaced by glGenTextures() in surfaceCreated()
        private int mMVPMatrixHandle;
        private int mSTMatrixHandle;
        private int mPositionHandle;
        private int mTextureHandle;

        SurfaceTextureRender() {
            final float[] triangleVerticesData = {
                    // X, Y, Z, U, V
                    -1.0f, -1.0f, 0, 0.f, 0.f,
                    1.0f, -1.0f, 0, 1.f, 0.f,
                    -1.0f, 1.0f, 0, 0.f, 1.f,
                    1.0f, 1.0f, 0, 1.f, 1.f,
            };
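            // The four vertices above describe a full-screen quad; drawFrame() renders it as a
            // GL_TRIANGLE_STRIP, so the camera frame fills the entire encoder surface.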
            mTriangleVertices =
                    ByteBuffer.allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                            .order(ByteOrder.nativeOrder())
                            .asFloatBuffer();
            mTriangleVertices.put(triangleVerticesData).position(0);

            Matrix.setIdentityM(mSTMatrix, 0);
            surfaceCreated();
        }

        public int getTextureId() {
            return mTextureID;
        }

        public void drawFrame(SurfaceTexture st) {
            checkGlError("onDrawFrame start");
            st.getTransformMatrix(mSTMatrix);

            // (optional) clear to green so we can see if we're failing to set pixels
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mProgram);
            checkGlError("glUseProgram");

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(
                    mPositionHandle,
                    3,
                    GLES20.GL_FLOAT,
                    false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
                    mTriangleVertices);
            checkGlError("glVertexAttribPointer maPosition");
            GLES20.glEnableVertexAttribArray(mPositionHandle);
            checkGlError("glEnableVertexAttribArray maPositionHandle");

            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(
                    mTextureHandle,
                    2,
                    GLES20.GL_FLOAT,
                    false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
                    mTriangleVertices);
            checkGlError("glVertexAttribPointer maTextureHandle");
            GLES20.glEnableVertexAttribArray(mTextureHandle);
            checkGlError("glEnableVertexAttribArray maTextureHandle");

            Matrix.setIdentityM(mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(mSTMatrixHandle, 1, false, mSTMatrix, 0);

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");

            // IMPORTANT: on some devices, if you are sharing the external texture between two
            // contexts, one context may not see updates to the texture unless you un-bind and
            // re-bind it.  If you're not using shared EGL contexts, you don't need to bind
            // texture 0 here.
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }

        /**
         * Initializes GL state. Call this after the EGL surface has been created and made current.
         */
        public void surfaceCreated() {
            mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            assertTrue("Failed creating program", mProgram != 0);

            mPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
            checkLocation(mPositionHandle, "aPosition");
            mTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
            checkLocation(mTextureHandle, "aTextureCoord");

            mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
            checkLocation(mMVPMatrixHandle, "uMVPMatrix");
            mSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
            checkLocation(mSTMatrixHandle, "uSTMatrix");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);

            mTextureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError("glBindTexture mTextureID");

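            // GL_TEXTURE_EXTERNAL_OES targets only support NEAREST/LINEAR filtering (no mipmaps)
            // and CLAMP_TO_EDGE wrapping, so these are the only sensible parameter choices here.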
            GLES20.glTexParameterf(
                    GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_NEAREST);
            GLES20.glTexParameterf(
                    GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(
                    GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(
                    GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        /*
         * Replaces the fragment shader. Pass in null to reset to default.
         */
        public void changeFragmentShader(String fragmentShader) {
            if (fragmentShader == null) {
                fragmentShader = FRAGMENT_SHADER;
            }
            GLES20.glDeleteProgram(mProgram);
            mProgram = createProgram(VERTEX_SHADER, fragmentShader);
            assertTrue("Failed creating program", mProgram != 0);
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            if (program == 0) {
                Log.e(TAG, "Could not create program");
                return 0;
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        public void checkGlError(String op) {
            int error = GLES20.glGetError();
            assertEquals(String.format("%s : glError %d", op, error), GLES20.GL_NO_ERROR, error);
        }

        public static void checkLocation(int location, String label) {
            assertTrue(String.format("Unable to locate %s in program", label), location >= 0);
        }
    }
}