/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "RenderTopView.h"

#include "VideoTex.h"
#include "glError.h"
#include "shader.h"
#include "shader_projectedTex.h"
#include "shader_simpleTex.h"

#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <android-base/logging.h>
#include <math/mat4.h>
#include <math/vec3.h>

namespace {

using aidl::android::hardware::automotive::evs::BufferDesc;
using aidl::android::hardware::automotive::evs::IEvsEnumerator;
using aidl::android::hardware::automotive::evs::Stream;

// Simple aliases to make geometric math using vectors more readable
const unsigned X = 0;
const unsigned Y = 1;
const unsigned Z = 2;

// Since we assume no roll in these views, we can simplify the required math
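// Convention, as implied by the math below: angles are in radians, yaw rotates about the +Z (up)
// axis, and positive pitch tilts the result up toward +Z.  For example, pitch = 0 and yaw = 0
// yield (0, 1, 0), and increasing yaw swings the vector from +Y toward -X.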
android::vec3 unitVectorFromPitchAndYaw(float pitch, float yaw) {
    float sinPitch, cosPitch;
    sincosf(pitch, &sinPitch, &cosPitch);
    float sinYaw, cosYaw;
    sincosf(yaw, &sinYaw, &cosYaw);
    return android::vec3(cosPitch * -sinYaw, cosPitch * cosYaw, sinPitch);
}

// Helper function to set up a perspective matrix with independent horizontal and vertical
// angles of view.
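// This is the usual OpenGL-style perspective matrix (column-major, indexed as p[column][row]),
// except that the horizontal and vertical fields of view are specified independently instead of
// via a single fov plus aspect ratio.  With hfov = vfov = pi/2 both tan terms become 1 and the
// matrix reduces to the canonical symmetric frustum; after the perspective divide, eye-space
// depth from -near to -far maps to NDC z in [-1, 1].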
android::mat4 perspective(float hfov, float vfov, float near, float far) {
    const float tanHalfFovX = tanf(hfov * 0.5f);
    const float tanHalfFovY = tanf(vfov * 0.5f);

    android::mat4 p(0.0f);
    p[0][0] = 1.0f / tanHalfFovX;
    p[1][1] = 1.0f / tanHalfFovY;
    p[2][2] = -(far + near) / (far - near);
    p[2][3] = -1.0f;
    p[3][2] = -(2.0f * far * near) / (far - near);
    return p;
}

// Helper function to set up a view matrix for a camera given its yaw & pitch & location
// Yes, with a bit of work, we could use lookAt, but it does a lot of extra work
// internally that we can shortcut.
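// The returned matrix is the inverse of the camera's rigid-body pose, built directly: its rotation
// rows are the camera's right, up, and negated "at" vectors, and its translation column is the
// negated eye position expressed in that rotated frame (hence the dot products).  It maps car-space
// points into an eye space where the camera looks down -Z, which is what the perspective matrix
// above expects.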
android::mat4 cameraLookMatrix(const ConfigManager::CameraInfo& cam) {
    float sinYaw, cosYaw;
    sincosf(cam.yaw, &sinYaw, &cosYaw);

    // Construct principal unit vectors
    android::vec3 vAt = unitVectorFromPitchAndYaw(cam.pitch, cam.yaw);
    android::vec3 vRt = android::vec3(cosYaw, sinYaw, 0.0f);
    android::vec3 vUp = -cross(vAt, vRt);
    android::vec3 eye = android::vec3(cam.position[X], cam.position[Y], cam.position[Z]);

    android::mat4 Result(1.0f);
    Result[0][0] = vRt.x;
    Result[1][0] = vRt.y;
    Result[2][0] = vRt.z;
    Result[0][1] = vUp.x;
    Result[1][1] = vUp.y;
    Result[2][1] = vUp.z;
    Result[0][2] = -vAt.x;
    Result[1][2] = -vAt.y;
    Result[2][2] = -vAt.z;
    Result[3][0] = -dot(vRt, eye);
    Result[3][1] = -dot(vUp, eye);
    Result[3][2] = dot(vAt, eye);
    return Result;
}

}  // namespace

RenderTopView::RenderTopView(std::shared_ptr<IEvsEnumerator> enumerator,
                             const std::vector<ConfigManager::CameraInfo>& camList,
                             const ConfigManager& mConfig) :
      mEnumerator(enumerator), mConfig(mConfig) {
    // Copy the list of cameras we're to employ into our local storage.  We'll create and
    // associate a streaming video texture when we are activated.
    mActiveCameras.reserve(camList.size());
    for (unsigned i = 0; i < camList.size(); i++) {
        mActiveCameras.emplace_back(camList[i]);
    }
}

bool RenderTopView::activate() {
    // Ensure GL is ready to go...
    if (!prepareGL()) {
        LOG(ERROR) << "Error initializing GL";
        return false;
    }

    // Load our shader programs
    mPgmAssets.simpleTexture =
            buildShaderProgram(vtxShader_simpleTexture, pixShader_simpleTexture, "simpleTexture");
    if (!mPgmAssets.simpleTexture) {
        LOG(ERROR) << "Failed to build shader program";
        return false;
    }
    mPgmAssets.projectedTexture =
            buildShaderProgram(vtxShader_projectedTexture, pixShader_projectedTexture,
                               "projectedTexture");
    if (!mPgmAssets.projectedTexture) {
        LOG(ERROR) << "Failed to build shader program";
        return false;
    }

    // Load the checkerboard image (used as a fallback when a camera has no video texture)
    mTexAssets.checkerBoard.reset(
            createTextureFromPng("/system/etc/automotive/evs/LabeledChecker.png"));
    if (!mTexAssets.checkerBoard) {
        LOG(ERROR) << "Failed to load checkerboard texture";
        return false;
    }

    // Load the car image
    mTexAssets.carTopView.reset(createTextureFromPng("/system/etc/automotive/evs/CarFromTop.png"));
    if (!mTexAssets.carTopView) {
        LOG(ERROR) << "Failed to load carTopView texture";
        return false;
    }

    // Set up streaming video textures for our associated cameras
    for (auto&& cam : mActiveCameras) {
        // We are passing an empty stream configuration; this will make EVS
        // choose the default stream configuration.
        std::unique_ptr<Stream> emptyCfg(new Stream());
        cam.tex.reset(createVideoTexture(mEnumerator, cam.info.cameraId.c_str(),
                                         std::move(emptyCfg), sDisplay));
        if (!cam.tex) {
            LOG(ERROR) << "Failed to set up video texture for " << cam.info.cameraId << " ("
                       << cam.info.function << ")";
            return false;
        }
    }

    return true;
}

void RenderTopView::deactivate() {
    // Release our video textures
    // We can't hold onto them because some other Render object might need the same camera
    for (auto&& cam : mActiveCameras) {
        cam.tex = nullptr;
    }
}

bool RenderTopView::drawFrame(const BufferDesc& tgtBuffer) {
    // Tell GL to render to the given buffer
    if (!attachRenderTarget(tgtBuffer)) {
        LOG(ERROR) << "Failed to attach render target";
        return false;
    }

    // Set up our top down projection matrix from car space (world units, Xfwd, Yright, Zup)
    // to view space (-1 to 1)
    const float top = mConfig.getDisplayTopLocation();
    const float bottom = mConfig.getDisplayBottomLocation();
    const float right = mConfig.getDisplayRightLocation(sAspectRatio);
    const float left = mConfig.getDisplayLeftLocation(sAspectRatio);

    const float near = 10.0f;  // arbitrary top of view volume
    const float far = 0.0f;    // ground plane is at zero

    // We can use a simple, unrotated ortho view since the screen and car space axes are
    // naturally aligned in the top down view.
    orthoMatrix = android::mat4::ortho(left, right, top, bottom, near, far);

    // Refresh our video texture contents.  We do it all at once in hopes of getting
    // better coherence among images.  This does not guarantee synchronization, of course...
    for (auto&& cam : mActiveCameras) {
        if (cam.tex) {
            cam.tex->refresh();
        }
    }

    // Iterate over all the cameras and project their images onto the ground plane
    for (auto&& cam : mActiveCameras) {
        renderCameraOntoGroundPlane(cam);
    }

    // Draw the car image
    renderCarTopView();

    // Now that everything is submitted, release our hold on the texture resource
    detachRenderTarget();

    // Wait for the rendering to finish
    glFinish();
    detachRenderTarget();
    return true;
}

//
// Responsible for drawing the car's self image in the top down view.
// Draws in car model space (units of meters with origin at center of rear axle)
// NOTE:  We probably want to eventually switch to using a VertexArray based model system.
//
void RenderTopView::renderCarTopView() {
    // Compute the corners of our image footprint in car space
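    // The scale comes from the car graphic itself: carGraphicFrontPixel() and carGraphicRearPixel()
    // give the pixel rows where the front and rear of the car appear in CarFromTop.png, so dividing
    // the configured car length by that pixel span yields car-space units (meters) per texel.  The
    // image is then placed so its rear-of-car row lands at getRearLocation() and is centered
    // laterally about x = 0.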
    const float carLengthInTexels = mConfig.carGraphicRearPixel() - mConfig.carGraphicFrontPixel();
    const float carSpaceUnitsPerTexel = mConfig.getCarLength() / carLengthInTexels;
    const float textureHeightInCarSpace = mTexAssets.carTopView->height() * carSpaceUnitsPerTexel;
    const float textureAspectRatio =
            (float)mTexAssets.carTopView->width() / mTexAssets.carTopView->height();
    const float pixelsBehindCarInImage =
            mTexAssets.carTopView->height() - mConfig.carGraphicRearPixel();
    const float textureExtentBehindCarInCarSpace = pixelsBehindCarInImage * carSpaceUnitsPerTexel;

    const float btCS = mConfig.getRearLocation() - textureExtentBehindCarInCarSpace;
    const float tpCS = textureHeightInCarSpace + btCS;
    const float ltCS = 0.5f * textureHeightInCarSpace * textureAspectRatio;
    const float rtCS = -ltCS;

    GLfloat vertsCarPos[] = {
            ltCS, tpCS, 0.0f,  // left top in car space
            rtCS, tpCS, 0.0f,  // right top
            ltCS, btCS, 0.0f,  // left bottom
            rtCS, btCS, 0.0f   // right bottom
    };
    // NOTE:  We didn't flip the image in the texture, so V=0 is actually the top of the image
    GLfloat vertsCarTex[] = {
            0.0f, 0.0f,  // left top
            1.0f, 0.0f,  // right top
            0.0f, 1.0f,  // left bottom
            1.0f, 1.0f   // right bottom
    };
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertsCarPos);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, vertsCarTex);
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

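    // Alpha-blend the car image over the camera projections already in the frame so that any
    // transparent pixels in CarFromTop.png leave the ground imagery underneath visible.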
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    glUseProgram(mPgmAssets.simpleTexture);
    GLint loc = glGetUniformLocation(mPgmAssets.simpleTexture, "cameraMat");
    glUniformMatrix4fv(loc, 1, false, orthoMatrix.asArray());
    glBindTexture(GL_TEXTURE_2D, mTexAssets.carTopView->glId());

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glDisable(GL_BLEND);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
}

// NOTE:  Might be worth reviewing the ideas at
// http://math.stackexchange.com/questions/1691895/inverse-of-perspective-matrix
// to see if that simplifies the math, although we'll still want to compute the actual ground
// interception points taking into account the pitchLimit as below.
void RenderTopView::renderCameraOntoGroundPlane(const ActiveCamera& cam) {
    // How far is the farthest any camera should even consider projecting its image?
    const float visibleSizeV = mConfig.getDisplayTopLocation() - mConfig.getDisplayBottomLocation();
    const float visibleSizeH = visibleSizeV * sAspectRatio;
    const float maxRange = (visibleSizeH > visibleSizeV) ? visibleSizeH : visibleSizeV;

    // Construct the projection matrix (View + Projection) associated with this sensor
    const android::mat4 V = cameraLookMatrix(cam.info);
    const android::mat4 P =
            perspective(cam.info.hfov, cam.info.vfov, cam.info.position[Z], maxRange);
    const android::mat4 projectionMatrix = P * V;
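    // projectionMatrix maps a car-space point on the ground into this camera's clip space,
    // answering "where would this ground point appear in the camera's image?"  The projectedTexture
    // shader is expected to use it to derive the texture lookup for each ground-plane vertex, while
    // cameraMat (the shared orthoMatrix) still places those vertices on screen.  Using the camera's
    // height above the ground (position[Z]) as the near plane and maxRange as the far plane bounds
    // the projection to the visible area.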

    // Just draw the whole darn ground plane for now -- we're wasting fill rate, but so what?
    // A 2x optimization would be to draw only the 1/2 space of the window in the direction
    // the sensor is facing.  A more complex solution would be to construct the intersection
    // of the sensor volume with the ground plane and render only that geometry.
    const float top = mConfig.getDisplayTopLocation();
    const float bottom = mConfig.getDisplayBottomLocation();
    const float wsHeight = top - bottom;
    const float wsWidth = wsHeight * sAspectRatio;
    const float right = wsWidth * 0.5f;
    const float left = -right;

    const android::vec3 topLeft(left, top, 0.0f);
    const android::vec3 topRight(right, top, 0.0f);
    const android::vec3 botLeft(left, bottom, 0.0f);
    const android::vec3 botRight(right, bottom, 0.0f);

    GLfloat vertsPos[] = {
            topLeft[X], topLeft[Y], topLeft[Z], topRight[X], topRight[Y], topRight[Z],
            botLeft[X], botLeft[Y], botLeft[Z], botRight[X], botRight[Y], botRight[Z],
    };
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertsPos);
    glEnableVertexAttribArray(0);

    glDisable(GL_BLEND);

    glUseProgram(mPgmAssets.projectedTexture);
    GLint locCam = glGetUniformLocation(mPgmAssets.projectedTexture, "cameraMat");
    glUniformMatrix4fv(locCam, 1, false, orthoMatrix.asArray());
    GLint locProj = glGetUniformLocation(mPgmAssets.projectedTexture, "projectionMat");
    glUniformMatrix4fv(locProj, 1, false, projectionMatrix.asArray());

    GLuint texId;
    if (cam.tex) {
        texId = cam.tex->glId();
    } else {
        texId = mTexAssets.checkerBoard->glId();
    }
    glBindTexture(GL_TEXTURE_2D, texId);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glDisableVertexAttribArray(0);
}