1 /*
2  * Copyright (C) 2022 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "VtsHalEvsTest"
18 
19 #include "FrameHandler.h"
20 #include "FormatConvert.h"
21 
22 #include <aidl/android/hardware/graphics/common/HardwareBuffer.h>
23 #include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
24 #include <aidlcommonsupport/NativeHandle.h>
25 #include <android-base/logging.h>
26 #include <ui/GraphicBuffer.h>
27 #include <ui/GraphicBufferAllocator.h>
28 
29 namespace {
30 
31 using ::aidl::android::hardware::automotive::evs::BufferDesc;
32 using ::aidl::android::hardware::automotive::evs::CameraDesc;
33 using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
34 using ::aidl::android::hardware::automotive::evs::EvsEventType;
35 using ::aidl::android::hardware::automotive::evs::IEvsCamera;
36 using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
37 using ::aidl::android::hardware::common::NativeHandle;
38 using ::aidl::android::hardware::graphics::common::HardwareBuffer;
39 using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
40 using ::ndk::ScopedAStatus;
41 using std::chrono_literals::operator""s;
42 
// Duplicates an AIDL NativeHandle.  When doDup is true each file descriptor is
// dup()'ed so the copy owns independent descriptors; otherwise the copy merely
// aliases the same raw fd values (caller must ensure only one side closes them).
NativeHandle dupNativeHandle(const NativeHandle& handle, bool doDup) {
    NativeHandle dup;

    dup.fds = std::vector<::ndk::ScopedFileDescriptor>(handle.fds.size());
    if (!doDup) {
        // Shallow copy: share the raw fd numbers without duplicating them.
        for (size_t i = 0; i < handle.fds.size(); ++i) {
            dup.fds.at(i).set(handle.fds[i].get());
        }
    } else {
        // Deep copy: dup() already returns a temporary, so assign it directly
        // (a std::move on a temporary is redundant).
        for (size_t i = 0; i < handle.fds.size(); ++i) {
            dup.fds[i] = handle.fds[i].dup();
        }
    }
    dup.ints = handle.ints;

    // Return by value: NRVO/implicit move applies; std::move here would be
    // a pessimizing move that blocks copy elision.
    return dup;
}
60 
// Duplicates a HardwareBuffer; the description is copied and the native handle
// is duplicated per dupNativeHandle() (deep fd copy only when doDup is true).
HardwareBuffer dupHardwareBuffer(const HardwareBuffer& buffer, bool doDup) {
    HardwareBuffer dup = {
            .description = buffer.description,
            .handle = dupNativeHandle(buffer.handle, doDup),
    };

    // NRVO applies to a returned local; std::move would pessimize it.
    return dup;
}
69 
// Duplicates a BufferDesc.  All scalar/metadata fields are copied; the
// underlying graphics buffer handle is duplicated per dupHardwareBuffer().
BufferDesc dupBufferDesc(const BufferDesc& src, bool doDup) {
    BufferDesc dup = {
            .buffer = dupHardwareBuffer(src.buffer, doDup),
            .pixelSizeBytes = src.pixelSizeBytes,
            .bufferId = src.bufferId,
            .deviceId = src.deviceId,
            .timestamp = src.timestamp,
            .metadata = src.metadata,
    };

    // NRVO applies to a returned local; std::move would pessimize it.
    return dup;
}
82 
comparePayload(const EvsEventDesc & l,const EvsEventDesc & r)83 bool comparePayload(const EvsEventDesc& l, const EvsEventDesc& r) {
84     return std::equal(l.payload.begin(), l.payload.end(), r.payload.begin());
85 }
86 
87 } // namespace
88 
// Binds this handler to a camera (and optionally a display) for the duration
// of a test.  `mode` selects whether received frames are returned to the
// camera automatically or held until returnHeldBuffer() is called.
FrameHandler::FrameHandler(const std::shared_ptr<IEvsCamera>& pCamera, const CameraDesc& cameraInfo,
                           const std::shared_ptr<IEvsDisplay>& pDisplay, BufferControlFlag mode)
    : mCamera(pCamera), mCameraInfo(cameraInfo), mDisplay(pDisplay), mReturnMode(mode) {
    // Nothing but member initialization here.
}
94 
// Stops the stream (blocking until it is confirmed stopped) and then releases
// the remote camera/display references.  Safe to call once streaming is done.
void FrameHandler::shutdown() {
    // Make sure we're not still streaming
    blockingStopStream();

    // At this point, the receiver thread is no longer running, so we can safely drop
    // our remote object references so they can be freed
    mCamera = nullptr;
    mDisplay = nullptr;
}
104 
startStream()105 bool FrameHandler::startStream() {
106     // Tell the camera to start streaming
107     auto status = mCamera->startVideoStream(ref<FrameHandler>());
108     if (!status.isOk()) {
109         return false;
110     }
111 
112     // Mark ourselves as running
113     mLock.lock();
114     mRunning = true;
115     mLock.unlock();
116 
117     return true;
118 }
119 
// Requests a stream stop without waiting for it to complete; completion is
// signaled later via a STREAM_STOPPED event delivered to notify().
void FrameHandler::asyncStopStream() {
    // Tell the camera to stop streaming.
    // This will result in a null frame being delivered when the stream actually stops.
    mCamera->stopVideoStream();
}
125 
blockingStopStream()126 void FrameHandler::blockingStopStream() {
127     // Tell the stream to stop
128     asyncStopStream();
129 
130     // Wait until the stream has actually stopped
131     std::unique_lock<std::mutex> lock(mEventLock);
132     if (mRunning) {
133         mEventSignal.wait(lock, [this]() { return !mRunning; });
134     }
135 }
136 
returnHeldBuffer()137 bool FrameHandler::returnHeldBuffer() {
138     std::lock_guard<std::mutex> lock(mLock);
139 
140     // Return the oldest buffer we're holding
141     if (mHeldBuffers.empty()) {
142         // No buffers are currently held
143         return false;
144     }
145 
146     std::vector<BufferDesc> buffers = std::move(mHeldBuffers.front());
147     mHeldBuffers.pop();
148     mCamera->doneWithFrame(buffers);
149 
150     return true;
151 }
152 
// Returns whether the video stream is currently active (thread-safe snapshot).
bool FrameHandler::isRunning() {
    std::lock_guard<std::mutex> lock(mLock);
    return mRunning;
}
157 
// Blocks until at least `frameCount` frames have been received in total.
// Note: waits indefinitely; callers are expected to have a running stream.
void FrameHandler::waitForFrameCount(unsigned frameCount) {
    // Wait until we've seen at least the requested number of frames (could be more)
    std::unique_lock<std::mutex> lock(mLock);
    mFrameSignal.wait(lock, [this, frameCount]() { return mFramesReceived >= frameCount; });
}
163 
getFramesCounters(unsigned * received,unsigned * displayed)164 void FrameHandler::getFramesCounters(unsigned* received, unsigned* displayed) {
165     std::lock_guard<std::mutex> lock(mLock);
166 
167     if (received) {
168         *received = mFramesReceived;
169     }
170     if (displayed) {
171         *displayed = mFramesDisplayed;
172     }
173 }
174 
deliverFrame(const std::vector<BufferDesc> & buffers)175 ScopedAStatus FrameHandler::deliverFrame(const std::vector<BufferDesc>& buffers) {
176     mLock.lock();
177     // For VTS tests, FrameHandler uses a single frame among delivered frames.
178     auto bufferIdx = mFramesDisplayed % buffers.size();
179     auto& buffer = buffers[bufferIdx];
180     mLock.unlock();
181 
182     // Store a dimension of a received frame.
183     mFrameWidth = buffer.buffer.description.width;
184     mFrameHeight = buffer.buffer.description.height;
185 
186     // If we were given an opened display at construction time, then send the received
187     // image back down the camera.
188     bool displayed = false;
189     if (mDisplay) {
190         // Get the output buffer we'll use to display the imagery
191         BufferDesc tgtBuffer;
192         auto status = mDisplay->getTargetBuffer(&tgtBuffer);
193         if (!status.isOk()) {
194             printf("Didn't get target buffer - frame lost\n");
195             LOG(ERROR) << "Didn't get requested output buffer -- skipping this frame.";
196         } else {
197             // Copy the contents of the of buffer.memHandle into tgtBuffer
198             copyBufferContents(tgtBuffer, buffer);
199 
200             // Send the target buffer back for display
201             auto status = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
202             if (!status.isOk()) {
203                 printf("AIDL error on display buffer (%d)- frame lost\n",
204                        status.getServiceSpecificError());
205                 LOG(ERROR) << "Error making the remote function call.  AIDL said "
206                            << status.getServiceSpecificError();
207             } else {
208                 // Everything looks good!
209                 // Keep track so tests or watch dogs can monitor progress
210                 displayed = true;
211             }
212         }
213     }
214 
215     mLock.lock();
216     // increases counters
217     ++mFramesReceived;
218     mFramesDisplayed += (int)displayed;
219     mLock.unlock();
220     mFrameSignal.notify_all();
221 
222     switch (mReturnMode) {
223         case eAutoReturn: {
224             // Send the camera buffer back now that the client has seen it
225             LOG(DEBUG) << "Calling doneWithFrame";
226             if (!mCamera->doneWithFrame(buffers).isOk()) {
227                 LOG(WARNING) << "Failed to return buffers";
228             }
229             break;
230         }
231 
232         case eNoAutoReturn: {
233             // Hang onto the buffer handles for now -- the client will return it explicitly later
234             std::vector<BufferDesc> buffersToHold;
235             for (const auto& buffer : buffers) {
236                 buffersToHold.push_back(dupBufferDesc(buffer, /* doDup = */ true));
237             }
238             mHeldBuffers.push(std::move(buffersToHold));
239             break;
240         }
241     }
242 
243     LOG(DEBUG) << "Frame handling complete";
244     return ScopedAStatus::ok();
245 }
246 
notify(const EvsEventDesc & event)247 ScopedAStatus FrameHandler::notify(const EvsEventDesc& event) {
248     // Local flag we use to keep track of when the stream is stopping
249     std::unique_lock<std::mutex> lock(mEventLock);
250     mLatestEventDesc.aType = event.aType;
251     mLatestEventDesc.payload = event.payload;
252     if (mLatestEventDesc.aType == EvsEventType::STREAM_STOPPED) {
253         // Signal that the last frame has been received and the stream is stopped
254         mRunning = false;
255     } else if (mLatestEventDesc.aType == EvsEventType::PARAMETER_CHANGED) {
256         LOG(DEBUG) << "Camera parameter " << mLatestEventDesc.payload[0] << " is changed to "
257                    << mLatestEventDesc.payload[1];
258     } else {
259         LOG(DEBUG) << "Received an event " << eventToString(mLatestEventDesc.aType);
260     }
261     lock.unlock();
262     mEventSignal.notify_one();
263 
264     return ScopedAStatus::ok();
265 }
266 
// Converts and copies the source camera frame into the display's target
// buffer.  Supported source formats: NV21 (YCRCB_420_SP), YV12, YUYV
// (YCBCR_422_I), and any source that exactly matches a 32-bit RGBA/BGRA
// target.  Returns false if a buffer cannot be locked or a format is
// unsupported; the copy is clipped to the smaller of the two buffers.
bool FrameHandler::copyBufferContents(const BufferDesc& tgtBuffer, const BufferDesc& srcBuffer) {
    bool success = true;
    // The descriptions are already HardwareBufferDescription values; the casts
    // only take typed pointers to them for convenient field access below.
    const HardwareBufferDescription* pSrcDesc =
            reinterpret_cast<const HardwareBufferDescription*>(&srcBuffer.buffer.description);
    const HardwareBufferDescription* pTgtDesc =
            reinterpret_cast<const HardwareBufferDescription*>(&tgtBuffer.buffer.description);

    // Make sure we don't run off the end of either buffer
    const unsigned width = std::min(pTgtDesc->width, pSrcDesc->width);
    const unsigned height = std::min(pTgtDesc->height, pSrcDesc->height);

    // FIXME: We duplicate file descriptors twice below; consider using TAKE_HANDLE
    // instead of CLONE_HANDLE.
    // NOTE(review): dupFromAidl() allocates a handle, and CLONE_HANDLE makes
    // GraphicBuffer keep its own copy — `target`/`source` appear to be leaked
    // here; confirm and free them (native_handle_close/_delete) if so.
    buffer_handle_t target = ::android::dupFromAidl(tgtBuffer.buffer.handle);
    ::android::sp<android::GraphicBuffer> tgt = new android::GraphicBuffer(
            target, android::GraphicBuffer::CLONE_HANDLE, pTgtDesc->width, pTgtDesc->height,
            static_cast<android::PixelFormat>(pTgtDesc->format), pTgtDesc->layers,
            static_cast<uint64_t>(pTgtDesc->usage), pTgtDesc->stride);

    buffer_handle_t source = ::android::dupFromAidl(srcBuffer.buffer.handle);
    ::android::sp<android::GraphicBuffer> src = new android::GraphicBuffer(
            source, android::GraphicBuffer::CLONE_HANDLE, pSrcDesc->width, pSrcDesc->height,
            static_cast<android::PixelFormat>(pSrcDesc->format), pSrcDesc->layers,
            static_cast<uint64_t>(pSrcDesc->usage), pSrcDesc->stride);

    // Lock our source buffer for reading (current expectation are for this to be NV21 format)
    uint8_t* srcPixels = nullptr;
    src->lock(GRALLOC_USAGE_SW_READ_OFTEN, (void**)&srcPixels);

    // Lock our target buffer for writing (should be either RGBA8888 or BGRA8888 format)
    uint32_t* tgtPixels = nullptr;
    tgt->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)&tgtPixels);

    if (srcPixels && tgtPixels) {
        using namespace ::android::hardware::automotive::evs::common;
        // Dispatch on (target format, source format); each Utils::copy* helper
        // converts one source layout into the 32-bit target layout.
        if (static_cast<android_pixel_format_t>(pTgtDesc->format) == HAL_PIXEL_FORMAT_RGBA_8888) {
            if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                HAL_PIXEL_FORMAT_YCRCB_420_SP) {  // 420SP == NV21
                Utils::copyNV21toRGB32(width, height, srcPixels, tgtPixels, pTgtDesc->stride);
            } else if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                       HAL_PIXEL_FORMAT_YV12) {  // YUV_420P == YV12
                Utils::copyYV12toRGB32(width, height, srcPixels, tgtPixels, pTgtDesc->stride);
            } else if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                       HAL_PIXEL_FORMAT_YCBCR_422_I) {  // YUYV
                Utils::copyYUYVtoRGB32(width, height, srcPixels, pSrcDesc->stride, tgtPixels,
                                       pTgtDesc->stride);
            } else if (pSrcDesc->format == pTgtDesc->format) {  // 32bit RGBA
                Utils::copyMatchedInterleavedFormats(width, height, srcPixels, pSrcDesc->stride,
                                                     tgtPixels, pTgtDesc->stride,
                                                     tgtBuffer.pixelSizeBytes);
            } else {
                LOG(ERROR) << "Camera buffer format is not supported";
                success = false;
            }
        } else if (static_cast<android_pixel_format_t>(pTgtDesc->format) ==
                   HAL_PIXEL_FORMAT_BGRA_8888) {
            if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                HAL_PIXEL_FORMAT_YCRCB_420_SP) {  // 420SP == NV21
                Utils::copyNV21toBGR32(width, height, srcPixels, tgtPixels, pTgtDesc->stride);
            } else if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                       HAL_PIXEL_FORMAT_YV12) {  // YUV_420P == YV12
                Utils::copyYV12toBGR32(width, height, srcPixels, tgtPixels, pTgtDesc->stride);
            } else if (static_cast<android_pixel_format_t>(pSrcDesc->format) ==
                       HAL_PIXEL_FORMAT_YCBCR_422_I) {  // YUYV
                Utils::copyYUYVtoBGR32(width, height, srcPixels, pSrcDesc->stride, tgtPixels,
                                       pTgtDesc->stride);
            } else if (pSrcDesc->format == pTgtDesc->format) {  // 32bit RGBA
                Utils::copyMatchedInterleavedFormats(width, height, srcPixels, pSrcDesc->stride,
                                                     tgtPixels, pTgtDesc->stride,
                                                     tgtBuffer.pixelSizeBytes);
            } else {
                LOG(ERROR) << "Camera buffer format is not supported";
                success = false;
            }
        } else {
            // We always expect 32 bit RGB for the display output for now.  Is there a need for 565?
            LOG(ERROR) << "Diplay buffer is always expected to be 32bit RGBA";
            success = false;
        }
    } else {
        LOG(ERROR) << "Failed to lock buffer contents for contents transfer";
        success = false;
    }

    // Unlock only the buffers whose lock() actually yielded a mapping.
    if (srcPixels) {
        src->unlock();
    }
    if (tgtPixels) {
        tgt->unlock();
    }

    return success;
}
360 
getFrameDimension(unsigned * width,unsigned * height)361 void FrameHandler::getFrameDimension(unsigned* width, unsigned* height) {
362     if (width) {
363         *width = mFrameWidth;
364     }
365 
366     if (height) {
367         *height = mFrameHeight;
368     }
369 }
370 
waitForEvent(const EvsEventDesc & aTargetEvent,EvsEventDesc & aReceivedEvent,bool ignorePayload)371 bool FrameHandler::waitForEvent(const EvsEventDesc& aTargetEvent, EvsEventDesc& aReceivedEvent,
372                                 bool ignorePayload) {
373     // Wait until we get an expected parameter change event.
374     std::unique_lock<std::mutex> lock(mEventLock);
375     auto now = std::chrono::system_clock::now();
376     bool found = false;
377     while (!found) {
378         bool result = mEventSignal.wait_until(
379                 lock, now + 5s, [this, aTargetEvent, ignorePayload, &aReceivedEvent, &found]() {
380                     found = (mLatestEventDesc.aType == aTargetEvent.aType) &&
381                             (ignorePayload || comparePayload(mLatestEventDesc, aTargetEvent));
382                     aReceivedEvent.aType = mLatestEventDesc.aType;
383                     aReceivedEvent.payload = mLatestEventDesc.payload;
384                     return found;
385                 });
386 
387         if (!result) {
388             LOG(WARNING) << "A timer is expired before a target event has happened.";
389             break;
390         }
391     }
392 
393     return found;
394 }
395 
eventToString(const EvsEventType aType)396 const char* FrameHandler::eventToString(const EvsEventType aType) {
397     switch (aType) {
398         case EvsEventType::STREAM_STARTED:
399             return "STREAM_STARTED";
400         case EvsEventType::STREAM_STOPPED:
401             return "STREAM_STOPPED";
402         case EvsEventType::FRAME_DROPPED:
403             return "FRAME_DROPPED";
404         case EvsEventType::TIMEOUT:
405             return "TIMEOUT";
406         case EvsEventType::PARAMETER_CHANGED:
407             return "PARAMETER_CHANGED";
408         case EvsEventType::MASTER_RELEASED:
409             return "MASTER_RELEASED";
410         default:
411             return "Unknown";
412     }
413 }
414