/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "ExtCamOfflnSsn"
#include <android/log.h>

#include "ExternalCameraOfflineSession.h"

#include <aidl/android/hardware/camera/device/BufferStatus.h>
#include <aidl/android/hardware/camera/device/ErrorMsg.h>
#include <aidl/android/hardware/camera/device/ShutterMsg.h>
#include <aidl/android/hardware/camera/device/StreamBuffer.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <convert.h>
#include <linux/videodev2.h>
#include <sync/sync.h>
#include <utils/Trace.h>

#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>

namespace {

// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

} // anonymous namespace

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {

using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::StreamBuffer;

// Static instance
HandleImporter ExternalCameraOfflineSession::sHandleImporter;
ExternalCameraOfflineSession::ExternalCameraOfflineSession(
        const CroppingType& croppingType, const common::V1_0::helper::CameraMetadata& chars,
        const std::string& cameraId, const std::string& exifMake, const std::string& exifModel,
        uint32_t blobBufferSize, bool afTrigger, const std::vector<Stream>& offlineStreams,
        std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
        const std::map<int, CirculatingBuffers>& circulatingBuffers)
    : mCroppingType(croppingType),
      mChars(chars),
      mCameraId(cameraId),
      mExifMake(exifMake),
      mExifModel(exifModel),
      mBlobBufferSize(blobBufferSize),
      mAfTrigger(afTrigger),
      mOfflineStreams(offlineStreams),
      mOfflineReqs(offlineReqs),
      mCirculatingBuffers(circulatingBuffers) {}

ExternalCameraOfflineSession::~ExternalCameraOfflineSession() {
    close();
}

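// Creates the result metadata fast message queue (FMQ). Note the return convention:
// returns true if initialization failed (invalid FMQ), false on success.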
bool ExternalCameraOfflineSession::initialize() {
    mResultMetadataQueue =
            std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }
    return false;
}

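// Imports a framework-provided buffer handle into the per-stream circulating buffer cache;
// mCbsLock guards the cache.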
Status ExternalCameraOfflineSession::importBuffer(int32_t streamId, uint64_t bufId,
                                                  buffer_handle_t buf,
                                                  buffer_handle_t** outBufPtr) {
    Mutex::Autolock _l(mCbsLock);
    return importBufferImpl(mCirculatingBuffers, sHandleImporter, streamId, bufId, buf, outBufPtr);
}

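// Builds a single CaptureResult for a finished request: each output buffer is marked OK or ERROR
// depending on whether its acquire fence timed out, the result metadata is filled in, and the
// result is delivered to the framework callback.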
Status ExternalCameraOfflineSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
    ATRACE_CALL();
    // Fill output buffers
    std::vector<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        StreamBuffer& outputBuffer = result.outputBuffers[i];
        outputBuffer.streamId = req->buffers[i].streamId;
        outputBuffer.bufferId = req->buffers[i].bufferId;
        if (req->buffers[i].fenceTimeout) {
            outputBuffer.status = BufferStatus::ERROR;
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
                handle->data[0] = req->buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence = android::dupToAidl(handle);
                native_handle_delete(handle);
            }
            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        } else {
            result.outputBuffers[i].status = BufferStatus::OK;
            // TODO: refactor
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
                handle->data[0] = req->buffers[i].acquireFence;
                outputBuffer.releaseFence = android::dupToAidl(handle);
                native_handle_delete(handle);
            }
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req->setting, req->shutterTs);
    const camera_metadata_t* rawResult = req->setting.getAndLock();
    convertToAidl(rawResult, &result.result);
    req->setting.unlock(rawResult);

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
    freeReleaseFences(results);
    return Status::OK;
}

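// Updates one metadata entry and returns BAD_VALUE from the enclosing function if the update
// fails.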
#define UPDATE(md, tag, data, size)               \
    do {                                          \
        if ((md).update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");     \
            return BAD_VALUE;                     \
        }                                         \
    } while (0)

status_t ExternalCameraOfflineSession::fillCaptureResult(common::V1_0::helper::CameraMetadata md,
                                                         nsecs_t timestamp) {
    bool afTrigger = false;
    {
        std::lock_guard<std::mutex> lk(mAfTriggerLock);
        afTrigger = mAfTrigger;
        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
            if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
                mAfTrigger = afTrigger = true;
            } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
                mAfTrigger = afTrigger = false;
            }
        }
    }

    // A USB camera handles focus on its own, so the HAL has no control over AF.
    // Simply fake the AF metadata based on the request received here.
    uint8_t afState;
    if (afTrigger) {
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
    } else {
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    }
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    camera_metadata_ro_entry activeArraySize = mChars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    return fillCaptureResultCommon(md, timestamp, activeArraySize);
}
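
// Serializes result delivery with a timed lock and prefers sending result metadata through the
// FMQ, falling back to delivering it in the hwbinder payload when the queue write fails.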
void ExternalCameraOfflineSession::invokeProcessCaptureResultCallback(
        std::vector<CaptureResult>& results, bool tryWriteFmq) {
    if (mProcessCaptureResultLock.tryLock() != OK) {
        const nsecs_t NS_TO_SECOND = 1E9;
        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(/* 1s */ NS_TO_SECOND) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, cannot proceed", __FUNCTION__);
            return;
        }
    }
    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
        for (CaptureResult& result : results) {
            if (!result.result.metadata.empty()) {
                if (mResultMetadataQueue->write(
                            reinterpret_cast<int8_t*>(result.result.metadata.data()),
                            result.result.metadata.size())) {
                    result.fmqResultSize = result.result.metadata.size();
                    result.result.metadata.clear();
                } else {
                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
                    result.fmqResultSize = 0;
                }
            } else {
                result.fmqResultSize = 0;
            }
        }
    }
    auto status = mCallback->processCaptureResult(results);
    if (!status.isOk()) {
        ALOGE("%s: processCaptureResult ERROR : %d:%d", __FUNCTION__, status.getExceptionCode(),
              status.getServiceSpecificError());
    }

    mProcessCaptureResultLock.unlock();
}

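// Reports a failed request: emits shutter and ERROR_REQUEST notifications (either immediately or
// via outMsgs) and returns all output buffers in ERROR state (either immediately or via
// outResults).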
Status ExternalCameraOfflineSession::processCaptureRequestError(
        const std::shared_ptr<HalRequest>& req, std::vector<NotifyMsg>* outMsgs,
        std::vector<CaptureResult>* outResults) {
    ATRACE_CALL();

    if (outMsgs == nullptr) {
        notifyError(/*frameNum*/ req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_REQUEST);
    } else {
        NotifyMsg shutter;
        shutter.set<NotifyMsg::Tag::shutter>(ShutterMsg{
                .frameNumber = req->frameNumber,
                .timestamp = req->shutterTs,
        });

        NotifyMsg error;
        error.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = req->frameNumber,
                                                  .errorStreamId = -1,
                                                  .errorCode = ErrorCode::ERROR_REQUEST});
        outMsgs->push_back(shutter);
        outMsgs->push_back(error);
    }

    // Fill output buffers
    CaptureResult result;
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        StreamBuffer& outputBuffer = result.outputBuffers[i];
        outputBuffer.streamId = req->buffers[i].streamId;
        outputBuffer.bufferId = req->buffers[i].bufferId;
        outputBuffer.status = BufferStatus::ERROR;
        if (req->buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
            handle->data[0] = req->buffers[i].acquireFence;
            outputBuffer.releaseFence = dupToAidl(handle);
            native_handle_delete(handle);
        }
    }

    if (outResults == nullptr) {
        // Callback into framework
        std::vector<CaptureResult> results(1);
        results[0] = std::move(result);
        invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
        freeReleaseFences(results);
    } else {
        outResults->push_back(std::move(result));
    }
    return Status::OK;
}

ssize_t ExternalCameraOfflineSession::getJpegBufferSize(int32_t, int32_t) const {
    // Empty implementation: the JPEG buffer size is passed in through the constructor.
    return 0;
}

void ExternalCameraOfflineSession::notifyError(int32_t frameNumber, int32_t streamId,
                                               ErrorCode ec) {
    NotifyMsg msg;
    msg.set<NotifyMsg::Tag::error>(
            ErrorMsg{.frameNumber = frameNumber, .errorStreamId = streamId, .errorCode = ec});
    mCallback->notify({msg});
}

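// Setting the framework callback also starts the offline processing threads; see
// initOutputThread().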
ScopedAStatus ExternalCameraOfflineSession::setCallback(
        const std::shared_ptr<ICameraDeviceCallback>& in_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    if (mCallback != nullptr && in_cb != nullptr) {
        ALOGE("%s: callback must not be set twice!", __FUNCTION__);
        return fromStatus(Status::OK);
    }
    mCallback = in_cb;

    initOutputThread();

    if (mOutputThread == nullptr) {
        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
    }
    return fromStatus(Status::OK);
}
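
// Starts the BufferRequestThread and the OutputThread that drain mOfflineReqs, sizing the
// intermediate buffers from the first offline request's input frame.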
void ExternalCameraOfflineSession::initOutputThread() {
    if (mOutputThread != nullptr) {
        ALOGE("%s: OutputThread already exists!", __FUNCTION__);
        return;
    }

    // Grab a shared_ptr to 'this' from ndk::SharedRefBase::ref()
    std::shared_ptr<ExternalCameraOfflineSession> thiz = ref<ExternalCameraOfflineSession>();

    mBufferRequestThread = std::make_shared<ExternalCameraDeviceSession::BufferRequestThread>(
            /*parent=*/thiz, mCallback);
    mBufferRequestThread->run();

    mOutputThread = std::make_shared<OutputThread>(/*parent=*/thiz, mCroppingType, mChars,
                                                   mBufferRequestThread, mOfflineReqs);

    mOutputThread->setExifMakeModel(mExifMake, mExifModel);

    Size inputSize = {mOfflineReqs[0]->frameIn->mWidth, mOfflineReqs[0]->frameIn->mHeight};
    Size maxThumbSize = getMaxThumbnailResolution(mChars);
    mOutputThread->allocateIntermediateBuffers(inputSize, maxThumbSize, mOfflineStreams,
                                               mBlobBufferSize);

    mOutputThread->run();
}

ScopedAStatus ExternalCameraOfflineSession::getCaptureResultMetadataQueue(
        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
    Mutex::Autolock _il(mInterfaceLock);
    *_aidl_return = mResultMetadataQueue->dupeDesc();
    return fromStatus(Status::OK);
}

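// Stops the worker threads, frees all imported stream buffers, and drops the framework callback.
// Calling close() on an already-closed session is a no-op.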
ScopedAStatus ExternalCameraOfflineSession::close() {
    Mutex::Autolock _il(mInterfaceLock);
    {
        Mutex::Autolock _l(mLock);
        if (mClosed) {
            ALOGW("%s: offline session already closed!", __FUNCTION__);
            return fromStatus(Status::OK);
        }
    }
    if (mBufferRequestThread != nullptr) {
        mBufferRequestThread->requestExitAndWait();
        mBufferRequestThread.reset();
    }
    if (mOutputThread) {
        mOutputThread->flush();
        mOutputThread->requestExitAndWait();
        mOutputThread.reset();
    }

    Mutex::Autolock _l(mLock);
    // free all buffers
    {
        Mutex::Autolock _cbl(mCbsLock);
        for (auto& stream : mOfflineStreams) {
            cleanupBuffersLocked(stream.id);
        }
    }
    mCallback.reset();
    mClosed = true;
    return fromStatus(Status::OK);
}
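
// Frees every imported buffer of the given stream and removes the stream's entry from the
// circulating buffer cache. The caller must hold mCbsLock.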
void ExternalCameraOfflineSession::cleanupBuffersLocked(int32_t id) {
    for (auto& pair : mCirculatingBuffers.at(id)) {
        sHandleImporter.freeBuffer(pair.second);
    }
    mCirculatingBuffers[id].clear();
    mCirculatingBuffers.erase(id);
}

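// Processes one offline request per iteration: decode the stashed V4L2 frame, wait for the output
// buffers requested from the framework, convert into each output buffer, and deliver the capture
// result. Returns false to stop the thread once the queue is empty or the parent session is gone.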
bool ExternalCameraOfflineSession::OutputThread::threadLoop() {
    auto parent = mParent.lock();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }

    if (mOfflineReqs.empty()) {
        ALOGI("%s: all offline requests are processed. Stopping.", __FUNCTION__);
        return false;
    }

    std::shared_ptr<HalRequest> req = mOfflineReqs.front();
    mOfflineReqs.pop_front();

    auto onDeviceError = [&](auto... args) {
        ALOGE(args...);
        parent->notifyError(req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_DEVICE);
        signalRequestDone();
        return false;
    };

    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
        return onDeviceError("%s: unsupported V4L2 format %c%c%c%c", __FUNCTION__,
                             req->frameIn->mFourcc & 0xFF, (req->frameIn->mFourcc >> 8) & 0xFF,
                             (req->frameIn->mFourcc >> 16) & 0xFF,
                             (req->frameIn->mFourcc >> 24) & 0xFF);
    }

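    // Request the output buffers from the framework up front so the buffer request can be
    // serviced while the input frame is being decoded below.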
    int res = requestBufferStart(req->buffers);
    if (res != 0) {
        ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
        return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
    }

    std::unique_lock<std::mutex> lk(mBufferLock);
    // Convert input V4L2 frame to YU12 of the same size
    // TODO: see if we can save some computation by converting to YV12 here
    uint8_t* inData;
    size_t inDataSize;
    if (req->frameIn->getData(&inData, &inDataSize) != 0) {
        lk.unlock();
        return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
    }

    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
        ATRACE_BEGIN("MJPGtoI420");
        int convRes = libyuv::MJPGToI420(
                inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y),
                mYu12FrameLayout.yStride, static_cast<uint8_t*>(mYu12FrameLayout.cb),
                mYu12FrameLayout.cStride, static_cast<uint8_t*>(mYu12FrameLayout.cr),
                mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight,
                mYu12Frame->mWidth, mYu12Frame->mHeight);
        ATRACE_END();

        if (convRes != 0) {
            // For some webcams, the first few V4L2 frames might be malformed...
            ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, convRes);
            lk.unlock();
            Status st = parent->processCaptureRequestError(req);
            if (st != Status::OK) {
                return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
            }
            signalRequestDone();
            return true;
        }
    }

    ATRACE_BEGIN("Wait for BufferRequest done");
    res = waitForBufferRequestDone(&req->buffers);
    ATRACE_END();

    if (res != 0) {
        ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
        lk.unlock();
        return onDeviceError("%s: failed to wait for buffer request completion!", __FUNCTION__);
    }

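    // The output buffers have now been returned by the framework; wait on each buffer's acquire
    // fence and fill it according to its stream format.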
    ALOGV("%s processing new request", __FUNCTION__);
    const int kSyncWaitTimeoutMs = 500;
    for (auto& halBuf : req->buffers) {
        if (*(halBuf.bufPtr) == nullptr) {
            ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
            halBuf.fenceTimeout = true;
        } else if (halBuf.acquireFence >= 0) {
            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
            if (ret) {
                halBuf.fenceTimeout = true;
            } else {
                ::close(halBuf.acquireFence);
                halBuf.acquireFence = -1;
            }
        }

        if (halBuf.fenceTimeout) {
            continue;
        }

        // Lock the gralloc buffer and fill it according to the stream format
        switch (halBuf.format) {
            case PixelFormat::BLOB: {
                int ret = createJpegLocked(halBuf, req->setting);

                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: createJpegLocked failed with %d", __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::Y16: {
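                // For Y16 output the raw input frame (V4L2 Z16 depth) is copied straight into the
                // locked buffer; no format conversion is applied.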
                void* outLayout = sHandleImporter.lock(
                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), inDataSize);

                std::memcpy(outLayout, inData, inDataSize);

                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                android::Rect outRect{0, 0, static_cast<int32_t>(halBuf.width),
                                      static_cast<int32_t>(halBuf.height)};
                android_ycbcr result = sHandleImporter.lockYCbCr(
                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), outRect);
                ALOGV("%s: outLayout y %p cb %p cr %p y_str %zu c_str %zu c_step %zu", __FUNCTION__,
                      result.y, result.cb, result.cr, result.ystride, result.cstride,
                      result.chroma_step);
                if (result.ystride > UINT32_MAX || result.cstride > UINT32_MAX ||
                    result.chroma_step > UINT32_MAX) {
                    return onDeviceError("%s: lockYCbCr failed. Unexpected values!", __FUNCTION__);
                }
                YCbCrLayout outLayout = {.y = result.y,
                                         .cb = result.cb,
                                         .cr = result.cr,
                                         .yStride = static_cast<uint32_t>(result.ystride),
                                         .cStride = static_cast<uint32_t>(result.cstride),
                                         .chromaStep = static_cast<uint32_t>(result.chroma_step)};

                // Convert to output buffer size/format
                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, outputFourcc & 0xFF,
                      (outputFourcc >> 8) & 0xFF, (outputFourcc >> 16) & 0xFF,
                      (outputFourcc >> 24) & 0xFF);

                YCbCrLayout cropAndScaled;
                ATRACE_BEGIN("cropAndScaleLocked");
                int ret = cropAndScaleLocked(mYu12Frame, Size{halBuf.width, halBuf.height},
                                             &cropAndScaled);
                ATRACE_END();
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
                }

                Size sz{halBuf.width, halBuf.height};
                ATRACE_BEGIN("formatConvert");
                ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc);
                ATRACE_END();
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: format conversion failed!", __FUNCTION__);
                }
                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            default:
                lk.unlock();
                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
        }
    } // for each buffer
    mScaledYu12Frames.clear();

    // Don't hold the lock while calling back to parent
    lk.unlock();
    Status st = parent->processCaptureResult(req);
    if (st != Status::OK) {
        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
    }
    signalRequestDone();
    return true;
}

} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android