/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

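// Camera HAL device for the Android Emulator: frames come from the host
// through the QEMU "camera" pipe (see qemu_channel.h) instead of a physical
// sensor.
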
#define FAILURE_DEBUG_PREFIX "QemuCamera"

#include <inttypes.h>
#include <cstdlib>

#include <log/log.h>
#include <system/camera_metadata.h>
#include <linux/videodev2.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>

#include <gralloc_cb_bp.h>

#include "debug.h"
#include "jpeg.h"
#include "metadata_utils.h"
#include "QemuCamera.h"
#include "qemu_channel.h"

namespace android {
namespace hardware {
namespace camera {
namespace provider {
namespace implementation {
namespace hw {

using base::unique_fd;

namespace {
constexpr char kClass[] = "QemuCamera";

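// Advertised frame-rate endpoints (fps); the frame-duration and sensor
// exposure limits below are derived from them.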
constexpr int kMinFPS = 2;
constexpr int kMedFPS = 15;
constexpr int kMaxFPS = 30;
constexpr int64_t kOneSecondNs = 1000000000;

constexpr int64_t kMinFrameDurationNs = kOneSecondNs / kMaxFPS;
constexpr int64_t kMaxFrameDurationNs = kOneSecondNs / kMinFPS;
constexpr int64_t kDefaultFrameDurationNs = kOneSecondNs / kMedFPS;

constexpr int64_t kMinSensorExposureTimeNs = kOneSecondNs / 20000;
constexpr int64_t kMaxSensorExposureTimeNs = kOneSecondNs / 2;
constexpr int64_t kDefaultSensorExposureTimeNs = kOneSecondNs / 100;

constexpr int32_t kMinSensorSensitivity = 25;
constexpr int32_t kMaxSensorSensitivity = 1600;
constexpr int32_t kDefaultSensorSensitivity = 200;

constexpr float kMinAperture = 1.4;
constexpr float kMaxAperture = 16.0;
constexpr float kDefaultAperture = 4.0;

constexpr int32_t kDefaultJpegQuality = 85;

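// Fixed color-correction values reported in capture results: unity gains and
// an identity 3x3 transform, with rationals expressed on a /128 denominator.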
const float kColorCorrectionGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};

const camera_metadata_rational_t kRationalZero = {
    .numerator = 0, .denominator = 128
};
const camera_metadata_rational_t kRationalOne = {
    .numerator = 128, .denominator = 128
};

const camera_metadata_rational_t kColorCorrectionTransform[9] = {
    kRationalOne, kRationalZero, kRationalZero,
    kRationalZero, kRationalOne, kRationalZero,
    kRationalZero, kRationalZero, kRationalOne
};

const camera_metadata_rational kNeutralColorPoint[3] = {
    {1023, 1}, {1023, 1}, {1023, 1}
};

const double kSensorNoiseProfile[8] = {
    1.0, .000001, 1.0, .000001, 1.0, .000001, 1.0, .000001
};

// system/media/camera/docs/docs.html#dynamic_android.statistics.lensShadingMap
const float kLensShadingMap[] = {
    1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
    1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
    1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
    1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
    1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
    1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3
};

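// BufferUsage is a strongly typed enum, so bitwise OR/test operations have to
// go through the underlying integer type.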
constexpr BufferUsage usageOr(const BufferUsage a, const BufferUsage b) {
    return static_cast<BufferUsage>(static_cast<uint64_t>(a) | static_cast<uint64_t>(b));
}

constexpr bool usageTest(const BufferUsage a, const BufferUsage b) {
    return (static_cast<uint64_t>(a) & static_cast<uint64_t>(b)) != 0;
}

} // namespace

QemuCamera::QemuCamera(const Parameters& params)
        : mParams(params)
        , mAFStateMachine(200, 1, 2) {}

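// Maps the stream configuration requested by the framework onto what this HAL
// supports. The returned tuple is (pixel format, usage, dataspace, count),
// where the last element is likely a buffer-count hint (8 for video-encoder
// streams, 4 otherwise); a negative value signals an unsupported
// format/dataspace combination.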
std::tuple<PixelFormat, BufferUsage, Dataspace, int32_t>
QemuCamera::overrideStreamParams(const PixelFormat format,
                                 const BufferUsage usage,
                                 const Dataspace dataspace) const {
    constexpr BufferUsage kExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                BufferUsage::CPU_WRITE_OFTEN);

    switch (format) {
    case PixelFormat::IMPLEMENTATION_DEFINED:
        if (usageTest(usage, BufferUsage::VIDEO_ENCODER)) {
            return {PixelFormat::YCBCR_420_888, usageOr(usage, kExtraUsage),
                    Dataspace::JFIF, 8};
        } else {
            return {PixelFormat::RGBA_8888, usageOr(usage, kExtraUsage),
                    Dataspace::UNKNOWN, 4};
        }

    case PixelFormat::YCBCR_420_888:
        return {PixelFormat::YCBCR_420_888, usageOr(usage, kExtraUsage),
                Dataspace::JFIF, usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4};

    case PixelFormat::RAW16:
        return {PixelFormat::RAW16, usageOr(usage, kExtraUsage),
                Dataspace::SRGB_LINEAR, 4};

    case PixelFormat::RGBA_8888:
        return {PixelFormat::RGBA_8888, usageOr(usage, kExtraUsage),
                Dataspace::UNKNOWN, usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4};

    case PixelFormat::BLOB:
        switch (dataspace) {
        case Dataspace::JFIF:
            return {PixelFormat::BLOB, usageOr(usage, kExtraUsage),
                    Dataspace::JFIF, 4}; // JPEG
        default:
            return {format, usage, dataspace, FAILURE(kErrorBadDataspace)};
        }

    default:
        return {format, usage, dataspace, FAILURE(kErrorBadFormat)};
    }
}

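// Opens the QEMU camera channel (unless already open), performs the
// "connect"/"start" handshake with the host, and rebuilds the per-stream info
// cache (size, pixel format, BLOB buffer size) keyed by stream id.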
bool QemuCamera::configure(const CameraMetadata& sessionParams,
                           size_t nStreams,
                           const Stream* streams,
                           const HalStream* halStreams) {
    applyMetadata(sessionParams);

    if (!mQemuChannel.ok()) {
        auto qemuChannel = qemuOpenChannel(std::string("name=") + mParams.name);
        if (!qemuChannel.ok()) {
            return false;
        }

        static const char kConnectQuery[] = "connect";
        if (qemuRunQuery(qemuChannel.get(), kConnectQuery, sizeof(kConnectQuery)) < 0) {
            return false;
        }

        static const char kStartQuery[] = "start";
        if (qemuRunQuery(qemuChannel.get(), kStartQuery, sizeof(kStartQuery)) < 0) {
            return false;
        }

        mQemuChannel = std::move(qemuChannel);
    }

    mStreamInfoCache.clear();
    for (; nStreams > 0; --nStreams, ++streams, ++halStreams) {
        const int32_t id = streams->id;
        LOG_ALWAYS_FATAL_IF(halStreams->id != id);
        StreamInfo& si = mStreamInfoCache[id];
        si.size.width = streams->width;
        si.size.height = streams->height;
        si.pixelFormat = halStreams->overrideFormat;
        si.blobBufferSize = streams->bufferSize;
    }

    return true;
}

void QemuCamera::close() {
    mStreamInfoCache.clear();

    if (mQemuChannel.ok()) {
        static const char kStopQuery[] = "stop";
        if (qemuRunQuery(mQemuChannel.get(), kStopQuery, sizeof(kStopQuery)) >= 0) {
            static const char kDisconnectQuery[] = "disconnect";
            qemuRunQuery(mQemuChannel.get(), kDisconnectQuery, sizeof(kDisconnectQuery));
        }

        mQemuChannel.reset();
    }
}

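// Handles one capture request: applies (or refreshes) the request metadata,
// resolves each buffer's StreamInfo from the cache populated in configure(),
// and captures into every output buffer. Returns the frame duration, the
// sensor exposure duration, the result metadata, the buffers completed
// synchronously, and those that will be finished later.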
std::tuple<int64_t, int64_t, CameraMetadata,
           std::vector<StreamBuffer>, std::vector<DelayedStreamBuffer>>
QemuCamera::processCaptureRequest(CameraMetadata metadataUpdate,
                                  Span<CachedStreamBuffer*> csbs) {
    CameraMetadata resultMetadata = metadataUpdate.metadata.empty() ?
        updateCaptureResultMetadata() :
        applyMetadata(std::move(metadataUpdate));

    const size_t csbsSize = csbs.size();
    std::vector<StreamBuffer> outputBuffers;
    std::vector<DelayedStreamBuffer> delayedOutputBuffers;
    outputBuffers.reserve(csbsSize);

    for (size_t i = 0; i < csbsSize; ++i) {
        CachedStreamBuffer* csb = csbs[i];
        LOG_ALWAYS_FATAL_IF(!csb); // otherwise mNumBuffersInFlight will be hard

        const StreamInfo* si = csb->getStreamInfo<StreamInfo>();
        if (!si) {
            const auto sii = mStreamInfoCache.find(csb->getStreamId());
            if (sii == mStreamInfoCache.end()) {
                ALOGE("%s:%s:%d could not find stream=%d in the cache",
                      kClass, __func__, __LINE__, csb->getStreamId());
            } else {
                si = &sii->second;
                csb->setStreamInfo(si);
            }
        }

        if (si) {
            captureFrame(*si, csb, &outputBuffers, &delayedOutputBuffers);
        } else {
            outputBuffers.push_back(csb->finish(false));
        }
    }

    return make_tuple((mQemuChannel.ok() ? mFrameDurationNs : FAILURE(-1)),
                      mSensorExposureDurationNs,
                      std::move(resultMetadata), std::move(outputBuffers),
                      std::move(delayedOutputBuffers));
}

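// Dispatches by pixel format: YUV and RGBA frames are filled synchronously,
// while RAW16 and BLOB (JPEG) captures are returned as DelayedStreamBuffers
// to be completed off the hot path.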
void QemuCamera::captureFrame(const StreamInfo& si,
                              CachedStreamBuffer* csb,
                              std::vector<StreamBuffer>* outputBuffers,
                              std::vector<DelayedStreamBuffer>* delayedOutputBuffers) const {
    switch (si.pixelFormat) {
    case PixelFormat::YCBCR_420_888:
        outputBuffers->push_back(csb->finish(captureFrameYUV(si, csb)));
        break;

    case PixelFormat::RGBA_8888:
        outputBuffers->push_back(csb->finish(captureFrameRGBA(si, csb)));
        break;

    case PixelFormat::RAW16:
        delayedOutputBuffers->push_back(captureFrameRAW16(si, csb));
        break;

    case PixelFormat::BLOB:
        delayedOutputBuffers->push_back(captureFrameJpeg(si, csb));
        break;

    default:
        ALOGE("%s:%s:%d: unexpected pixelFormat=0x%" PRIx32,
              kClass, __func__, __LINE__,
              static_cast<uint32_t>(si.pixelFormat));
        outputBuffers->push_back(csb->finish(false));
        break;
    }
}

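// Locks the gralloc buffer for CPU writing and asks the host to render a
// YUV420 frame directly into it. The acquire-fence wait is capped at half a
// frame duration (ns / 2000000 converts to milliseconds).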
bool QemuCamera::captureFrameYUV(const StreamInfo& si,
                                 CachedStreamBuffer* csb) const {
    const cb_handle_t* const cb = cb_handle_t::from(csb->getBuffer());
    if (!cb) {
        return FAILURE(false);
    }

    if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
        return FAILURE(false);
    }

    const auto size = si.size;
    android_ycbcr ycbcr;
    if (GraphicBufferMapper::get().lockYCbCr(
            cb, static_cast<uint32_t>(BufferUsage::CPU_WRITE_OFTEN),
            {size.width, size.height}, &ycbcr) != NO_ERROR) {
        return FAILURE(false);
    }

    const bool res = queryFrame(si.size, V4L2_PIX_FMT_YUV420,
                                mExposureComp, cb->getMmapedOffset());

    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(cb) != NO_ERROR);
    return res;
}

bool QemuCamera::captureFrameRGBA(const StreamInfo& si,
                                  CachedStreamBuffer* csb) const {
    const cb_handle_t* const cb = cb_handle_t::from(csb->getBuffer());
    if (!cb) {
        return FAILURE(false);
    }

    if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
        return FAILURE(false);
    }

    const auto size = si.size;
    void* mem = nullptr;
    if (GraphicBufferMapper::get().lock(
            cb, static_cast<uint32_t>(BufferUsage::CPU_WRITE_OFTEN),
            {size.width, size.height}, &mem) != NO_ERROR) {
        return FAILURE(false);
    }

    const bool res = queryFrame(si.size, V4L2_PIX_FMT_RGB32,
                                mExposureComp, cb->getMmapedOffset());
    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(cb) != NO_ERROR);
    return res;
}

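// Captures an RGBA frame into a temporary buffer now and returns a closure
// that converts it to RAW16 into the client buffer once the framework is
// ready; the closure owns (and frees) the temporary image.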
DelayedStreamBuffer QemuCamera::captureFrameRAW16(const StreamInfo& si,
                                                  CachedStreamBuffer* csb) const {
    const native_handle_t* const image = captureFrameForCompressing(
        si.size, PixelFormat::RGBA_8888, V4L2_PIX_FMT_RGB32);

    const Rect<uint16_t> imageSize = si.size;
    const int64_t frameDurationNs = mFrameDurationNs;
    CameraMetadata metadata = mCaptureResultMetadata;

    return [csb, image, imageSize, metadata = std::move(metadata),
            frameDurationNs](const bool ok) -> StreamBuffer {
        StreamBuffer sb;
        if (ok && image && csb->waitAcquireFence(frameDurationNs / 1000000)) {
            void* mem = nullptr;
            if (GraphicBufferMapper::get().lock(
                    image, static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
                    {imageSize.width, imageSize.height}, &mem) == NO_ERROR) {
                sb = csb->finish(convertRGBAtoRAW16(imageSize, mem, csb->getBuffer()));
                LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(image) != NO_ERROR);
            } else {
                sb = csb->finish(FAILURE(false));
            }
        } else {
            sb = csb->finish(false);
        }

        if (image) {
            GraphicBufferAllocator::get().free(image);
        }

        return sb;
    };
}

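// Same deferred pattern for JPEG: a YUV420 frame is captured now and
// compressed into the client's BLOB buffer (sized by blobBufferSize) when the
// closure runs.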
DelayedStreamBuffer QemuCamera::captureFrameJpeg(const StreamInfo& si,
                                                 CachedStreamBuffer* csb) const {
    const native_handle_t* const image = captureFrameForCompressing(
        si.size, PixelFormat::YCBCR_420_888, V4L2_PIX_FMT_YUV420);

    const Rect<uint16_t> imageSize = si.size;
    const uint32_t jpegBufferSize = si.blobBufferSize;
    const int64_t frameDurationNs = mFrameDurationNs;
    CameraMetadata metadata = mCaptureResultMetadata;

    return [csb, image, imageSize, metadata = std::move(metadata), jpegBufferSize,
            frameDurationNs](const bool ok) -> StreamBuffer {
        StreamBuffer sb;
        if (ok && image && csb->waitAcquireFence(frameDurationNs / 1000000)) {
            android_ycbcr imageYcbcr;
            if (GraphicBufferMapper::get().lockYCbCr(
                    image, static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
                    {imageSize.width, imageSize.height}, &imageYcbcr) == NO_ERROR) {
                sb = csb->finish(compressJpeg(imageSize, imageYcbcr, metadata,
                                              csb->getBuffer(), jpegBufferSize));
                LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(image) != NO_ERROR);
            } else {
                sb = csb->finish(FAILURE(false));
            }
        } else {
            sb = csb->finish(false);
        }

        if (image) {
            GraphicBufferAllocator::get().free(image);
        }

        return sb;
    };
}

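// Allocates a temporary CPU-readable gralloc buffer and has the host render a
// frame into it through the buffer's mmaped offset. The caller takes
// ownership and must free the returned handle.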
const native_handle_t* QemuCamera::captureFrameForCompressing(
        const Rect<uint16_t> dim,
        const PixelFormat bufferFormat,
        const uint32_t qemuFormat) const {
    constexpr BufferUsage kUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                           BufferUsage::CPU_READ_OFTEN);

    GraphicBufferAllocator& gba = GraphicBufferAllocator::get();
    const native_handle_t* image = nullptr;
    uint32_t stride;

    if (gba.allocate(dim.width, dim.height, static_cast<int>(bufferFormat), 1,
                     static_cast<uint64_t>(kUsage), &image, &stride,
                     "QemuCamera") != NO_ERROR) {
        return FAILURE(nullptr);
    }

    const cb_handle_t* const cb = cb_handle_t::from(image);
    if (!cb) {
        gba.free(image);
        return FAILURE(nullptr);
    }

    if (!queryFrame(dim, qemuFormat, mExposureComp, cb->getMmapedOffset())) {
        gba.free(image);
        return FAILURE(nullptr);
    }

    return image;
}

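// Builds the "frame ..." query string sent to the emulated camera device:
// frame size, V4L2 pixel format, destination offset in the gralloc heap,
// white-balance gains, exposure compensation, and a time field that this HAL
// always sends as 0.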
bool QemuCamera::queryFrame(const Rect<uint16_t> dim,
                            const uint32_t pixelFormat,
                            const float exposureComp,
                            const uint64_t dataOffset) const {
    constexpr float scaleR = 1;
    constexpr float scaleG = 1;
    constexpr float scaleB = 1;

    char queryStr[128];
    const int querySize = snprintf(queryStr, sizeof(queryStr),
        "frame dim=%" PRIu32 "x%" PRIu32 " pix=%" PRIu32 " offset=%" PRIu64
        " whiteb=%g,%g,%g expcomp=%g time=%d",
        dim.width, dim.height, static_cast<uint32_t>(pixelFormat), dataOffset,
        scaleR, scaleG, scaleB, exposureComp, 0);

    return qemuRunQuery(mQemuChannel.get(), queryStr, querySize + 1) >= 0;
}

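// Relative exposure versus the defaults: exposure scales linearly with
// integration time and sensitivity and inversely with the square of the
// f-number, i.e. comp = (t * S / A^2) / (t0 * S0 / A0^2).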
float QemuCamera::calculateExposureComp(const int64_t exposureNs,
                                        const int sensorSensitivity,
                                        const float aperture) {
    return (double(exposureNs) * sensorSensitivity
                * kDefaultAperture * kDefaultAperture) /
           (double(kDefaultSensorExposureTimeNs) * kDefaultSensorSensitivity
                * aperture * aperture);
}

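// Note: find_camera_metadata_ro_entry returns 0 on success, so each
// `if (find_...)` branch below is the "tag not present" case and falls back
// to the default value.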
CameraMetadata QemuCamera::applyMetadata(const CameraMetadata& metadata) {
    const camera_metadata_t* const raw =
        reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data());
    camera_metadata_ro_entry_t entry;

    mFrameDurationNs = getFrameDuration(raw, kDefaultFrameDurationNs,
                                        kMinFrameDurationNs, kMaxFrameDurationNs);

    if (find_camera_metadata_ro_entry(raw, ANDROID_SENSOR_EXPOSURE_TIME, &entry)) {
        mSensorExposureDurationNs = std::min(mFrameDurationNs, kDefaultSensorExposureTimeNs);
    } else {
        mSensorExposureDurationNs = entry.data.i64[0];
    }

    if (find_camera_metadata_ro_entry(raw, ANDROID_SENSOR_SENSITIVITY, &entry)) {
        mSensorSensitivity = kDefaultSensorSensitivity;
    } else {
        mSensorSensitivity = entry.data.i32[0];
    }

    if (find_camera_metadata_ro_entry(raw, ANDROID_LENS_APERTURE, &entry)) {
        mAperture = kDefaultAperture;
    } else {
        mAperture = entry.data.f[0];
    }

    const camera_metadata_enum_android_control_af_mode_t afMode =
        find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_MODE, &entry) ?
            ANDROID_CONTROL_AF_MODE_OFF :
            static_cast<camera_metadata_enum_android_control_af_mode_t>(entry.data.u8[0]);

    const camera_metadata_enum_android_control_af_trigger_t afTrigger =
        find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry) ?
            ANDROID_CONTROL_AF_TRIGGER_IDLE :
            static_cast<camera_metadata_enum_android_control_af_trigger_t>(entry.data.u8[0]);

    const auto af = mAFStateMachine(afMode, afTrigger);

    mExposureComp = calculateExposureComp(mSensorExposureDurationNs,
                                          mSensorSensitivity, mAperture);

    CameraMetadataMap m = parseCameraMetadataMap(metadata);

    m[ANDROID_COLOR_CORRECTION_GAINS] = kColorCorrectionGains;
    m[ANDROID_COLOR_CORRECTION_TRANSFORM] = kColorCorrectionTransform;
    m[ANDROID_CONTROL_AE_STATE] = uint8_t(ANDROID_CONTROL_AE_STATE_CONVERGED);
    m[ANDROID_CONTROL_AF_STATE] = uint8_t(af.first);
    m[ANDROID_CONTROL_AWB_STATE] = uint8_t(ANDROID_CONTROL_AWB_STATE_CONVERGED);
    m[ANDROID_FLASH_STATE] = uint8_t(ANDROID_FLASH_STATE_UNAVAILABLE);
    m[ANDROID_LENS_APERTURE] = mAperture;
    m[ANDROID_LENS_FOCUS_DISTANCE] = af.second;
    m[ANDROID_LENS_STATE] = uint8_t(getAfLensState(af.first));
    m[ANDROID_REQUEST_PIPELINE_DEPTH] = uint8_t(4);
    m[ANDROID_SENSOR_FRAME_DURATION] = mFrameDurationNs;
    m[ANDROID_SENSOR_EXPOSURE_TIME] = mSensorExposureDurationNs;
    m[ANDROID_SENSOR_SENSITIVITY] = mSensorSensitivity;
    m[ANDROID_SENSOR_TIMESTAMP] = int64_t(0);
    m[ANDROID_SENSOR_NEUTRAL_COLOR_POINT] = kNeutralColorPoint;
    m[ANDROID_SENSOR_NOISE_PROFILE] = kSensorNoiseProfile;
    m[ANDROID_SENSOR_ROLLING_SHUTTER_SKEW] = kMinSensorExposureTimeNs;
    m[ANDROID_STATISTICS_SCENE_FLICKER] = uint8_t(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

    if (!find_camera_metadata_ro_entry(raw, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry)
            && (entry.data.u8[0] == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
        m[ANDROID_STATISTICS_LENS_SHADING_MAP] = kLensShadingMap;
    }

    std::optional<CameraMetadata> maybeSerialized =
        serializeCameraMetadataMap(m);

    if (maybeSerialized) {
        mCaptureResultMetadata = std::move(maybeSerialized.value());
    }

    {   // reset ANDROID_CONTROL_AF_TRIGGER to IDLE in the cached metadata
        camera_metadata_t* const raw =
            reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());

        camera_metadata_ro_entry_t entry;
        // ANDROID_CONTROL_AF_TRIGGER is a byte-typed tag, so the value must be
        // compared and written as a uint8_t, not through the int-sized enum.
        const uint8_t newTriggerValue = ANDROID_CONTROL_AF_TRIGGER_IDLE;

        if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry)) {
            return mCaptureResultMetadata;  // no trigger entry, nothing to reset
        } else if (entry.data.u8[0] == newTriggerValue) {
            return mCaptureResultMetadata;  // already IDLE
        } else {
            // Copy before the update so this result still reports the
            // request's trigger while the cached metadata is reset to IDLE.
            CameraMetadata result = mCaptureResultMetadata;

            if (update_camera_metadata_entry(raw, entry.index, &newTriggerValue, 1, nullptr)) {
                ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_TRIGGER) "
                      "failed", kClass, __func__, __LINE__);
            }

            return result;
        }
    }
}

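// Refreshes the cached result metadata between explicit metadata updates:
// advances the AF state machine and patches the AF state and focus distance
// in place.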
CameraMetadata QemuCamera::updateCaptureResultMetadata() {
    camera_metadata_t* const raw =
        reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());

    const auto af = mAFStateMachine();

    camera_metadata_ro_entry_t entry;

    if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_STATE, &entry)) {
        ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_CONTROL_AF_STATE) failed",
              kClass, __func__, __LINE__);
    } else if (update_camera_metadata_entry(raw, entry.index, &af.first, 1, nullptr)) {
        ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_STATE) failed",
              kClass, __func__, __LINE__);
    }

    if (find_camera_metadata_ro_entry(raw, ANDROID_LENS_FOCUS_DISTANCE, &entry)) {
        ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
              kClass, __func__, __LINE__);
    } else if (update_camera_metadata_entry(raw, entry.index, &af.second, 1, nullptr)) {
        ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
              kClass, __func__, __LINE__);
    }

    return metadataCompact(mCaptureResultMetadata);
}

////////////////////////////////////////////////////////////////////////////////

Span<const std::pair<int32_t, int32_t>> QemuCamera::getTargetFpsRanges() const {
    // ordered to satisfy testPreviewFpsRangeByCamera
    static const std::pair<int32_t, int32_t> targetFpsRanges[] = {
        {kMinFPS, kMedFPS},
        {kMedFPS, kMedFPS},
        {kMinFPS, kMaxFPS},
        {kMaxFPS, kMaxFPS},
    };

    return targetFpsRanges;
}

Span<const Rect<uint16_t>> QemuCamera::getAvailableThumbnailSizes() const {
    return {mParams.availableThumbnailResolutions.begin(),
            mParams.availableThumbnailResolutions.end()};
}

bool QemuCamera::isBackFacing() const {
    return mParams.isBackFacing;
}

Span<const float> QemuCamera::getAvailableApertures() const {
    static const float availableApertures[] = {
        1.4, 2.0, 2.8, 4.0, 5.6, 8.0, 11.0, 16.0
    };

    return availableApertures;
}

std::tuple<int32_t, int32_t, int32_t> QemuCamera::getMaxNumOutputStreams() const {
    return {
        1, // raw
        2, // processed
        1, // jpeg
    };
}

uint32_t QemuCamera::getAvailableCapabilitiesBitmap() const {
    return
        (1U << ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) |
        (1U << ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS) |
        (1U << ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
}

Span<const PixelFormat> QemuCamera::getSupportedPixelFormats() const {
    static const PixelFormat supportedPixelFormats[] = {
        PixelFormat::IMPLEMENTATION_DEFINED,
        PixelFormat::YCBCR_420_888,
        PixelFormat::RGBA_8888,
        PixelFormat::RAW16,
        PixelFormat::BLOB,
    };

    return {supportedPixelFormats};
}

int64_t QemuCamera::getMinFrameDurationNs() const {
    return kMinFrameDurationNs;
}

Rect<uint16_t> QemuCamera::getSensorSize() const {
    return mParams.sensorSize;
}

uint8_t QemuCamera::getSensorColorFilterArrangement() const {
    return ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
}

std::pair<int32_t, int32_t> QemuCamera::getSensorSensitivityRange() const {
    return {kMinSensorSensitivity, kMaxSensorSensitivity};
}

std::pair<int64_t, int64_t> QemuCamera::getSensorExposureTimeRange() const {
    return {kMinSensorExposureTimeNs, kMaxSensorExposureTimeNs};
}

int64_t QemuCamera::getSensorMaxFrameDuration() const {
    return kMaxSensorExposureTimeNs;
}

Span<const Rect<uint16_t>> QemuCamera::getSupportedResolutions() const {
    return {mParams.supportedResolutions.begin(), mParams.supportedResolutions.end()};
}

std::pair<int32_t, int32_t> QemuCamera::getDefaultTargetFpsRange(const RequestTemplate tpl) const {
    switch (tpl) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT:
        return {kMaxFPS, kMaxFPS};

    default:
        return {kMinFPS, kMaxFPS};
    }
}

float QemuCamera::getDefaultAperture() const {
    return kDefaultAperture;
}

int64_t QemuCamera::getDefaultSensorExpTime() const {
    return kDefaultSensorExposureTimeNs;
}

int64_t QemuCamera::getDefaultSensorFrameDuration() const {
    return kMinFrameDurationNs;
}

int32_t QemuCamera::getDefaultSensorSensitivity() const {
    return kDefaultSensorSensitivity;
}

} // namespace hw
} // namespace implementation
} // namespace provider
} // namespace camera
} // namespace hardware
} // namespace android