1 /*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
17
18 #include <aidl/Vintf.h>
19 #include <aidl/android/hardware/camera/common/VendorTagSection.h>
20 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
21 #include <aidlcommonsupport/NativeHandle.h>
22 #include <camera_aidl_test.h>
23 #include <cutils/properties.h>
24 #include <device_cb.h>
25 #include <empty_device_cb.h>
26 #include <grallocusage/GrallocUsageConversion.h>
27 #include <gtest/gtest.h>
28 #include <hardware/gralloc.h>
29 #include <hardware/gralloc1.h>
30 #include <hidl/GtestPrinter.h>
31 #include <hidl/HidlSupport.h>
32 #include <torch_provider_cb.h>
33 #include <com_android_internal_camera_flags.h>
34 #include <list>
35 #include <nativebase/nativebase.h>
36
37 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
38 using ::aidl::android::hardware::camera::common::CameraResourceCost;
39 using ::aidl::android::hardware::camera::common::TorchModeStatus;
40 using ::aidl::android::hardware::camera::common::VendorTagSection;
41 using ::aidl::android::hardware::camera::device::ICameraDevice;
42 using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
43 using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
44 using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
45 using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
46 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
47
48 using ::ndk::ScopedAStatus;
49
50 namespace {
51 const int32_t kBurstFrameCount = 10;
52 const uint32_t kMaxStillWidth = 2048;
53 const uint32_t kMaxStillHeight = 1536;
54
55 const int64_t kEmptyFlushTimeoutMSec = 200;
56 namespace flags = com::android::internal::camera::flags;
57
58 const static std::vector<int64_t> kMandatoryUseCases = {
59 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
60 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
61 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
62 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
63 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
64 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
65 } // namespace
66
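// Test if ICameraProvider::getCameraIdList returns Status::OK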
67 TEST_P(CameraAidlTest, getCameraIdList) {
68 std::vector<std::string> idList;
69 ScopedAStatus ret = mProvider->getCameraIdList(&idList);
70 ASSERT_TRUE(ret.isOk());
71
72 for (size_t i = 0; i < idList.size(); i++) {
73 ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
74 }
75 }
76
77 // Test if ICameraProvider::getVendorTags returns Status::OK
78 TEST_P(CameraAidlTest, getVendorTags) {
79 std::vector<VendorTagSection> vendorTags;
80 ScopedAStatus ret = mProvider->getVendorTags(&vendorTags);
81
82 ASSERT_TRUE(ret.isOk());
83 for (size_t i = 0; i < vendorTags.size(); i++) {
84 ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str());
85 for (auto& tag : vendorTags[i].tags) {
86 ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(),
87 (int)tag.tagType);
88 }
89 }
90 }
91
92 // Test if ICameraProvider::setCallback returns Status::OK
93 TEST_P(CameraAidlTest, setCallback) {
94 struct ProviderCb : public BnCameraProviderCallback {
95 ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
96 CameraDeviceStatus newStatus) override {
97 ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(),
98 (int)newStatus);
99 return ScopedAStatus::ok();
100 }
101 ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
102 TorchModeStatus newStatus) override {
103 ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(),
104 (int)newStatus);
105 return ScopedAStatus::ok();
106 }
107 ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName,
108 const std::string& physicalCameraDeviceName,
109 CameraDeviceStatus newStatus) override {
110 ALOGI("physical camera device status callback name %s, physical camera name %s,"
111 " status %d",
112 cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus);
113 return ScopedAStatus::ok();
114 }
115 };
116
117 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
118 ScopedAStatus ret = mProvider->setCallback(cb);
119 ASSERT_TRUE(ret.isOk());
120 ret = mProvider->setCallback(nullptr);
121 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
122 }
123
124 // Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
125 TEST_P(CameraAidlTest, getCameraDeviceInterface) {
126 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
127
128 for (const auto& name : cameraDeviceNames) {
129 std::shared_ptr<ICameraDevice> cameraDevice;
130 ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice);
131 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
132 ret.getServiceSpecificError());
133 ASSERT_TRUE(ret.isOk());
134 ASSERT_NE(cameraDevice, nullptr);
135 }
136 }
137
138 // Verify that the device resource cost can be retrieved and the values are
139 // correct.
140 TEST_P(CameraAidlTest, getResourceCost) {
141 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
142
143 for (const auto& deviceName : cameraDeviceNames) {
144 std::shared_ptr<ICameraDevice> cameraDevice;
145 ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice);
146 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
147 ret.getServiceSpecificError());
148 ASSERT_TRUE(ret.isOk());
149 ASSERT_NE(cameraDevice, nullptr);
150
151 CameraResourceCost resourceCost;
152 ret = cameraDevice->getResourceCost(&resourceCost);
153 ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(),
154 ret.getServiceSpecificError());
155 ASSERT_TRUE(ret.isOk());
156
157 ALOGI(" Resource cost is %d", resourceCost.resourceCost);
158 ASSERT_LE(resourceCost.resourceCost, 100u);
159
160 for (const auto& name : resourceCost.conflictingDevices) {
161 ALOGI(" Conflicting device: %s", name.c_str());
162 }
163 }
164 }
165
166 // Validate the integrity of manual flash strength control metadata
167 TEST_P(CameraAidlTest, validateManualFlashStrengthControlKeys) {
168 if (flags::camera_manual_flash_strength_control()) {
169 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
170 for (const auto& name : cameraDeviceNames) {
171 ALOGI("validateManualFlashStrengthControlKeys: Testing camera device %s", name.c_str());
172 CameraMetadata meta;
173 std::shared_ptr<ICameraDevice> cameraDevice;
174 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
175 &cameraDevice /*out*/);
176 ndk::ScopedAStatus ret = cameraDevice->getCameraCharacteristics(&meta);
177 ASSERT_TRUE(ret.isOk());
178 const camera_metadata_t* staticMeta =
179 reinterpret_cast<const camera_metadata_t*>(meta.metadata.data());
180 verifyManualFlashStrengthControlCharacteristics(staticMeta);
181 ret = mSession->close();
182 mSession = nullptr;
183 ASSERT_TRUE(ret.isOk());
184 }
185 } else {
186 ALOGI("validateManualFlashStrengthControlKeys: Test skipped.\n");
187 GTEST_SKIP();
188 }
189 }
190
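// Verify that every hidden physical camera id is only part of logical cameras
// that share the same system camera kind.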
191 TEST_P(CameraAidlTest, systemCameraTest) {
192 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
193 std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap;
194 for (const auto& name : cameraDeviceNames) {
195 std::shared_ptr<ICameraDevice> device;
196 ALOGI("systemCameraTest: Testing camera device %s", name.c_str());
197 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
198 ASSERT_TRUE(ret.isOk());
199 ASSERT_NE(device, nullptr);
200
201 CameraMetadata cameraCharacteristics;
202 ret = device->getCameraCharacteristics(&cameraCharacteristics);
203 ASSERT_TRUE(ret.isOk());
204
205 const camera_metadata_t* staticMeta =
206 reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data());
207 Status rc = isLogicalMultiCamera(staticMeta);
208 if (rc == Status::OPERATION_NOT_SUPPORTED) {
209 return;
210 }
211
212 ASSERT_EQ(rc, Status::OK);
213 std::unordered_set<std::string> physicalIds;
214 ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK);
215 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
216 Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind);
217 ASSERT_EQ(retStatus, Status::OK);
218
219 for (auto physicalId : physicalIds) {
220 bool isPublicId = false;
221 for (auto& deviceName : cameraDeviceNames) {
222 std::string publicVersion, publicId;
223 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
224 if (physicalId == publicId) {
225 isPublicId = true;
226 break;
227 }
228 }
229
230 // For hidden physical cameras, collect their associated logical cameras
231 // and store the system camera kind.
232 if (!isPublicId) {
233 auto it = hiddenPhysicalIdToLogicalMap.find(physicalId);
234 if (it == hiddenPhysicalIdToLogicalMap.end()) {
235 hiddenPhysicalIdToLogicalMap.insert(std::make_pair(
236 physicalId, std::vector<SystemCameraKind>({systemCameraKind})));
237 } else {
238 it->second.push_back(systemCameraKind);
239 }
240 }
241 }
242 }
243
244 // Check that the system camera kind of the logical cameras associated with
245 // each hidden physical camera is the same.
246 for (const auto& it : hiddenPhysicalIdToLogicalMap) {
247 SystemCameraKind neededSystemCameraKind = it.second.front();
248 for (auto foundSystemCamera : it.second) {
249 ASSERT_EQ(neededSystemCameraKind, foundSystemCamera);
250 }
251 }
252 }
253
254 // Verify that the static camera characteristics can be retrieved
255 // successfully.
256 TEST_P(CameraAidlTest, getCameraCharacteristics) {
257 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
258
259 for (const auto& name : cameraDeviceNames) {
260 std::shared_ptr<ICameraDevice> device;
261 ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
262 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
263 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
264 ret.getServiceSpecificError());
265 ASSERT_TRUE(ret.isOk());
266 ASSERT_NE(device, nullptr);
267
268 CameraMetadata chars;
269 ret = device->getCameraCharacteristics(&chars);
270 ASSERT_TRUE(ret.isOk());
271 verifyCameraCharacteristics(chars);
272 verifyMonochromeCharacteristics(chars);
273 verifyRecommendedConfigs(chars);
274 verifyHighSpeedRecordingCharacteristics(name, chars);
275 verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames);
276
277 ASSERT_TRUE(ret.isOk());
278
279 // getPhysicalCameraCharacteristics will fail for publicly
280 // advertised camera IDs.
281 std::string version, cameraId;
282 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId));
283 CameraMetadata devChars;
284 ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars);
285 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
286 ASSERT_EQ(0, devChars.metadata.size());
287 }
288 }
289
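// Verify that valid session characteristics can be queried for a basic stream
// configuration on devices that support the feature combination query interface.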
290 TEST_P(CameraAidlTest, getSessionCharacteristics) {
291 if (flags::feature_combination_query()) {
292 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
293
294 for (const auto& name : cameraDeviceNames) {
295 std::shared_ptr<ICameraDevice> device;
296 ALOGI("getSessionCharacteristics: Testing camera device %s", name.c_str());
297 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
298 ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(),
299 ret.getServiceSpecificError());
300 ASSERT_TRUE(ret.isOk());
301 ASSERT_NE(device, nullptr);
302
303 int32_t interfaceVersion = -1;
304 ret = device->getInterfaceVersion(&interfaceVersion);
305 ASSERT_TRUE(ret.isOk());
306 bool supportSessionCharacteristics =
307 (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
308 if (!supportSessionCharacteristics) {
309 continue;
310 }
311
312 CameraMetadata meta;
313 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
314 &device /*out*/);
315
316 std::vector<AvailableStream> outputStreams;
317 camera_metadata_t* staticMeta =
318 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
319 outputStreams.clear();
320 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
321 ASSERT_NE(0u, outputStreams.size());
322
323 AvailableStream sampleStream = outputStreams[0];
324
325 int32_t streamId = 0;
326 Stream stream = {streamId,
327 StreamType::OUTPUT,
328 sampleStream.width,
329 sampleStream.height,
330 static_cast<PixelFormat>(sampleStream.format),
331 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
332 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
333 Dataspace::UNKNOWN,
334 StreamRotation::ROTATION_0,
335 std::string(),
336 /*bufferSize*/ 0,
337 /*groupId*/ -1,
338 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
339 RequestAvailableDynamicRangeProfilesMap::
340 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
341
342 std::vector<Stream> streams = {stream};
343 StreamConfiguration config;
344 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config);
345
346 CameraMetadata camera_chars;
347 ret = device->getCameraCharacteristics(&camera_chars);
348 ASSERT_TRUE(ret.isOk());
349
350 CameraMetadata session_chars;
351 ret = device->getSessionCharacteristics(config, &session_chars);
352 ASSERT_TRUE(ret.isOk());
353 verifySessionCharacteristics(session_chars, camera_chars);
354
355 ret = mSession->close();
356 mSession = nullptr;
357 ASSERT_TRUE(ret.isOk());
358 }
359 } else {
360 ALOGI("getSessionCharacteristics: Test skipped.\n");
361 GTEST_SKIP();
362 }
363 }
364
365 // Verify that the torch strength level can be set and retrieved successfully.
366 TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) {
367 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
368
369 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
370 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
371 ASSERT_TRUE(ret.isOk());
372
373 for (const auto& name : cameraDeviceNames) {
374 int32_t defaultLevel;
375 std::shared_ptr<ICameraDevice> device;
376 ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str());
377
378 ret = mProvider->getCameraDeviceInterface(name, &device);
379 ASSERT_TRUE(ret.isOk());
380 ASSERT_NE(device, nullptr);
381
382 CameraMetadata chars;
383 ret = device->getCameraCharacteristics(&chars);
384 ASSERT_TRUE(ret.isOk());
385
386 const camera_metadata_t* staticMeta =
387 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
388 bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta);
389 camera_metadata_ro_entry entry;
390 int rc = find_camera_metadata_ro_entry(staticMeta,
391 ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry);
392 if (torchStrengthControlSupported) {
393 ASSERT_EQ(rc, 0);
394 ASSERT_GT(entry.count, 0);
395 defaultLevel = *entry.data.i32;
396 ALOGI("Default level is:%d", defaultLevel);
397 }
398
399 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
400 ret = device->turnOnTorchWithStrengthLevel(2);
401 ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError());
402 // OPERATION_NOT_SUPPORTED check
403 if (!torchStrengthControlSupported) {
404 ALOGI("Torch strength control not supported.");
405 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
406 ret.getServiceSpecificError());
407 } else {
408 {
409 ASSERT_TRUE(ret.isOk());
410 std::unique_lock<std::mutex> l(mTorchLock);
411 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
412 auto timeout = std::chrono::system_clock::now() +
413 std::chrono::seconds(kTorchTimeoutSec);
414 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
415 }
416 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
417 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
418 }
419 ALOGI("getTorchStrengthLevel: Testing");
420 int32_t strengthLevel;
421 ret = device->getTorchStrengthLevel(&strengthLevel);
422 ASSERT_TRUE(ret.isOk());
423 ALOGI("Torch strength level is : %d", strengthLevel);
424 ASSERT_EQ(strengthLevel, 2);
425
426 // Turn OFF the torch and verify torch strength level is reset to default level.
427 ALOGI("Testing torch strength level reset after turning the torch OFF.");
428 ret = device->setTorchMode(false);
429 ASSERT_TRUE(ret.isOk());
430 {
431 std::unique_lock<std::mutex> l(mTorchLock);
432 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
433 auto timeout = std::chrono::system_clock::now() +
434 std::chrono::seconds(kTorchTimeoutSec);
435 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
436 }
437 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
438 }
439
440 ret = device->getTorchStrengthLevel(&strengthLevel);
441 ASSERT_TRUE(ret.isOk());
442 ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel);
443 ASSERT_EQ(strengthLevel, defaultLevel);
444 }
445 }
446 }
447
448 // If torch is supported, verify that it can be enabled.
449 // Check for corresponding torch callbacks as well.
450 TEST_P(CameraAidlTest, setTorchMode) {
451 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
452
453 std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this);
454 ndk::ScopedAStatus ret = mProvider->setCallback(cb);
455 ALOGI("setCallback returns status: %d", ret.getServiceSpecificError());
456 ASSERT_TRUE(ret.isOk());
457 ASSERT_NE(cb, nullptr);
458
459 for (const auto& name : cameraDeviceNames) {
460 std::shared_ptr<ICameraDevice> device;
461 ALOGI("setTorchMode: Testing camera device %s", name.c_str());
462 ret = mProvider->getCameraDeviceInterface(name, &device);
463 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
464 ret.getServiceSpecificError());
465 ASSERT_TRUE(ret.isOk());
466 ASSERT_NE(device, nullptr);
467
468 CameraMetadata metadata;
469 ret = device->getCameraCharacteristics(&metadata);
470 ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError());
471 ASSERT_TRUE(ret.isOk());
472 camera_metadata_t* staticMeta =
473 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
474 bool torchSupported = isTorchSupported(staticMeta);
475
476 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
477 ret = device->setTorchMode(true);
478 ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError());
479 if (!torchSupported) {
480 ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED),
481 ret.getServiceSpecificError());
482 } else {
483 ASSERT_TRUE(ret.isOk());
484 {
485 std::unique_lock<std::mutex> l(mTorchLock);
486 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
487 auto timeout = std::chrono::system_clock::now() +
488 std::chrono::seconds(kTorchTimeoutSec);
489 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
490 }
491 ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
492 mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
493 }
494
495 ret = device->setTorchMode(false);
496 ASSERT_TRUE(ret.isOk());
497 {
498 std::unique_lock<std::mutex> l(mTorchLock);
499 while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
500 auto timeout = std::chrono::system_clock::now() +
501 std::chrono::seconds(kTorchTimeoutSec);
502 ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout));
503 }
504 ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
505 }
506 }
507 }
508 }
509
510 // Check dump functionality.
511 TEST_P(CameraAidlTest, dump) {
512 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
513
514 for (const auto& name : cameraDeviceNames) {
515 std::shared_ptr<ICameraDevice> device;
516 ALOGI("dump: Testing camera device %s", name.c_str());
517
518 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
519 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
520 ret.getServiceSpecificError());
521 ASSERT_TRUE(ret.isOk());
522 ASSERT_NE(device, nullptr);
523
524 int raw_handle = open(kDumpOutput, O_RDWR);
525 ASSERT_GE(raw_handle, 0);
526
527 auto retStatus = device->dump(raw_handle, nullptr, 0);
528 ASSERT_EQ(retStatus, ::android::OK);
529 close(raw_handle);
530 }
531 }
532
533 // Open, dump, then close
534 TEST_P(CameraAidlTest, openClose) {
535 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
536
537 for (const auto& name : cameraDeviceNames) {
538 std::shared_ptr<ICameraDevice> device;
539 ALOGI("openClose: Testing camera device %s", name.c_str());
540 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
541 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
542 ret.getServiceSpecificError());
543 ASSERT_TRUE(ret.isOk());
544 ASSERT_NE(device, nullptr);
545
546 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
547
548 ret = device->open(cb, &mSession);
549 ASSERT_TRUE(ret.isOk());
550 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
551 ret.getServiceSpecificError());
552 ASSERT_NE(mSession, nullptr);
553 int raw_handle = open(kDumpOutput, O_RDWR);
554 ASSERT_GE(raw_handle, 0);
555
556 auto retStatus = device->dump(raw_handle, nullptr, 0);
557 ASSERT_EQ(retStatus, ::android::OK);
558 close(raw_handle);
559
560 ret = mSession->close();
561 mSession = nullptr;
562 ASSERT_TRUE(ret.isOk());
563 // TODO: test all session API calls return INTERNAL_ERROR after close
564 // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
565 }
566 }
567
568 // Check whether all common default request settings can be successfully
569 // constructed.
570 TEST_P(CameraAidlTest, constructDefaultRequestSettings) {
571 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
572
573 for (const auto& name : cameraDeviceNames) {
574 std::shared_ptr<ICameraDevice> device;
575 ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
576 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device);
577 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
578 ret.getServiceSpecificError());
579 ASSERT_TRUE(ret.isOk());
580 ASSERT_NE(device, nullptr);
581
582 int32_t interfaceVersion;
583 ret = device->getInterfaceVersion(&interfaceVersion);
584 ASSERT_TRUE(ret.isOk());
585 bool supportFeatureCombinationQuery =
586 (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
587
588 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
589 ret = device->open(cb, &mSession);
590 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
591 ret.getServiceSpecificError());
592 ASSERT_TRUE(ret.isOk());
593 ASSERT_NE(mSession, nullptr);
594
595 for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL;
596 t++) {
597 RequestTemplate reqTemplate = (RequestTemplate)t;
598 CameraMetadata rawMetadata;
599 ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata);
600 ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(),
601 ret.getServiceSpecificError());
602
603 if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
604 reqTemplate == RequestTemplate::MANUAL) {
605 // optional templates
606 ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
607 ret.getServiceSpecificError());
608 } else {
609 ASSERT_TRUE(ret.isOk());
610 }
611
612 if (ret.isOk()) {
613 validateDefaultRequestMetadata(reqTemplate, rawMetadata);
614 } else {
615 ASSERT_EQ(0u, rawMetadata.metadata.size());
616 }
617
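            // When the device supports the feature combination query interface,
            // ICameraDevice::constructDefaultRequestSettings must return results
            // equivalent to the session's.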
618 if (flags::feature_combination_query()) {
619 if (supportFeatureCombinationQuery) {
620 CameraMetadata rawMetadata2;
621 ndk::ScopedAStatus ret2 =
622 device->constructDefaultRequestSettings(reqTemplate, &rawMetadata2);
623
624 ASSERT_EQ(ret.isOk(), ret2.isOk());
625 ASSERT_EQ(ret.getStatus(), ret2.getStatus());
626
627 ASSERT_EQ(rawMetadata.metadata.size(), rawMetadata2.metadata.size());
628 if (ret2.isOk()) {
629 validateDefaultRequestMetadata(reqTemplate, rawMetadata2);
630 }
631 }
632 }
633 }
634 ret = mSession->close();
635 mSession = nullptr;
636 ASSERT_TRUE(ret.isOk());
637 }
638 }
639
640 // Verify that all supported stream formats and sizes can be configured
641 // successfully.
642 TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) {
643 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
644 std::vector<AvailableStream> outputStreams;
645
646 for (const auto& name : cameraDeviceNames) {
647 CameraMetadata meta;
648 std::shared_ptr<ICameraDevice> device;
649
650 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/);
651
652 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
653 outputStreams.clear();
654 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
655 ASSERT_NE(0u, outputStreams.size());
656
657 int32_t jpegBufferSize = 0;
658 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
659 ASSERT_NE(0u, jpegBufferSize);
660
661 int32_t streamId = 0;
662 int32_t streamConfigCounter = 0;
663 for (auto& it : outputStreams) {
664 Stream stream;
665 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
666 stream.id = streamId;
667 stream.streamType = StreamType::OUTPUT;
668 stream.width = it.width;
669 stream.height = it.height;
670 stream.format = static_cast<PixelFormat>(it.format);
671 stream.dataSpace = dataspace;
672 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
673 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
674 stream.rotation = StreamRotation::ROTATION_0;
675 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
676 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
677 stream.useCase = ScalerAvailableStreamUseCases::
678 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
679 stream.colorSpace = static_cast<int>(
680 RequestAvailableColorSpaceProfilesMap::
681 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
682
683 std::vector<Stream> streams = {stream};
684 StreamConfiguration config;
685 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
686 jpegBufferSize);
687
688 verifyStreamCombination(device, config, /*expectedStatus*/ true);
689
690 config.streamConfigCounter = streamConfigCounter++;
691 std::vector<HalStream> halConfigs;
692 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
693 ASSERT_TRUE(ret.isOk());
694 ASSERT_EQ(halConfigs.size(), 1);
695 ASSERT_EQ(halConfigs[0].id, streamId);
696
697 streamId++;
698 }
699
700 ndk::ScopedAStatus ret = mSession->close();
701 mSession = nullptr;
702 ASSERT_TRUE(ret.isOk());
703 }
704 }
705
706 // Verify that mandatory concurrent streams and outputs are supported.
707 TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) {
708 struct CameraTestInfo {
709 CameraMetadata staticMeta;
710 std::shared_ptr<ICameraDeviceSession> session;
711 std::shared_ptr<ICameraDevice> cameraDevice;
712 StreamConfiguration config;
713 };
714
715 std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider);
716 std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations =
717 getConcurrentDeviceCombinations(mProvider);
718 std::vector<AvailableStream> outputStreams;
719 for (const auto& cameraDeviceIds : concurrentDeviceCombinations) {
720 std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations;
721 std::vector<CameraTestInfo> cameraTestInfos;
722 for (const auto& id : cameraDeviceIds.combination) {
723 CameraTestInfo cti;
724 auto it = idToNameMap.find(id);
725 ASSERT_TRUE(idToNameMap.end() != it);
726 std::string name = it->second;
727
728 openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/,
729 &cti.cameraDevice /*out*/);
730
731 outputStreams.clear();
732 camera_metadata_t* staticMeta =
733 reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data());
734 ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams));
735 ASSERT_NE(0u, outputStreams.size());
736
737 int32_t jpegBufferSize = 0;
738 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
739 ASSERT_NE(0u, jpegBufferSize);
740
741 int32_t streamId = 0;
742 std::vector<Stream> streams(outputStreams.size());
743 size_t j = 0;
744 for (const auto& s : outputStreams) {
745 Stream stream;
746 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format));
747 stream.id = streamId++;
748 stream.streamType = StreamType::OUTPUT;
749 stream.width = s.width;
750 stream.height = s.height;
751 stream.format = static_cast<PixelFormat>(s.format);
752 stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
753 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
754 stream.dataSpace = dataspace;
755 stream.rotation = StreamRotation::ROTATION_0;
756 stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT};
757 stream.dynamicRangeProfile = RequestAvailableDynamicRangeProfilesMap::
758 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
759 streams[j] = stream;
760 j++;
761 }
762
763 // Add the created stream configs to cameraIdsAndStreamCombinations
764 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config,
765 jpegBufferSize);
766
767 cti.config.streamConfigCounter = outputStreams.size();
768 CameraIdAndStreamCombination cameraIdAndStreamCombination;
769 cameraIdAndStreamCombination.cameraId = id;
770 cameraIdAndStreamCombination.streamConfiguration = cti.config;
771 cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination);
772 cameraTestInfos.push_back(cti);
773 }
774 // Now verify that concurrent streams are supported
775 bool combinationSupported;
776 ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported(
777 cameraIdsAndStreamCombinations, &combinationSupported);
778 ASSERT_TRUE(ret.isOk());
779 ASSERT_EQ(combinationSupported, true);
780
781         // Test that the streams can actually be configured
782 for (auto& cti : cameraTestInfos) {
783 if (cti.session != nullptr) {
784 verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true);
785 }
786
787 if (cti.session != nullptr) {
788 std::vector<HalStream> streamConfigs;
789 ret = cti.session->configureStreams(cti.config, &streamConfigs);
790 ASSERT_TRUE(ret.isOk());
791 ASSERT_EQ(cti.config.streams.size(), streamConfigs.size());
792 }
793 }
794
795 for (auto& cti : cameraTestInfos) {
796 ret = cti.session->close();
797 ASSERT_TRUE(ret.isOk());
798 }
799 }
800 }
801
802 // Check for correct handling of invalid/incorrect configuration parameters.
803 TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) {
804 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
805 std::vector<AvailableStream> outputStreams;
806
807 for (const auto& name : cameraDeviceNames) {
808 CameraMetadata meta;
809 std::shared_ptr<ICameraDevice> cameraDevice;
810
811 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
812 &cameraDevice /*out*/);
813 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
814 outputStreams.clear();
815
816 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
817 ASSERT_NE(0u, outputStreams.size());
818
819 int32_t jpegBufferSize = 0;
820 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
821 ASSERT_NE(0u, jpegBufferSize);
822
823 int32_t streamId = 0;
824 Stream stream = {streamId++,
825 StreamType::OUTPUT,
826 static_cast<uint32_t>(0),
827 static_cast<uint32_t>(0),
828 static_cast<PixelFormat>(outputStreams[0].format),
829 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
830 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
831 Dataspace::UNKNOWN,
832 StreamRotation::ROTATION_0,
833 std::string(),
834 jpegBufferSize,
835 -1,
836 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
837 RequestAvailableDynamicRangeProfilesMap::
838 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
839 int32_t streamConfigCounter = 0;
840 std::vector<Stream> streams = {stream};
841 StreamConfiguration config;
842 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
843 jpegBufferSize);
844
845 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false);
846
847 config.streamConfigCounter = streamConfigCounter++;
848 std::vector<HalStream> halConfigs;
849 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
850 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
851 ret.getServiceSpecificError() ||
852 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
853
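        // A stream with an unsupported, overly large resolution must be rejected.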
854 stream = {streamId++,
855 StreamType::OUTPUT,
856 /*width*/ INT32_MAX,
857 /*height*/ INT32_MAX,
858 static_cast<PixelFormat>(outputStreams[0].format),
859 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
860 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
861 Dataspace::UNKNOWN,
862 StreamRotation::ROTATION_0,
863 std::string(),
864 jpegBufferSize,
865 -1,
866 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
867 RequestAvailableDynamicRangeProfilesMap::
868 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
869
870 streams[0] = stream;
871 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
872 jpegBufferSize);
873
874 config.streamConfigCounter = streamConfigCounter++;
875 halConfigs.clear();
876 ret = mSession->configureStreams(config, &halConfigs);
877 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
878
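        // For each supported output size, invalid pixel formats and rotations must be rejected.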
879 for (auto& it : outputStreams) {
880 stream = {streamId++,
881 StreamType::OUTPUT,
882 it.width,
883 it.height,
884 static_cast<PixelFormat>(UINT32_MAX),
885 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
886 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
887 Dataspace::UNKNOWN,
888 StreamRotation::ROTATION_0,
889 std::string(),
890 jpegBufferSize,
891 -1,
892 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
893 RequestAvailableDynamicRangeProfilesMap::
894 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
895
896 streams[0] = stream;
897 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
898 jpegBufferSize);
899 config.streamConfigCounter = streamConfigCounter++;
900 halConfigs.clear();
901 ret = mSession->configureStreams(config, &halConfigs);
902 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
903 ret.getServiceSpecificError());
904
905 stream = {streamId++,
906 StreamType::OUTPUT,
907 it.width,
908 it.height,
909 static_cast<PixelFormat>(it.format),
910 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
911 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
912 Dataspace::UNKNOWN,
913 static_cast<StreamRotation>(UINT32_MAX),
914 std::string(),
915 jpegBufferSize,
916 -1,
917 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
918 RequestAvailableDynamicRangeProfilesMap::
919 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
920
921 streams[0] = stream;
922 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
923 jpegBufferSize);
924
925 config.streamConfigCounter = streamConfigCounter++;
926 halConfigs.clear();
927 ret = mSession->configureStreams(config, &halConfigs);
928 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
929 ret.getServiceSpecificError());
930 }
931
932 ret = mSession->close();
933 mSession = nullptr;
934 ASSERT_TRUE(ret.isOk());
935 }
936 }
937
938 // Check whether all supported ZSL output stream combinations can be
939 // configured successfully.
940 TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) {
941 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
942 std::vector<AvailableStream> inputStreams;
943 std::vector<AvailableZSLInputOutput> inputOutputMap;
944
945 for (const auto& name : cameraDeviceNames) {
946 CameraMetadata meta;
947 std::shared_ptr<ICameraDevice> cameraDevice;
948
949 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
950 &cameraDevice /*out*/);
951 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
952
953 Status rc = isZSLModeAvailable(staticMeta);
954 if (Status::OPERATION_NOT_SUPPORTED == rc) {
955 ndk::ScopedAStatus ret = mSession->close();
956 mSession = nullptr;
957 ASSERT_TRUE(ret.isOk());
958 continue;
959 }
960 ASSERT_EQ(Status::OK, rc);
961
962 inputStreams.clear();
963 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams));
964 ASSERT_NE(0u, inputStreams.size());
965
966 inputOutputMap.clear();
967 ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap));
968 ASSERT_NE(0u, inputOutputMap.size());
969
970 bool supportMonoY8 = false;
971 if (Status::OK == isMonochromeCamera(staticMeta)) {
972 for (auto& it : inputStreams) {
973 if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) {
974 supportMonoY8 = true;
975 break;
976 }
977 }
978 }
979
980 int32_t jpegBufferSize = 0;
981 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
982 ASSERT_NE(0u, jpegBufferSize);
983
984 int32_t streamId = 0;
985 bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false;
986 uint32_t streamConfigCounter = 0;
987 for (auto& inputIter : inputOutputMap) {
988 AvailableStream input;
989 ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input));
990 ASSERT_NE(0u, inputStreams.size());
991
992 if (inputIter.inputFormat ==
993 static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) &&
994 inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
995 hasPrivToY8 = true;
996 } else if (inputIter.inputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
997 if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) {
998 hasY8ToBlob = true;
999 } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) {
1000 hasY8ToY8 = true;
1001 }
1002 }
1003 AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat};
1004 std::vector<AvailableStream> outputStreams;
1005 ASSERT_EQ(Status::OK,
1006 getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold));
1007 for (auto& outputIter : outputStreams) {
1008 Dataspace outputDataSpace =
1009 getDataspace(static_cast<PixelFormat>(outputIter.format));
1010 Stream zslStream = {
1011 streamId++,
1012 StreamType::OUTPUT,
1013 input.width,
1014 input.height,
1015 static_cast<PixelFormat>(input.format),
1016 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1017 GRALLOC_USAGE_HW_CAMERA_ZSL),
1018 Dataspace::UNKNOWN,
1019 StreamRotation::ROTATION_0,
1020 std::string(),
1021 jpegBufferSize,
1022 -1,
1023 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1024 RequestAvailableDynamicRangeProfilesMap::
1025 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1026 Stream inputStream = {
1027 streamId++,
1028 StreamType::INPUT,
1029 input.width,
1030 input.height,
1031 static_cast<PixelFormat>(input.format),
1032 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0),
1033 Dataspace::UNKNOWN,
1034 StreamRotation::ROTATION_0,
1035 std::string(),
1036 jpegBufferSize,
1037 -1,
1038 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1039 RequestAvailableDynamicRangeProfilesMap::
1040 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1041 Stream outputStream = {
1042 streamId++,
1043 StreamType::OUTPUT,
1044 outputIter.width,
1045 outputIter.height,
1046 static_cast<PixelFormat>(outputIter.format),
1047 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1048 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1049 outputDataSpace,
1050 StreamRotation::ROTATION_0,
1051 std::string(),
1052 jpegBufferSize,
1053 -1,
1054 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1055 RequestAvailableDynamicRangeProfilesMap::
1056 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1057
1058 std::vector<Stream> streams = {inputStream, zslStream, outputStream};
1059
1060 StreamConfiguration config;
1061 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1062 jpegBufferSize);
1063
1064 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
1065
1066 config.streamConfigCounter = streamConfigCounter++;
1067 std::vector<HalStream> halConfigs;
1068 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1069 ASSERT_TRUE(ret.isOk());
1070 ASSERT_EQ(3u, halConfigs.size());
1071 }
1072 }
1073
1074 if (supportMonoY8) {
1075 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1076 ASSERT_TRUE(hasPrivToY8);
1077 }
1078 if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) {
1079 ASSERT_TRUE(hasY8ToY8);
1080 ASSERT_TRUE(hasY8ToBlob);
1081 }
1082 }
1083
1084 ndk::ScopedAStatus ret = mSession->close();
1085 mSession = nullptr;
1086 ASSERT_TRUE(ret.isOk());
1087 }
1088 }
1089
1090 // Check whether session parameters are supported. If HAL support for them
1091 // exists, then try to configure a preview stream using them.
1092 TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) {
1093 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1094 std::vector<AvailableStream> outputPreviewStreams;
1095 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1096 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1097
1098 for (const auto& name : cameraDeviceNames) {
1099 CameraMetadata meta;
1100
1101 std::shared_ptr<ICameraDevice> unusedCameraDevice;
1102 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1103 &unusedCameraDevice /*out*/);
1104 camera_metadata_t* staticMetaBuffer =
1105 reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1106
1107 std::unordered_set<int32_t> availableSessionKeys;
1108 auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
1109 &availableSessionKeys);
1110 ASSERT_TRUE(Status::OK == rc);
1111 if (availableSessionKeys.empty()) {
1112 ndk::ScopedAStatus ret = mSession->close();
1113 mSession = nullptr;
1114 ASSERT_TRUE(ret.isOk());
1115 continue;
1116 }
1117
1118 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
1119 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
1120 modifiedSessionParams;
1121 constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW,
1122 &previewRequestSettings, &sessionParams);
1123 if (sessionParams.isEmpty()) {
1124 ndk::ScopedAStatus ret = mSession->close();
1125 mSession = nullptr;
1126 ASSERT_TRUE(ret.isOk());
1127 continue;
1128 }
1129
1130 outputPreviewStreams.clear();
1131
1132 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
1133 &previewThreshold));
1134 ASSERT_NE(0u, outputPreviewStreams.size());
1135
1136 Stream previewStream = {
1137 0,
1138 StreamType::OUTPUT,
1139 outputPreviewStreams[0].width,
1140 outputPreviewStreams[0].height,
1141 static_cast<PixelFormat>(outputPreviewStreams[0].format),
1142 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1143 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1144 Dataspace::UNKNOWN,
1145 StreamRotation::ROTATION_0,
1146 std::string(),
1147 /*bufferSize*/ 0,
1148 /*groupId*/ -1,
1149 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1150 RequestAvailableDynamicRangeProfilesMap::
1151 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1152
1153 std::vector<Stream> streams = {previewStream};
1154 StreamConfiguration config;
1155
1156 config.streams = streams;
1157 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
1158 modifiedSessionParams = sessionParams;
1159 auto sessionParamsBuffer = sessionParams.release();
1160 std::vector<uint8_t> rawSessionParam =
1161 std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer),
1162 reinterpret_cast<uint8_t*>(sessionParamsBuffer) +
1163 get_camera_metadata_size(sessionParamsBuffer));
1164
1165 config.sessionParams.metadata = rawSessionParam;
1166 config.streamConfigCounter = 0;
1167 config.streams = {previewStream};
1168 config.streamConfigCounter = 0;
1169 config.multiResolutionInputImage = false;
1170
1171 bool newSessionParamsAvailable = false;
1172 for (const auto& it : availableSessionKeys) {
1173 if (modifiedSessionParams.exists(it)) {
1174 modifiedSessionParams.erase(it);
1175 newSessionParamsAvailable = true;
1176 break;
1177 }
1178 }
1179 if (newSessionParamsAvailable) {
1180 auto modifiedSessionParamsBuffer = modifiedSessionParams.release();
1181 verifySessionReconfigurationQuery(mSession, sessionParamsBuffer,
1182 modifiedSessionParamsBuffer);
1183 modifiedSessionParams.acquire(modifiedSessionParamsBuffer);
1184 }
1185
1186 std::vector<HalStream> halConfigs;
1187 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1188 ASSERT_TRUE(ret.isOk());
1189 ASSERT_EQ(1u, halConfigs.size());
1190
1191 sessionParams.acquire(sessionParamsBuffer);
1192 ret = mSession->close();
1193 mSession = nullptr;
1194 ASSERT_TRUE(ret.isOk());
1195 }
1196 }
1197
1198 // Verify that all supported preview + still capture stream combinations
1199 // can be configured successfully.
1200 TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) {
1201 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1202 std::vector<AvailableStream> outputBlobStreams;
1203 std::vector<AvailableStream> outputPreviewStreams;
1204 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1205 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1206 AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)};
1207
1208 for (const auto& name : cameraDeviceNames) {
1209 CameraMetadata meta;
1210
1211 std::shared_ptr<ICameraDevice> cameraDevice;
1212 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1213 &cameraDevice /*out*/);
1214
1215 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1216
1217         // Check if the camera supports depth only
1218 if (isDepthOnly(staticMeta)) {
1219 ndk::ScopedAStatus ret = mSession->close();
1220 mSession = nullptr;
1221 ASSERT_TRUE(ret.isOk());
1222 continue;
1223 }
1224
1225 outputBlobStreams.clear();
1226 ASSERT_EQ(Status::OK,
1227 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1228 ASSERT_NE(0u, outputBlobStreams.size());
1229
1230 outputPreviewStreams.clear();
1231 ASSERT_EQ(Status::OK,
1232 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold));
1233 ASSERT_NE(0u, outputPreviewStreams.size());
1234
1235 int32_t jpegBufferSize = 0;
1236 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1237 ASSERT_NE(0u, jpegBufferSize);
1238
1239 int32_t streamId = 0;
1240 uint32_t streamConfigCounter = 0;
1241
1242 for (auto& blobIter : outputBlobStreams) {
1243 for (auto& previewIter : outputPreviewStreams) {
1244 Stream previewStream = {
1245 streamId++,
1246 StreamType::OUTPUT,
1247 previewIter.width,
1248 previewIter.height,
1249 static_cast<PixelFormat>(previewIter.format),
1250 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1251 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
1252 Dataspace::UNKNOWN,
1253 StreamRotation::ROTATION_0,
1254 std::string(),
1255 /*bufferSize*/ 0,
1256 /*groupId*/ -1,
1257 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1258 RequestAvailableDynamicRangeProfilesMap::
1259 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1260 Stream blobStream = {
1261 streamId++,
1262 StreamType::OUTPUT,
1263 blobIter.width,
1264 blobIter.height,
1265 static_cast<PixelFormat>(blobIter.format),
1266 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1267 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1268 Dataspace::JFIF,
1269 StreamRotation::ROTATION_0,
1270 std::string(),
1271 /*bufferSize*/ 0,
1272 /*groupId*/ -1,
1273 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1274 RequestAvailableDynamicRangeProfilesMap::
1275 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1276 std::vector<Stream> streams = {previewStream, blobStream};
1277 StreamConfiguration config;
1278
1279 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1280 jpegBufferSize);
1281 config.streamConfigCounter = streamConfigCounter++;
1282 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
1283
1284 std::vector<HalStream> halConfigs;
1285 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1286 ASSERT_TRUE(ret.isOk());
1287 ASSERT_EQ(2u, halConfigs.size());
1288 }
1289 }
1290
1291 ndk::ScopedAStatus ret = mSession->close();
1292 mSession = nullptr;
1293 ASSERT_TRUE(ret.isOk());
1294 }
1295 }
1296
1297 // In case constrained mode is supported, test whether it can be
1298 // configured. Additionally check for common invalid inputs when
1299 // using this mode.
1300 TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) {
1301 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1302
1303 for (const auto& name : cameraDeviceNames) {
1304 CameraMetadata meta;
1305 std::shared_ptr<ICameraDevice> cameraDevice;
1306
1307 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1308 &cameraDevice /*out*/);
1309 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1310
1311 Status rc = isConstrainedModeAvailable(staticMeta);
1312 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1313 ndk::ScopedAStatus ret = mSession->close();
1314 mSession = nullptr;
1315 ASSERT_TRUE(ret.isOk());
1316 continue;
1317 }
1318 ASSERT_EQ(Status::OK, rc);
1319
1320 AvailableStream hfrStream;
1321 rc = pickConstrainedModeSize(staticMeta, hfrStream);
1322 ASSERT_EQ(Status::OK, rc);
1323
1324 int32_t streamId = 0;
1325 uint32_t streamConfigCounter = 0;
1326 Stream stream = {streamId,
1327 StreamType::OUTPUT,
1328 hfrStream.width,
1329 hfrStream.height,
1330 static_cast<PixelFormat>(hfrStream.format),
1331 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1332 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1333 Dataspace::UNKNOWN,
1334 StreamRotation::ROTATION_0,
1335 std::string(),
1336 /*bufferSize*/ 0,
1337 /*groupId*/ -1,
1338 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1339 RequestAvailableDynamicRangeProfilesMap::
1340 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1341 std::vector<Stream> streams = {stream};
1342 StreamConfiguration config;
1343 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1344 &config);
1345
1346 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
1347
1348 config.streamConfigCounter = streamConfigCounter++;
1349 std::vector<HalStream> halConfigs;
1350 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1351 ASSERT_TRUE(ret.isOk());
1352 ASSERT_EQ(1u, halConfigs.size());
1353 ASSERT_EQ(halConfigs[0].id, streamId);
1354
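        // A zero-resolution constrained high speed stream must be rejected.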
1355 stream = {streamId++,
1356 StreamType::OUTPUT,
1357 static_cast<uint32_t>(0),
1358 static_cast<uint32_t>(0),
1359 static_cast<PixelFormat>(hfrStream.format),
1360 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1361 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1362 Dataspace::UNKNOWN,
1363 StreamRotation::ROTATION_0,
1364 std::string(),
1365 /*bufferSize*/ 0,
1366 /*groupId*/ -1,
1367 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1368 RequestAvailableDynamicRangeProfilesMap::
1369 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1370 streams[0] = stream;
1371 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1372 &config);
1373
1374 config.streamConfigCounter = streamConfigCounter++;
1375 std::vector<HalStream> halConfig;
1376 ret = mSession->configureStreams(config, &halConfig);
1377 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1378 ret.getServiceSpecificError() ||
1379 static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError());
1380
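        // An overly large constrained high speed stream must also be rejected.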
1381 stream = {streamId++,
1382 StreamType::OUTPUT,
1383 INT32_MAX,
1384 INT32_MAX,
1385 static_cast<PixelFormat>(hfrStream.format),
1386 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1387 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1388 Dataspace::UNKNOWN,
1389 StreamRotation::ROTATION_0,
1390 std::string(),
1391 /*bufferSize*/ 0,
1392 /*groupId*/ -1,
1393 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1394 RequestAvailableDynamicRangeProfilesMap::
1395 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1396 streams[0] = stream;
1397 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1398 &config);
1399
1400 config.streamConfigCounter = streamConfigCounter++;
1401 halConfigs.clear();
1402 ret = mSession->configureStreams(config, &halConfigs);
1403 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1404
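        // An invalid pixel format must be rejected as well.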
1405 stream = {streamId++,
1406 StreamType::OUTPUT,
1407 hfrStream.width,
1408 hfrStream.height,
1409 static_cast<PixelFormat>(UINT32_MAX),
1410 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1411 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1412 Dataspace::UNKNOWN,
1413 StreamRotation::ROTATION_0,
1414 std::string(),
1415 /*bufferSize*/ 0,
1416 /*groupId*/ -1,
1417 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1418 RequestAvailableDynamicRangeProfilesMap::
1419 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1420 streams[0] = stream;
1421 createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE,
1422 &config);
1423
1424 config.streamConfigCounter = streamConfigCounter++;
1425 halConfigs.clear();
1426 ret = mSession->configureStreams(config, &halConfigs);
1427 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
1428
1429 ret = mSession->close();
1430 mSession = nullptr;
1431 ASSERT_TRUE(ret.isOk());
1432 }
1433 }
1434
1435 // Verify that all supported video + snapshot stream combinations can
1436 // be configured successfully.
1437 TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) {
1438 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1439 std::vector<AvailableStream> outputBlobStreams;
1440 std::vector<AvailableStream> outputVideoStreams;
1441 AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1442 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1443 AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
1444 static_cast<int32_t>(PixelFormat::BLOB)};
1445
1446 for (const auto& name : cameraDeviceNames) {
1447 CameraMetadata meta;
1448 std::shared_ptr<ICameraDevice> cameraDevice;
1449
1450 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
1451 &cameraDevice /*out*/);
1452
1453 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1454
1455 // Check if camera supports depth only
1456 if (isDepthOnly(staticMeta)) {
1457 ndk::ScopedAStatus ret = mSession->close();
1458 mSession = nullptr;
1459 ASSERT_TRUE(ret.isOk());
1460 continue;
1461 }
1462
1463 outputBlobStreams.clear();
1464 ASSERT_EQ(Status::OK,
1465 getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold));
1466 ASSERT_NE(0u, outputBlobStreams.size());
1467
1468 outputVideoStreams.clear();
1469 ASSERT_EQ(Status::OK,
1470 getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold));
1471 ASSERT_NE(0u, outputVideoStreams.size());
1472
1473 int32_t jpegBufferSize = 0;
1474 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
1475 ASSERT_NE(0u, jpegBufferSize);
1476
1477 int32_t streamId = 0;
1478 uint32_t streamConfigCounter = 0;
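// Pair every supported video output with every supported blob (JPEG) output and
// verify each two-stream combination configures successfully.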
1479 for (auto& blobIter : outputBlobStreams) {
1480 for (auto& videoIter : outputVideoStreams) {
1481 Stream videoStream = {
1482 streamId++,
1483 StreamType::OUTPUT,
1484 videoIter.width,
1485 videoIter.height,
1486 static_cast<PixelFormat>(videoIter.format),
1487 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1488 GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER),
1489 Dataspace::UNKNOWN,
1490 StreamRotation::ROTATION_0,
1491 std::string(),
1492 jpegBufferSize,
1493 /*groupId*/ -1,
1494 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1495 RequestAvailableDynamicRangeProfilesMap::
1496 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1497 Stream blobStream = {
1498 streamId++,
1499 StreamType::OUTPUT,
1500 blobIter.width,
1501 blobIter.height,
1502 static_cast<PixelFormat>(blobIter.format),
1503 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1504 GRALLOC1_CONSUMER_USAGE_CPU_READ),
1505 Dataspace::JFIF,
1506 StreamRotation::ROTATION_0,
1507 std::string(),
1508 jpegBufferSize,
1509 /*groupId*/ -1,
1510 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
1511 RequestAvailableDynamicRangeProfilesMap::
1512 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
1513 std::vector<Stream> streams = {videoStream, blobStream};
1514 StreamConfiguration config;
1515
1516 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
1517 jpegBufferSize);
1518 verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true);
1519
1520 config.streamConfigCounter = streamConfigCounter++;
1521 std::vector<HalStream> halConfigs;
1522 ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs);
1523 ASSERT_TRUE(ret.isOk());
1524 ASSERT_EQ(2u, halConfigs.size());
1525 }
1526 }
1527
1528 ndk::ScopedAStatus ret = mSession->close();
1529 mSession = nullptr;
1530 ASSERT_TRUE(ret.isOk());
1531 }
1532 }
1533
1534 // Generate and verify a camera capture request
1535 TEST_P(CameraAidlTest, processCaptureRequestPreview) {
1536 // TODO(b/220897574): Failing with BUFFER_ERROR
1537 processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW,
1538 false /*secureOnlyCameras*/);
1539 }
1540
1541 // Generate and verify a secure camera capture request
1542 TEST_P(CameraAidlTest, processSecureCaptureRequest) {
1543 processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE,
1544 true /*secureOnlyCameras*/);
1545 }
1546
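// Generate and verify capture requests with preview stabilization off and then on,
// sharing a per-device time lag map between the two runs.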
1547 TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) {
1548 std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag;
1549 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false,
1550 cameraDeviceToTimeLag);
1551 processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true,
1552 cameraDeviceToTimeLag);
1553 }
1554
1555 // Generate and verify a multi-camera capture request
1556 TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) {
1557 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1558 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1559 static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1560 int64_t bufferId = 1;
1561 uint32_t frameNumber = 1;
1562 std::vector<uint8_t> settings;
1563 std::vector<uint8_t> emptySettings;
1564 std::string invalidPhysicalId = "-1";
1565
1566 for (const auto& name : cameraDeviceNames) {
1567 std::string version, deviceId;
1568 ALOGI("processMultiCaptureRequestPreview: Test device %s", name.c_str());
1569 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1570 CameraMetadata metadata;
1571
1572 std::shared_ptr<ICameraDevice> unusedDevice;
1573 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/,
1574 &unusedDevice /*out*/);
1575
1576 camera_metadata_t* staticMeta =
1577 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
1578 Status rc = isLogicalMultiCamera(staticMeta);
1579 if (Status::OPERATION_NOT_SUPPORTED == rc) {
1580 ndk::ScopedAStatus ret = mSession->close();
1581 mSession = nullptr;
1582 ASSERT_TRUE(ret.isOk());
1583 continue;
1584 }
1585 ASSERT_EQ(Status::OK, rc);
1586
1587 std::unordered_set<std::string> physicalIds;
1588 rc = getPhysicalCameraIds(staticMeta, &physicalIds);
1589 ASSERT_TRUE(Status::OK == rc);
1590 ASSERT_TRUE(physicalIds.size() > 1);
1591
1592 std::unordered_set<int32_t> physicalRequestKeyIDs;
1593 rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1594 &physicalRequestKeyIDs);
1595 ASSERT_TRUE(Status::OK == rc);
1596 if (physicalRequestKeyIDs.empty()) {
1597 ndk::ScopedAStatus ret = mSession->close();
1598 mSession = nullptr;
1599 ASSERT_TRUE(ret.isOk());
1600 // The logical camera doesn't support any individual physical requests.
1601 continue;
1602 }
1603
1604 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings;
1605 android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings;
1606 constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW,
1607 &defaultPreviewSettings, &filteredSettings);
1608 if (filteredSettings.isEmpty()) {
1609 // No physical device settings in default request.
1610 ndk::ScopedAStatus ret = mSession->close();
1611 mSession = nullptr;
1612 ASSERT_TRUE(ret.isOk());
1613 continue;
1614 }
1615
1616 const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock();
1617 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1618 settings.assign(rawSettingsBuffer,
1619 rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1620 CameraMetadata settingsMetadata = {settings};
1621 overrideRotateAndCrop(&settingsMetadata);
1622
1623 ndk::ScopedAStatus ret = mSession->close();
1624 mSession = nullptr;
1625 ASSERT_TRUE(ret.isOk());
1626
1627 // Leave only 2 physical devices in the id set.
1628 auto it = physicalIds.begin();
1629 std::string physicalDeviceId = *it;
1630 it++;
1631 physicalIds.erase(++it, physicalIds.end());
1632 ASSERT_EQ(physicalIds.size(), 2u);
1633
1634 std::vector<HalStream> halStreams;
1635 bool supportsPartialResults = false;
1636 std::set<int32_t> halBufManagedStreamIds;
1637 int32_t partialResultCount = 0;
1638 Stream previewStream;
1639 std::shared_ptr<DeviceCb> cb;
1640
1641 configurePreviewStreams(
1642 name, mProvider, &previewThreshold, physicalIds, &mSession, &previewStream,
1643 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1644 &halBufManagedStreamIds /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, true);
1645 if (mSession == nullptr) {
1646 // stream combination not supported by HAL, skip test for device
1647 continue;
1648 }
1649
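// Fetch the fast message queue the HAL uses to deliver result metadata; it is
// ignored below if the descriptor is invalid or has no capacity.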
1650 ::aidl::android::hardware::common::fmq::MQDescriptor<
1651 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1652 descriptor;
1653 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1654 ASSERT_TRUE(resultQueueRet.isOk());
1655 std::shared_ptr<ResultMetadataQueue> resultQueue =
1656 std::make_shared<ResultMetadataQueue>(descriptor);
1657 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1658 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
1659 resultQueue = nullptr;
1660 // Don't use the queue onwards.
1661 }
1662
1663 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1664 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1665 partialResultCount, physicalIds, resultQueue);
1666
1667 std::vector<CaptureRequest> requests(1);
1668 CaptureRequest& request = requests[0];
1669 request.frameNumber = frameNumber;
1670 request.fmqSettingsSize = 0;
1671 request.settings = settingsMetadata;
1672
1673 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1674
1675 std::vector<buffer_handle_t> graphicBuffers;
1676 graphicBuffers.reserve(halStreams.size());
1677 outputBuffers.resize(halStreams.size());
1678 size_t k = 0;
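// For HAL buffer managed streams send a null handle with bufferId 0; otherwise
// allocate and attach a graphic buffer for each configured stream.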
1679 for (const auto& halStream : halStreams) {
1680 buffer_handle_t buffer_handle;
1681 bool useHalBufManagerForStream =
1682 halBufManagedStreamIds.find(halStream.id) != halBufManagedStreamIds.end();
1683 if (useHalBufManagerForStream) {
1684 outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(),
1685 BufferStatus::OK, NativeHandle(), NativeHandle()};
1686 } else {
1687 allocateGraphicBuffer(previewStream.width, previewStream.height,
1688 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
1689 static_cast<uint64_t>(halStream.producerUsage),
1690 static_cast<uint64_t>(halStream.consumerUsage))),
1691 halStream.overrideFormat, &buffer_handle);
1692 graphicBuffers.push_back(buffer_handle);
1693 outputBuffers[k] = {
1694 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1695 BufferStatus::OK, NativeHandle(), NativeHandle()};
1696 bufferId++;
1697 }
1698 k++;
1699 }
1700
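// Attach individual settings for one physical camera, limited to the filtered
// subset of supported physical request keys.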
1701 std::vector<PhysicalCameraSetting> camSettings(1);
1702 const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock();
1703 uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer;
1704 camSettings[0].settings = {std::vector(
1705 rawFilteredSettingsBuffer,
1706 rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))};
1707 overrideRotateAndCrop(&camSettings[0].settings);
1708 camSettings[0].fmqSettingsSize = 0;
1709 camSettings[0].physicalCameraId = physicalDeviceId;
1710
1711 request.inputBuffer = {
1712 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1713 request.physicalCameraSettings = camSettings;
1714
1715 {
1716 std::unique_lock<std::mutex> l(mLock);
1717 mInflightMap.clear();
1718 mInflightMap[frameNumber] = inflightReq;
1719 }
1720
1721 int32_t numRequestProcessed = 0;
1722 std::vector<BufferCache> cachesToRemove;
1723 ndk::ScopedAStatus returnStatus =
1724 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1725 ASSERT_TRUE(returnStatus.isOk());
1726 ASSERT_EQ(numRequestProcessed, 1u);
1727
1728 {
1729 std::unique_lock<std::mutex> l(mLock);
1730 while (!inflightReq->errorCodeValid &&
1731 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1732 auto timeout = std::chrono::system_clock::now() +
1733 std::chrono::seconds(kStreamBufferTimeoutSec);
1734 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1735 }
1736
1737 ASSERT_FALSE(inflightReq->errorCodeValid);
1738 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1739
1740 request.frameNumber++;
1741 // Empty settings should be supported after the first call
1742 // for repeating requests.
1743 request.settings.metadata.clear();
1744 request.physicalCameraSettings[0].settings.metadata.clear();
1745 // The buffer has been registered to HAL by bufferId, so per
1746 // API contract we should send a null handle for this buffer
1747 request.outputBuffers[0].buffer = NativeHandle();
1748 mInflightMap.clear();
1749 inflightReq = std::make_shared<InFlightRequest>(
1750 static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults,
1751 partialResultCount, physicalIds, resultQueue);
1752 mInflightMap[request.frameNumber] = inflightReq;
1753 }
1754
1755 returnStatus =
1756 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1757 ASSERT_TRUE(returnStatus.isOk());
1758 ASSERT_EQ(numRequestProcessed, 1u);
1759
1760 {
1761 std::unique_lock<std::mutex> l(mLock);
1762 while (!inflightReq->errorCodeValid &&
1763 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1764 auto timeout = std::chrono::system_clock::now() +
1765 std::chrono::seconds(kStreamBufferTimeoutSec);
1766 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1767 }
1768
1769 ASSERT_FALSE(inflightReq->errorCodeValid);
1770 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1771 }
1772
1773 // Invalid physical camera id should fail process requests
1774 frameNumber++;
1775 camSettings[0].physicalCameraId = invalidPhysicalId;
1776 camSettings[0].settings.metadata = settings;
1777
1778 request.physicalCameraSettings = camSettings; // Invalid camera settings
1779 returnStatus =
1780 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1781 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
1782 returnStatus.getServiceSpecificError());
1783
1784 defaultPreviewSettings.unlock(settingsBuffer);
1785 filteredSettings.unlock(filteredSettingsBuffer);
1786
1787 if (halBufManagedStreamIds.size() != 0) {
1788 std::vector<int32_t> streamIds;
1789 for (size_t i = 0; i < halStreams.size(); i++) {
1790 int32_t streamId = halStreams[i].id;
1791 if (halBufManagedStreamIds.find(streamId) != halBufManagedStreamIds.end()) {
1792 streamIds.emplace_back(streamId);
1793 }
1794 }
1795 verifyBuffersReturned(mSession, streamIds, cb);
1796 }
1797
1798 ret = mSession->close();
1799 mSession = nullptr;
1800 ASSERT_TRUE(ret.isOk());
1801 }
1802 }
1803
1804 // Generate and verify an ultra high resolution capture request
1805 TEST_P(CameraAidlTest, processUltraHighResolutionRequest) {
1806 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1807 int64_t bufferId = 1;
1808 int32_t frameNumber = 1;
1809 CameraMetadata settings;
1810
1811 for (const auto& name : cameraDeviceNames) {
1812 std::string version, deviceId;
1813 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1814 CameraMetadata meta;
1815
1816 std::shared_ptr<ICameraDevice> unusedDevice;
1817 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice);
1818 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1819 if (!isUltraHighResolution(staticMeta)) {
1820 ndk::ScopedAStatus ret = mSession->close();
1821 mSession = nullptr;
1822 ASSERT_TRUE(ret.isOk());
1823 continue;
1824 }
1825 CameraMetadata req;
1826 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1827 ndk::ScopedAStatus ret =
1828 mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req);
1829 ASSERT_TRUE(ret.isOk());
1830
1831 const camera_metadata_t* metadata =
1832 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
1833 size_t expectedSize = req.metadata.size();
1834 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1835 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1836
1837 size_t entryCount = get_camera_metadata_entry_count(metadata);
1838 ASSERT_GT(entryCount, 0u);
1839 defaultSettings = metadata;
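// Switch the still capture request to maximum resolution sensor pixel mode.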
1840 uint8_t sensorPixelMode =
1841 static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1842 ASSERT_EQ(::android::OK,
1843 defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1));
1844
1845 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
1846 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
1847 settings.metadata = std::vector(
1848 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
1849 overrideRotateAndCrop(&settings);
1850
1851 ret = mSession->close();
1852 mSession = nullptr;
1853 ASSERT_TRUE(ret.isOk());
1854
1855 std::vector<HalStream> halStreams;
1856 bool supportsPartialResults = false;
1857 std::set<int32_t> halBufManagedStreamIds;
1858 int32_t partialResultCount = 0;
1859 Stream previewStream;
1860 std::shared_ptr<DeviceCb> cb;
1861
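// Exercise both YUV and RAW16 outputs configured at maximum sensor resolution.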
1862 std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16};
1863 for (PixelFormat format : pixelFormats) {
1864 previewStream.usage =
1865 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
1866 GRALLOC1_CONSUMER_USAGE_CPU_READ);
1867 previewStream.dataSpace = Dataspace::UNKNOWN;
1868 configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams,
1869 &supportsPartialResults, &partialResultCount, &halBufManagedStreamIds,
1870 &cb, 0, /*maxResolution*/ true);
1871 ASSERT_NE(mSession, nullptr);
1872
1873 ::aidl::android::hardware::common::fmq::MQDescriptor<
1874 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1875 descriptor;
1876 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
1877 ASSERT_TRUE(resultQueueRet.isOk());
1878
1879 std::shared_ptr<ResultMetadataQueue> resultQueue =
1880 std::make_shared<ResultMetadataQueue>(descriptor);
1881 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1882 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
1883 resultQueue = nullptr;
1884 // Don't use the queue onwards.
1885 }
1886
1887 std::vector<buffer_handle_t> graphicBuffers;
1888 graphicBuffers.reserve(halStreams.size());
1889 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1890 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
1891 partialResultCount, std::unordered_set<std::string>(), resultQueue);
1892
1893 std::vector<CaptureRequest> requests(1);
1894 CaptureRequest& request = requests[0];
1895 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1896 outputBuffers.resize(halStreams.size());
1897
1898 size_t k = 0;
1899 for (const auto& halStream : halStreams) {
1900 buffer_handle_t buffer_handle;
1901 bool halBufManagerUsed =
1902 halBufManagedStreamIds.find(halStream.id) != halBufManagedStreamIds.end();
1903 if (halBufManagerUsed) {
1904 outputBuffers[k] = {halStream.id, 0,
1905 NativeHandle(), BufferStatus::OK,
1906 NativeHandle(), NativeHandle()};
1907 } else {
1908 allocateGraphicBuffer(previewStream.width, previewStream.height,
1909 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
1910 static_cast<uint64_t>(halStream.producerUsage),
1911 static_cast<uint64_t>(halStream.consumerUsage))),
1912 halStream.overrideFormat, &buffer_handle);
1913 graphicBuffers.push_back(buffer_handle);
1914 outputBuffers[k] = {
1915 halStream.id, bufferId, ::android::makeToAidl(buffer_handle),
1916 BufferStatus::OK, NativeHandle(), NativeHandle()};
1917 bufferId++;
1918 }
1919 k++;
1920 }
1921
1922 request.inputBuffer = {
1923 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
1924 request.frameNumber = frameNumber;
1925 request.fmqSettingsSize = 0;
1926 request.settings = settings;
1927 request.inputWidth = 0;
1928 request.inputHeight = 0;
1929
1930 {
1931 std::unique_lock<std::mutex> l(mLock);
1932 mInflightMap.clear();
1933 mInflightMap[frameNumber] = inflightReq;
1934 }
1935
1936 int32_t numRequestProcessed = 0;
1937 std::vector<BufferCache> cachesToRemove;
1938 ndk::ScopedAStatus returnStatus =
1939 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
1940 ASSERT_TRUE(returnStatus.isOk());
1941 ASSERT_EQ(numRequestProcessed, 1u);
1942
1943 {
1944 std::unique_lock<std::mutex> l(mLock);
1945 while (!inflightReq->errorCodeValid &&
1946 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
1947 auto timeout = std::chrono::system_clock::now() +
1948 std::chrono::seconds(kStreamBufferTimeoutSec);
1949 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
1950 }
1951
1952 ASSERT_FALSE(inflightReq->errorCodeValid);
1953 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
1954 }
1955 if (halBufManagedStreamIds.size()) {
1956 std::vector<int32_t> streamIds;
1957 for (size_t i = 0; i < halStreams.size(); i++) {
1958 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
1959 streamIds.emplace_back(halStreams[i].id);
1960 }
1961 }
1962 verifyBuffersReturned(mSession, streamIds, cb);
1963 }
1964
1965 ret = mSession->close();
1966 mSession = nullptr;
1967 ASSERT_TRUE(ret.isOk());
1968 }
1969 }
1970 }
1971
1972 // Generate and verify 10-bit dynamic range request
1973 TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
1974 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
1975 CameraMetadata settings;
1976
1977 for (const auto& name : cameraDeviceNames) {
1978 std::string version, deviceId;
1979 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
1980 CameraMetadata meta;
1981 std::shared_ptr<ICameraDevice> device;
1982 openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
1983 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
1984 if (!is10BitDynamicRangeCapable(staticMeta)) {
1985 ndk::ScopedAStatus ret = mSession->close();
1986 mSession = nullptr;
1987 ASSERT_TRUE(ret.isOk());
1988 continue;
1989 }
1990 std::vector<RequestAvailableDynamicRangeProfilesMap> profileList;
1991 get10BitDynamicRangeProfiles(staticMeta, &profileList);
1992 ASSERT_FALSE(profileList.empty());
1993
1994 CameraMetadata req;
1995 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
1996 ndk::ScopedAStatus ret =
1997 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
1998 ASSERT_TRUE(ret.isOk());
1999
2000 const camera_metadata_t* metadata =
2001 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
2002 size_t expectedSize = req.metadata.size();
2003 int result = validate_camera_metadata_structure(metadata, &expectedSize);
2004 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
2005
2006 size_t entryCount = get_camera_metadata_entry_count(metadata);
2007 ASSERT_GT(entryCount, 0u);
2008 defaultSettings = metadata;
2009
2010 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
2011 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
2012 settings.metadata = std::vector(
2013 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
2014 overrideRotateAndCrop(&settings);
2015
2016 ret = mSession->close();
2017 mSession = nullptr;
2018 ASSERT_TRUE(ret.isOk());
2019
2020 std::vector<HalStream> halStreams;
2021 bool supportsPartialResults = false;
2022 std::set<int32_t> halBufManagedStreamIds;
2023 int32_t partialResultCount = 0;
2024 Stream previewStream;
2025 std::shared_ptr<DeviceCb> cb;
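// Configure a preview stream and run captures for every advertised 10-bit
// dynamic range profile.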
2026 for (const auto& profile : profileList) {
2027 previewStream.usage =
2028 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2029 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
2030 previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
2031 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
2032 &previewStream, &halStreams, &supportsPartialResults,
2033 &partialResultCount, &halBufManagedStreamIds, &cb, 0,
2034 /*maxResolution*/ false, profile);
2035 ASSERT_NE(mSession, nullptr);
2036
2037 ::aidl::android::hardware::common::fmq::MQDescriptor<
2038 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2039 descriptor;
2040 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2041 ASSERT_TRUE(resultQueueRet.isOk());
2042
2043 std::shared_ptr<ResultMetadataQueue> resultQueue =
2044 std::make_shared<ResultMetadataQueue>(descriptor);
2045 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2046 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
2047 resultQueue = nullptr;
2048 // Don't use the queue onwards.
2049 }
2050
2051 mInflightMap.clear();
2052 // Queue as many requests as needed to fill the HAL's in-flight queue
2053 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
2054
2055 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
2056 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2057 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
2058 partialResultCount, std::unordered_set<std::string>(), resultQueue);
2059
2060 CaptureRequest& request = requests[requestId];
2061 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2062 outputBuffers.resize(halStreams.size());
2063
2064 size_t k = 0;
2065 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
2066 std::vector<buffer_handle_t> graphicBuffers;
2067 graphicBuffers.reserve(halStreams.size());
2068
2069 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
2070 for (const auto& halStream : halStreams) {
2071 buffer_handle_t buffer_handle;
2072 if (contains(halBufManagedStreamIds, halStream.id)) {
2073 outputBuffers[k] = {halStream.id, 0,
2074 NativeHandle(), BufferStatus::OK,
2075 NativeHandle(), NativeHandle()};
2076 } else {
2077 auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2078 static_cast<uint64_t>(halStream.producerUsage),
2079 static_cast<uint64_t>(halStream.consumerUsage)));
2080 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
2081 halStream.overrideFormat, &buffer_handle);
2082
2083 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
2084 graphicBuffers.push_back(buffer_handle);
2085 outputBuffers[k] = {halStream.id, bufferId,
2086 android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
2087 NativeHandle()};
2088 }
2089 k++;
2090 }
2091
2092 request.inputBuffer = {
2093 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2094 request.frameNumber = bufferId;
2095 request.fmqSettingsSize = 0;
2096 request.settings = settings;
2097 request.inputWidth = 0;
2098 request.inputHeight = 0;
2099
2100 {
2101 std::unique_lock<std::mutex> l(mLock);
2102 mInflightMap[bufferId] = inflightReq;
2103 }
2104
2105 }
2106
2107 int32_t numRequestProcessed = 0;
2108 std::vector<BufferCache> cachesToRemove;
2109 ndk::ScopedAStatus returnStatus =
2110 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2111 ASSERT_TRUE(returnStatus.isOk());
2112 ASSERT_EQ(numRequestProcessed, requests.size());
2113
2114 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
2115 std::vector<int32_t> {halStreams[0].id});
2116 ASSERT_TRUE(returnStatus.isOk());
2117
2118 // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
2119 // is used to indicate a buffer that is not present/available so buffer ids as well
2120 // as frame numbers begin with 1.
2121 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
2122 const auto& inflightReq = mInflightMap[frameNumber];
2123 std::unique_lock<std::mutex> l(mLock);
2124 while (!inflightReq->errorCodeValid &&
2125 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2126 auto timeout = std::chrono::system_clock::now() +
2127 std::chrono::seconds(kStreamBufferTimeoutSec);
2128 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2129 }
2130
2131 waitForReleaseFence(inflightReq->resultOutputBuffers);
2132
2133 ASSERT_FALSE(inflightReq->errorCodeValid);
2134 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2135 verify10BitMetadata(mHandleImporter, *inflightReq, profile);
2136 }
2137
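// For HAL buffer managed streams, request a stream flush and wait until all
// outstanding buffers have been returned.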
2138 if (halBufManagedStreamIds.size() != 0) {
2139 std::vector<int32_t> streamIds;
2140 for (size_t i = 0; i < halStreams.size(); i++) {
2141 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
2142 streamIds.emplace_back(halStreams[i].id);
2143 }
2144 }
2145 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
2146 cb->waitForBuffersReturned();
2147 }
2148
2149 ret = mSession->close();
2150 mSession = nullptr;
2151 ASSERT_TRUE(ret.isOk());
2152 }
2153 }
2154 }
2155
2156 TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
2157 static int profiles[] = {ColorSpaceNamed::DISPLAY_P3, ColorSpaceNamed::SRGB};
2158
2159 for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
2160 processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
2161 static_cast<RequestAvailableDynamicRangeProfilesMap>(
2162 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
2163 }
2164 }
2165
2166 TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
2167 static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
2168 dynamicRangeProfiles[] = {
2169 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
2170 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
2171 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
2172 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
2173 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
2174 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
2175 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
2176 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
2177 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
2178 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
2179 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
2180 };
2181
2182 // Process all dynamic range profiles with BT2020_HLG
2183 for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
2184 processColorSpaceRequest(
2185 static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020_HLG),
2186 static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
2187 }
2188 }
2189
2190 TEST_P(CameraAidlTest, processZoomSettingsOverrideRequests) {
2191 const int32_t kFrameCount = 5;
2192 const int32_t kTestCases = 2;
2193 const bool kOverrideSequence[kTestCases][kFrameCount] = {// ZOOM, ZOOM, ZOOM, ZOOM, ZOOM;
2194 {true, true, true, true, true},
2195 // OFF, ZOOM, ZOOM, ZOOM, OFF;
2196 {false, true, true, true, false}};
2197 const bool kExpectedOverrideResults[kTestCases][kFrameCount] = {
2198 // All results should be overridden except the last one. The last result's
2199 // zoom doesn't have speed-up.
2200 {true, true, true, true, false},
2201 // Because we require at least 1 frame speed-up, request #1, #2 and #3
2202 // will be overridden.
2203 {true, true, true, false, false}};
2204
2205 for (int i = 0; i < kTestCases; i++) {
2206 processZoomSettingsOverrideRequests(kFrameCount, kOverrideSequence[i],
2207 kExpectedOverrideResults[i]);
2208 }
2209 }
2210
2211 // Generate and verify a burst containing alternating sensor sensitivity values
2212 TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
2213 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2214 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2215 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2216 int64_t bufferId = 1;
2217 int32_t frameNumber = 1;
2218 float isoTol = .03f;
2219 CameraMetadata settings;
2220
2221 for (const auto& name : cameraDeviceNames) {
2222 CameraMetadata meta;
2223 settings.metadata.clear();
2224 std::shared_ptr<ICameraDevice> unusedDevice;
2225 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2226 &unusedDevice /*out*/);
2227 camera_metadata_t* staticMetaBuffer =
2228 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2229 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2230 staticMetaBuffer);
2231
2232 camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
2233 ASSERT_TRUE(0 < hwLevel.count);
2234 if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] ||
2235 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) {
2236 // Limited/External devices can skip this test
2237 ndk::ScopedAStatus ret = mSession->close();
2238 mSession = nullptr;
2239 ASSERT_TRUE(ret.isOk());
2240 continue;
2241 }
2242
2243 camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
2244 ASSERT_EQ(isoRange.count, 2u);
2245
2246 ndk::ScopedAStatus ret = mSession->close();
2247 mSession = nullptr;
2248 ASSERT_TRUE(ret.isOk());
2249
2250 bool supportsPartialResults = false;
2251 bool useHalBufManager = false;
2252 int32_t partialResultCount = 0;
2253 Stream previewStream;
2254 std::vector<HalStream> halStreams;
2255 std::shared_ptr<DeviceCb> cb;
2256 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2257 &previewStream /*out*/, &halStreams /*out*/,
2258 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2259 &useHalBufManager /*out*/, &cb /*out*/);
2260
2261 ::aidl::android::hardware::common::fmq::MQDescriptor<
2262 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2263 descriptor;
2264 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2265 std::shared_ptr<ResultMetadataQueue> resultQueue =
2266 std::make_shared<ResultMetadataQueue>(descriptor);
2267 ASSERT_TRUE(resultQueueRet.isOk());
2268 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2269 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
2270 resultQueue = nullptr;
2271 // Don't use the queue onwards.
2272 }
2273
2274 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
2275 ASSERT_TRUE(ret.isOk());
2276
2277 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2278 std::vector<CaptureRequest> requests(kBurstFrameCount);
2279 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2280 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2281 std::vector<int32_t> isoValues(kBurstFrameCount);
2282 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2283
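// Build a burst that alternates between the minimum and maximum supported
// sensitivity values with all 3A routines disabled.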
2284 for (int32_t i = 0; i < kBurstFrameCount; i++) {
2285 std::unique_lock<std::mutex> l(mLock);
2286 CaptureRequest& request = requests[i];
2287 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2288 outputBuffers.resize(1);
2289 StreamBuffer& outputBuffer = outputBuffers[0];
2290
2291 isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1];
2292 if (useHalBufManager) {
2293 outputBuffer = {halStreams[0].id, 0,
2294 NativeHandle(), BufferStatus::OK,
2295 NativeHandle(), NativeHandle()};
2296 } else {
2297 allocateGraphicBuffer(previewStream.width, previewStream.height,
2298 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2299 static_cast<uint64_t>(halStreams[0].producerUsage),
2300 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2301 halStreams[0].overrideFormat, &buffers[i]);
2302 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
2303 BufferStatus::OK, NativeHandle(), NativeHandle()};
2304 }
2305
2306 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2307
2308 // Disable all 3A routines
2309 uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
2310 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1));
2311 ASSERT_EQ(::android::OK,
2312 requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1));
2313 camera_metadata_t* metaBuffer = requestMeta.release();
2314 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2315 requestSettings[i].metadata = std::vector(
2316 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2317 overrideRotateAndCrop(&(requestSettings[i]));
2318
2319 request.frameNumber = frameNumber + i;
2320 request.fmqSettingsSize = 0;
2321 request.settings = requestSettings[i];
2322 request.inputBuffer = {
2323 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2324
2325 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2326 partialResultCount, resultQueue);
2327 mInflightMap[frameNumber + i] = inflightReqs[i];
2328 }
2329
2330 int32_t numRequestProcessed = 0;
2331 std::vector<BufferCache> cachesToRemove;
2332
2333 ndk::ScopedAStatus returnStatus =
2334 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2335 ASSERT_TRUE(returnStatus.isOk());
2336 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2337
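// Wait for every result and verify the reported sensitivity matches the
// requested value within the tolerance.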
2338 for (size_t i = 0; i < kBurstFrameCount; i++) {
2339 std::unique_lock<std::mutex> l(mLock);
2340 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2341 (!inflightReqs[i]->haveResultMetadata))) {
2342 auto timeout = std::chrono::system_clock::now() +
2343 std::chrono::seconds(kStreamBufferTimeoutSec);
2344 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2345 }
2346
2347 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2348 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2349 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2350 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2351 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY));
2352 camera_metadata_entry_t isoResult =
2353 inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY);
2354 ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <=
2355 std::round(isoValues[i] * isoTol));
2356 }
2357
2358 if (useHalBufManager) {
2359 verifyBuffersReturned(mSession, previewStream.id, cb);
2360 }
2361 ret = mSession->close();
2362 mSession = nullptr;
2363 ASSERT_TRUE(ret.isOk());
2364 }
2365 }
2366
2367 // Test whether an incorrect capture request with missing settings will
2368 // be reported correctly.
2369 TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) {
2370 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2371 std::vector<AvailableStream> outputPreviewStreams;
2372 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2373 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2374 int64_t bufferId = 1;
2375 int32_t frameNumber = 1;
2376 CameraMetadata settings;
2377
2378 for (const auto& name : cameraDeviceNames) {
2379 Stream previewStream;
2380 std::vector<HalStream> halStreams;
2381 std::shared_ptr<DeviceCb> cb;
2382 bool supportsPartialResults = false;
2383 bool useHalBufManager = false;
2384 int32_t partialResultCount = 0;
2385 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2386 &previewStream /*out*/, &halStreams /*out*/,
2387 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2388 &useHalBufManager /*out*/, &cb /*out*/);
2389 ASSERT_NE(mSession, nullptr);
2390 ASSERT_FALSE(halStreams.empty());
2391
2392 buffer_handle_t buffer_handle = nullptr;
2393
2394 if (useHalBufManager) {
2395 bufferId = 0;
2396 } else {
2397 allocateGraphicBuffer(previewStream.width, previewStream.height,
2398 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2399 static_cast<uint64_t>(halStreams[0].producerUsage),
2400 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2401 halStreams[0].overrideFormat, &buffer_handle);
2402 }
2403
2404 std::vector<CaptureRequest> requests(1);
2405 CaptureRequest& request = requests[0];
2406 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2407 outputBuffers.resize(1);
2408 StreamBuffer& outputBuffer = outputBuffers[0];
2409
2410 outputBuffer = {
2411 halStreams[0].id,
2412 bufferId,
2413 buffer_handle == nullptr ? NativeHandle() : ::android::makeToAidl(buffer_handle),
2414 BufferStatus::OK,
2415 NativeHandle(),
2416 NativeHandle()};
2417
2418 request.inputBuffer = {
2419 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2420 request.frameNumber = frameNumber;
2421 request.fmqSettingsSize = 0;
2422 request.settings = settings;
2423
2424 // Settings were not correctly initialized, we should fail here
2425 int32_t numRequestProcessed = 0;
2426 std::vector<BufferCache> cachesToRemove;
2427 ndk::ScopedAStatus ret =
2428 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2429 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2430 ASSERT_EQ(numRequestProcessed, 0u);
2431
2432 ret = mSession->close();
2433 mSession = nullptr;
2434 ASSERT_TRUE(ret.isOk());
2435 }
2436 }
2437
2438 // Verify camera offline session behavior
2439 TEST_P(CameraAidlTest, switchToOffline) {
2440 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2441 AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight,
2442 static_cast<int32_t>(PixelFormat::BLOB)};
2443 int64_t bufferId = 1;
2444 int32_t frameNumber = 1;
2445 CameraMetadata settings;
2446
2447 for (const auto& name : cameraDeviceNames) {
2448 CameraMetadata meta;
2449 {
2450 std::shared_ptr<ICameraDevice> unusedDevice;
2451 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2452 &unusedDevice);
2453 camera_metadata_t* staticMetaBuffer = clone_camera_metadata(
2454 reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
2455 ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta(
2456 staticMetaBuffer);
2457
2458 if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) {
2459 ndk::ScopedAStatus ret = mSession->close();
2460 mSession = nullptr;
2461 ASSERT_TRUE(ret.isOk());
2462 continue;
2463 }
2464 ndk::ScopedAStatus ret = mSession->close();
2465 mSession = nullptr;
2466 ASSERT_TRUE(ret.isOk());
2467 }
2468
2469 bool supportsPartialResults = false;
2470 int32_t partialResultCount = 0;
2471 Stream stream;
2472 std::vector<HalStream> halStreams;
2473 std::shared_ptr<DeviceCb> cb;
2474 int32_t jpegBufferSize;
2475 std::set<int32_t> halBufManagedStreamIds;
2476 configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/,
2477 &halStreams /*out*/, &supportsPartialResults /*out*/,
2478 &partialResultCount /*out*/, &cb /*out*/,
2479 &jpegBufferSize /*out*/, &halBufManagedStreamIds /*out*/);
2480
2481 auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE,
2482 &settings);
2483 ASSERT_TRUE(ret.isOk());
2484
2485 ::aidl::android::hardware::common::fmq::MQDescriptor<
2486 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2487 descriptor;
2488
2489 ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2490 ASSERT_TRUE(resultQueueRet.isOk());
2491 std::shared_ptr<ResultMetadataQueue> resultQueue =
2492 std::make_shared<ResultMetadataQueue>(descriptor);
2493 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2494 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
2495 resultQueue = nullptr;
2496 // Don't use the queue onwards.
2497 }
2498
2499 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
2500
2501 std::vector<buffer_handle_t> buffers(kBurstFrameCount);
2502 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount);
2503 std::vector<CameraMetadata> requestSettings(kBurstFrameCount);
2504
2505 std::vector<CaptureRequest> requests(kBurstFrameCount);
2506
2507 HalStream halStream = halStreams[0];
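// Queue a burst of still capture requests; some of them may later be handed
// over to the offline session.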
2508 for (uint32_t i = 0; i < kBurstFrameCount; i++) {
2509 CaptureRequest& request = requests[i];
2510 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2511 outputBuffers.resize(1);
2512 StreamBuffer& outputBuffer = outputBuffers[0];
2513
2514 std::unique_lock<std::mutex> l(mLock);
2515 if (contains(halBufManagedStreamIds, halStream.id)) {
2516 outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(),
2517 NativeHandle()};
2518 } else {
2519 // jpeg buffer (w,h) = (blobLen, 1)
2520 allocateGraphicBuffer(jpegBufferSize, /*height*/ 1,
2521 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2522 static_cast<uint64_t>(halStream.producerUsage),
2523 static_cast<uint64_t>(halStream.consumerUsage))),
2524 halStream.overrideFormat, &buffers[i]);
2525 outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]),
2526 BufferStatus::OK, NativeHandle(), NativeHandle()};
2527 }
2528
2529 requestMeta.clear();
2530 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
2531
2532 camera_metadata_t* metaBuffer = requestMeta.release();
2533 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2534 requestSettings[i].metadata = std::vector(
2535 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2536 overrideRotateAndCrop(&requestSettings[i]);
2537
2538 request.frameNumber = frameNumber + i;
2539 request.fmqSettingsSize = 0;
2540 request.settings = requestSettings[i];
2541 request.inputBuffer = {/*streamId*/ -1,
2542 /*bufferId*/ 0, NativeHandle(),
2543 BufferStatus::ERROR, NativeHandle(),
2544 NativeHandle()};
2545
2546 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2547 partialResultCount, resultQueue);
2548 mInflightMap[frameNumber + i] = inflightReqs[i];
2549 }
2550
2551 int32_t numRequestProcessed = 0;
2552 std::vector<BufferCache> cachesToRemove;
2553
2554 ndk::ScopedAStatus returnStatus =
2555 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2556 ASSERT_TRUE(returnStatus.isOk());
2557 ASSERT_EQ(numRequestProcessed, kBurstFrameCount);
2558
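// Attempt to switch the still capture stream, together with its pending
// requests, to an offline session.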
2559 std::vector<int32_t> offlineStreamIds = {halStream.id};
2560 CameraOfflineSessionInfo offlineSessionInfo;
2561 std::shared_ptr<ICameraOfflineSession> offlineSession;
2562 returnStatus =
2563 mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession);
2564
2565 if (!halStreams[0].supportOffline) {
2566 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2567 returnStatus.getServiceSpecificError());
2568 ret = mSession->close();
2569 mSession = nullptr;
2570 ASSERT_TRUE(ret.isOk());
2571 continue;
2572 }
2573
2574 ASSERT_TRUE(returnStatus.isOk());
2575 // The HAL might be unable to find any requests that qualify for offline mode.
2576 if (offlineSession == nullptr) {
2577 ret = mSession->close();
2578 mSession = nullptr;
2579 ASSERT_TRUE(ret.isOk());
2580 continue;
2581 }
2582
2583 ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u);
2584 ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id);
2585 ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u);
2586
2587 // close device session to make sure offline session does not rely on it
2588 ret = mSession->close();
2589 mSession = nullptr;
2590 ASSERT_TRUE(ret.isOk());
2591
2592 ::aidl::android::hardware::common::fmq::MQDescriptor<
2593 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2594 offlineResultDescriptor;
2595
2596 auto offlineResultQueueRet =
2597 offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor);
2598 std::shared_ptr<ResultMetadataQueue> offlineResultQueue =
2599 std::make_shared<ResultMetadataQueue>(offlineResultDescriptor);
2600 if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) {
2601 ALOGE("%s: offline session returns empty result metadata fmq, not using it", __func__);
2602 offlineResultQueue = nullptr;
2603 // Don't use the queue onwards.
2604 }
2605 ASSERT_TRUE(offlineResultQueueRet.isOk());
2606
2607 updateInflightResultQueue(offlineResultQueue);
2608
2609 ret = offlineSession->setCallback(cb);
2610 ASSERT_TRUE(ret.isOk());
2611
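// The remaining results are expected to arrive through the offline session via
// the re-registered callback and result metadata queue.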
2612 for (size_t i = 0; i < kBurstFrameCount; i++) {
2613 std::unique_lock<std::mutex> l(mLock);
2614 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
2615 (!inflightReqs[i]->haveResultMetadata))) {
2616 auto timeout = std::chrono::system_clock::now() +
2617 std::chrono::seconds(kStreamBufferTimeoutSec);
2618 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2619 }
2620
2621 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
2622 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
2623 ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
2624 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
2625 }
2626
2627 ret = offlineSession->close();
2628 ASSERT_TRUE(ret.isOk());
2629 }
2630 }
2631
2632 // Check whether an invalid capture request with missing output buffers
2633 // will be reported correctly.
2634 TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) {
2635 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2636 std::vector<AvailableStream> outputBlobStreams;
2637 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2638 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2639 int32_t frameNumber = 1;
2640 CameraMetadata settings;
2641
2642 for (const auto& name : cameraDeviceNames) {
2643 Stream previewStream;
2644 std::vector<HalStream> halStreams;
2645 std::shared_ptr<DeviceCb> cb;
2646 bool supportsPartialResults = false;
2647 bool useHalBufManager = false;
2648 int32_t partialResultCount = 0;
2649 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2650 &previewStream /*out*/, &halStreams /*out*/,
2651 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2652 &useHalBufManager /*out*/, &cb /*out*/);
2653
2654 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2655 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2656 ASSERT_TRUE(ret.isOk());
2657 overrideRotateAndCrop(&settings);
2658
2659 std::vector<CaptureRequest> requests(1);
2660 CaptureRequest& request = requests[0];
2661 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2662 outputBuffers.resize(1);
2663 // Empty output buffer
2664 outputBuffers[0] = {
2665 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2666
2667 request.inputBuffer = {
2668 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2669 request.frameNumber = frameNumber;
2670 request.fmqSettingsSize = 0;
2671 request.settings = settings;
2672
2673 // Output buffers are missing, we should fail here
2674 int32_t numRequestProcessed = 0;
2675 std::vector<BufferCache> cachesToRemove;
2676 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2677 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError());
2678 ASSERT_EQ(numRequestProcessed, 0u);
2679
2680 ret = mSession->close();
2681 mSession = nullptr;
2682 ASSERT_TRUE(ret.isOk());
2683 }
2684 }
2685
2686 // Generate, trigger and flush a preview request
2687 TEST_P(CameraAidlTest, flushPreviewRequest) {
2688 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2689 std::vector<AvailableStream> outputPreviewStreams;
2690 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2691 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2692 int64_t bufferId = 1;
2693 int32_t frameNumber = 1;
2694 CameraMetadata settings;
2695
2696 for (const auto& name : cameraDeviceNames) {
2697 Stream previewStream;
2698 std::vector<HalStream> halStreams;
2699 std::shared_ptr<DeviceCb> cb;
2700 bool supportsPartialResults = false;
2701 bool useHalBufManager = false;
2702 int32_t partialResultCount = 0;
2703
2704 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2705 &previewStream /*out*/, &halStreams /*out*/,
2706 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2707 &useHalBufManager /*out*/, &cb /*out*/);
2708
2709 ASSERT_NE(mSession, nullptr);
2710 ASSERT_NE(cb, nullptr);
2711 ASSERT_FALSE(halStreams.empty());
2712
2713 ::aidl::android::hardware::common::fmq::MQDescriptor<
2714 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2715 descriptor;
2716
2717 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
2718 std::shared_ptr<ResultMetadataQueue> resultQueue =
2719 std::make_shared<ResultMetadataQueue>(descriptor);
2720 ASSERT_TRUE(resultQueueRet.isOk());
2721 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2722 ALOGE("%s: HAL returns empty result metadata fmq, not using it", __func__);
2723 resultQueue = nullptr;
2724 // Don't use the queue onwards.
2725 }
2726
2727 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2728 1, false, supportsPartialResults, partialResultCount, resultQueue);
2729 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2730
2731 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings);
2732 ASSERT_TRUE(ret.isOk());
2733 overrideRotateAndCrop(&settings);
2734
2735 buffer_handle_t buffer_handle;
2736 std::vector<CaptureRequest> requests(1);
2737 CaptureRequest& request = requests[0];
2738 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2739 outputBuffers.resize(1);
2740 StreamBuffer& outputBuffer = outputBuffers[0];
2741 if (useHalBufManager) {
2742 bufferId = 0;
2743 outputBuffer = {halStreams[0].id, bufferId, NativeHandle(),
2744 BufferStatus::OK, NativeHandle(), NativeHandle()};
2745 } else {
2746 allocateGraphicBuffer(previewStream.width, previewStream.height,
2747 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2748 static_cast<uint64_t>(halStreams[0].producerUsage),
2749 static_cast<uint64_t>(halStreams[0].consumerUsage))),
2750 halStreams[0].overrideFormat, &buffer_handle);
2751 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2752 BufferStatus::OK, NativeHandle(), NativeHandle()};
2753 }
2754
2755 request.frameNumber = frameNumber;
2756 request.fmqSettingsSize = 0;
2757 request.settings = settings;
2758 request.inputBuffer = {
2759 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2760
2761 {
2762 std::unique_lock<std::mutex> l(mLock);
2763 mInflightMap.clear();
2764 mInflightMap[frameNumber] = inflightReq;
2765 }
2766
2767 int32_t numRequestProcessed = 0;
2768 std::vector<BufferCache> cachesToRemove;
2769 ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2770 ASSERT_TRUE(ret.isOk());
2771 ASSERT_EQ(numRequestProcessed, 1u);
2772
2773 // Flush before waiting for request to complete.
2774 ndk::ScopedAStatus returnStatus = mSession->flush();
2775 ASSERT_TRUE(returnStatus.isOk());
2776
2777 {
2778 std::unique_lock<std::mutex> l(mLock);
2779 while (!inflightReq->errorCodeValid &&
2780 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2781 auto timeout = std::chrono::system_clock::now() +
2782 std::chrono::seconds(kStreamBufferTimeoutSec);
2783 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2784 }
2785
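// Depending on timing the flushed request either completes normally or fails
// with a request/result/buffer error.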
2786 if (!inflightReq->errorCodeValid) {
2787 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2788 ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2789 } else {
2790 switch (inflightReq->errorCode) {
2791 case ErrorCode::ERROR_REQUEST:
2792 case ErrorCode::ERROR_RESULT:
2793 case ErrorCode::ERROR_BUFFER:
2794 // Expected
2795 break;
2796 case ErrorCode::ERROR_DEVICE:
2797 default:
2798 FAIL() << "Unexpected error: "
2799 << static_cast<uint32_t>(inflightReq->errorCode);
2800 }
2801 }
2802 }
2803
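// With HAL buffer management, all buffers the HAL requested for the preview stream must be returned.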
2804 if (useHalBufManager) {
2805 verifyBuffersReturned(mSession, previewStream.id, cb);
2806 }
2807
2808 ret = mSession->close();
2809 mSession = nullptr;
2810 ASSERT_TRUE(ret.isOk());
2811 }
2812 }
2813
2814 // Verify that camera flushes correctly without any pending requests.
2815 TEST_P(CameraAidlTest, flushEmpty) {
2816 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2817 std::vector<AvailableStream> outputPreviewStreams;
2818 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2819 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2820
2821 for (const auto& name : cameraDeviceNames) {
2822 Stream previewStream;
2823 std::vector<HalStream> halStreams;
2824 std::shared_ptr<DeviceCb> cb;
2825 bool supportsPartialResults = false;
2826 bool useHalBufManager = false;
2827
2828 int32_t partialResultCount = 0;
2829 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
2830 &previewStream /*out*/, &halStreams /*out*/,
2831 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2832 &useHalBufManager /*out*/, &cb /*out*/);
2833
2834 ndk::ScopedAStatus returnStatus = mSession->flush();
2835 ASSERT_TRUE(returnStatus.isOk());
2836
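// No request was submitted before the flush, so no result callbacks are expected within the timeout.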
2837 {
2838 std::unique_lock<std::mutex> l(mLock);
2839 auto timeout = std::chrono::system_clock::now() +
2840 std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
2841 ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2842 }
2843
2844 ndk::ScopedAStatus ret = mSession->close();
2845 mSession = nullptr;
2846 ASSERT_TRUE(ret.isOk());
2847 }
2848 }
2849
2850 // Test camera provider notify method
2851 TEST_P(CameraAidlTest, providerDeviceStateNotification) {
2852 notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED);
2853 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
2854 }
2855
2856 // Verify that all supported stream formats and sizes can be configured
2857 // successfully for the injection camera.
2858 TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) {
2859 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2860 std::vector<AvailableStream> outputStreams;
2861
2862 for (const auto& name : cameraDeviceNames) {
2863 CameraMetadata metadata;
2864
2865 std::shared_ptr<ICameraInjectionSession> injectionSession;
2866 std::shared_ptr<ICameraDevice> unusedDevice;
2867 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2868 &unusedDevice /*out*/);
2869 if (injectionSession == nullptr) {
2870 continue;
2871 }
2872
2873 camera_metadata_t* staticMetaBuffer =
2874 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2875 CameraMetadata chars;
2876 chars.metadata = metadata.metadata;
2877
2878 outputStreams.clear();
2879 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2880 ASSERT_NE(0u, outputStreams.size());
2881
2882 int32_t jpegBufferSize = 0;
2883 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2884 ASSERT_NE(0u, jpegBufferSize);
2885
2886 int32_t streamId = 0;
2887 int32_t streamConfigCounter = 0;
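// Configure each advertised output size/format as a single injection stream; every combination must succeed.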
2888 for (auto& it : outputStreams) {
2889 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format));
2890 Stream stream = {streamId,
2891 StreamType::OUTPUT,
2892 it.width,
2893 it.height,
2894 static_cast<PixelFormat>(it.format),
2895 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2896 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2897 dataspace,
2898 StreamRotation::ROTATION_0,
2899 std::string(),
2900 jpegBufferSize,
2901 0,
2902 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2903 RequestAvailableDynamicRangeProfilesMap::
2904 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2905
2906 std::vector<Stream> streams = {stream};
2907 StreamConfiguration config;
2908 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2909 jpegBufferSize);
2910
2911 config.streamConfigCounter = streamConfigCounter++;
2912 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2913 ASSERT_TRUE(s.isOk());
2914 streamId++;
2915 }
2916
2917 std::shared_ptr<ICameraDeviceSession> session;
2918 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2919 ASSERT_TRUE(ret.isOk());
2920 ASSERT_NE(session, nullptr);
2921 ret = session->close();
2922 ASSERT_TRUE(ret.isOk());
2923 }
2924 }
2925
2926 // Check for correct handling of invalid/incorrect configuration parameters for the injection camera.
2927 TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) {
2928 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2929 std::vector<AvailableStream> outputStreams;
2930
2931 for (const auto& name : cameraDeviceNames) {
2932 CameraMetadata metadata;
2933 std::shared_ptr<ICameraInjectionSession> injectionSession;
2934 std::shared_ptr<ICameraDevice> unusedDevice;
2935 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
2936 &unusedDevice);
2937 if (injectionSession == nullptr) {
2938 continue;
2939 }
2940
2941 camera_metadata_t* staticMetaBuffer =
2942 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
2943 std::shared_ptr<ICameraDeviceSession> session;
2944 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
2945 ASSERT_TRUE(ret.isOk());
2946 ASSERT_NE(session, nullptr);
2947
2948 CameraMetadata chars;
2949 chars.metadata = metadata.metadata;
2950
2951 outputStreams.clear();
2952 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams));
2953 ASSERT_NE(0u, outputStreams.size());
2954
2955 int32_t jpegBufferSize = 0;
2956 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize));
2957 ASSERT_NE(0u, jpegBufferSize);
2958
2959 int32_t streamId = 0;
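// A stream with zero width and height is invalid and must be rejected.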
2960 Stream stream = {streamId++,
2961 StreamType::OUTPUT,
2962 0,
2963 0,
2964 static_cast<PixelFormat>(outputStreams[0].format),
2965 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2966 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2967 Dataspace::UNKNOWN,
2968 StreamRotation::ROTATION_0,
2969 std::string(),
2970 jpegBufferSize,
2971 0,
2972 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2973 RequestAvailableDynamicRangeProfilesMap::
2974 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2975
2976 int32_t streamConfigCounter = 0;
2977 std::vector<Stream> streams = {stream};
2978 StreamConfiguration config;
2979 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2980 jpegBufferSize);
2981
2982 config.streamConfigCounter = streamConfigCounter++;
2983 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
2984 ASSERT_TRUE(
2985 (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) ||
2986 (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError()));
2987
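// An INT32_MAX x INT32_MAX resolution must also be rejected.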
2988 stream = {streamId++,
2989 StreamType::OUTPUT,
2990 INT32_MAX,
2991 INT32_MAX,
2992 static_cast<PixelFormat>(outputStreams[0].format),
2993 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2994 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2995 Dataspace::UNKNOWN,
2996 StreamRotation::ROTATION_0,
2997 std::string(),
2998 jpegBufferSize,
2999 0,
3000 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3001 RequestAvailableDynamicRangeProfilesMap::
3002 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3003
3004 streams[0] = stream;
3005 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3006 jpegBufferSize);
3007 config.streamConfigCounter = streamConfigCounter++;
3008 s = injectionSession->configureInjectionStreams(config, chars);
3009 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3010
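// For every supported size, an invalid pixel format and an invalid rotation must each be rejected.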
3011 for (auto& it : outputStreams) {
3012 stream = {streamId++,
3013 StreamType::OUTPUT,
3014 it.width,
3015 it.height,
3016 static_cast<PixelFormat>(INT32_MAX),
3017 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3018 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3019 Dataspace::UNKNOWN,
3020 StreamRotation::ROTATION_0,
3021 std::string(),
3022 jpegBufferSize,
3023 0,
3024 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3025 RequestAvailableDynamicRangeProfilesMap::
3026 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3027 streams[0] = stream;
3028 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3029 jpegBufferSize);
3030 config.streamConfigCounter = streamConfigCounter++;
3031 s = injectionSession->configureInjectionStreams(config, chars);
3032 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3033
3034 stream = {streamId++,
3035 StreamType::OUTPUT,
3036 it.width,
3037 it.height,
3038 static_cast<PixelFormat>(it.format),
3039 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3040 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3041 Dataspace::UNKNOWN,
3042 static_cast<StreamRotation>(INT32_MAX),
3043 std::string(),
3044 jpegBufferSize,
3045 0,
3046 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3047 RequestAvailableDynamicRangeProfilesMap::
3048 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3049 streams[0] = stream;
3050 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
3051 jpegBufferSize);
3052 config.streamConfigCounter = streamConfigCounter++;
3053 s = injectionSession->configureInjectionStreams(config, chars);
3054 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError());
3055 }
3056
3057 ret = session->close();
3058 ASSERT_TRUE(ret.isOk());
3059 }
3060 }
3061
3062 // Check whether session parameters are supported for the injection camera. If the HAL
3063 // supports them, try to configure a preview stream using them.
3064 TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) {
3065 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3066 std::vector<AvailableStream> outputPreviewStreams;
3067 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3068 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3069
3070 for (const auto& name : cameraDeviceNames) {
3071 CameraMetadata metadata;
3072 std::shared_ptr<ICameraInjectionSession> injectionSession;
3073 std::shared_ptr<ICameraDevice> unusedDevice;
3074 openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/,
3075 &unusedDevice /*out*/);
3076 if (injectionSession == nullptr) {
3077 continue;
3078 }
3079
3080 std::shared_ptr<ICameraDeviceSession> session;
3081 ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session);
3082 ASSERT_TRUE(ret.isOk());
3083 ASSERT_NE(session, nullptr);
3084
3085 camera_metadata_t* staticMetaBuffer =
3086 reinterpret_cast<camera_metadata_t*>(metadata.metadata.data());
3087 CameraMetadata chars;
3088 chars.metadata = metadata.metadata;
3089
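// Skip devices that advertise no session keys or yield no session parameters to verify.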
3090 std::unordered_set<int32_t> availableSessionKeys;
3091 Status rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
3092 &availableSessionKeys);
3093 ASSERT_EQ(Status::OK, rc);
3094 if (availableSessionKeys.empty()) {
3095 ret = session->close();
3096 ASSERT_TRUE(ret.isOk());
3097 continue;
3098 }
3099
3100 android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings;
3101 android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams,
3102 modifiedSessionParams;
3103 constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW,
3104 &previewRequestSettings, &sessionParams);
3105 if (sessionParams.isEmpty()) {
3106 ret = session->close();
3107 ASSERT_TRUE(ret.isOk());
3108 continue;
3109 }
3110
3111 outputPreviewStreams.clear();
3112
3113 ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams,
3114 &previewThreshold));
3115 ASSERT_NE(0u, outputPreviewStreams.size());
3116
3117 Stream previewStream = {
3118 0,
3119 StreamType::OUTPUT,
3120 outputPreviewStreams[0].width,
3121 outputPreviewStreams[0].height,
3122 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3123 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3124 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3125 Dataspace::UNKNOWN,
3126 StreamRotation::ROTATION_0,
3127 std::string(),
3128 0,
3129 -1,
3130 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3131 RequestAvailableDynamicRangeProfilesMap::
3132 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
3133 std::vector<Stream> streams = {previewStream};
3134 StreamConfiguration config;
3135 config.streams = streams;
3136 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3137
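// Attach the filtered session parameters to the stream configuration.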
3138 modifiedSessionParams = sessionParams;
3139 camera_metadata_t* sessionParamsBuffer = sessionParams.release();
3140 uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer);
3141 config.sessionParams.metadata =
3142 std::vector(rawSessionParamsBuffer,
3143 rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer));
3144
3145 config.streamConfigCounter = 0;
3147 config.multiResolutionInputImage = false;
3148
3149 ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars);
3150 ASSERT_TRUE(s.isOk());
3151
3152 sessionParams.acquire(sessionParamsBuffer);
3153 // staticMetaBuffer points into metadata.metadata, which owns the allocation; do not free it here.
3154 ret = session->close();
3155 ASSERT_TRUE(ret.isOk());
3156 }
3157 }
3158
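// Verify stream use case configuration for RAW16 (cropped RAW) streams.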
3159 TEST_P(CameraAidlTest, configureStreamsUseCasesCroppedRaw) {
3160 AvailableStream rawStreamThreshold =
3161 {INT_MAX, INT_MAX, static_cast<int32_t>(PixelFormat::RAW16)};
3162 configureStreamUseCaseInternal(rawStreamThreshold);
3163 }
3164
3165 // Verify that valid stream use cases can be configured successfully, and invalid use cases
3166 // fail stream configuration.
3167 TEST_P(CameraAidlTest, configureStreamsUseCases) {
3168 AvailableStream previewStreamThreshold =
3169 {kMaxPreviewWidth, kMaxPreviewHeight, static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
3170 configureStreamUseCaseInternal(previewStreamThreshold);
3171 }
3172
3173 // Validate the integrity of stream configuration metadata
3174 TEST_P(CameraAidlTest, validateStreamConfigurations) {
3175 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3176 std::vector<AvailableStream> outputStreams;
3177
3178 const int32_t scalerSizesTag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
3179 const int32_t scalerMinFrameDurationsTag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
3180 const int32_t scalerStallDurationsTag = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
3181
3182 for (const auto& name : cameraDeviceNames) {
3183 CameraMetadata meta;
3184 std::shared_ptr<ICameraDevice> cameraDevice;
3185
3186 openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3187 &cameraDevice /*out*/);
3188 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3189
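// 10-bit capable devices must list P010 output sizes together with matching BLOB min-frame and stall durations.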
3190 if (is10BitDynamicRangeCapable(staticMeta)) {
3191 std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
3192
3193 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
3194 &supportedBlobSizes);
3195 getSupportedSizes(staticMeta, scalerSizesTag, HAL_PIXEL_FORMAT_YCBCR_P010,
3196 &supportedP010Sizes);
3197 ASSERT_FALSE(supportedP010Sizes.empty());
3198
3199 std::vector<int64_t> blobMinDurations, blobStallDurations;
3200 getSupportedDurations(staticMeta, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3201 supportedP010Sizes, &blobMinDurations);
3202 getSupportedDurations(staticMeta, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
3203 supportedP010Sizes, &blobStallDurations);
3204 ASSERT_FALSE(blobStallDurations.empty());
3205 ASSERT_FALSE(blobMinDurations.empty());
3206 ASSERT_EQ(supportedP010Sizes.size(), blobMinDurations.size());
3207 ASSERT_EQ(blobMinDurations.size(), blobStallDurations.size());
3208 }
3209
3210 // TODO (b/280887191): Validate other aspects of stream configuration metadata...
3211
3212 ndk::ScopedAStatus ret = mSession->close();
3213 mSession = nullptr;
3214 ASSERT_TRUE(ret.isOk());
3215 }
3216 }
3217
3218 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest);
3219 INSTANTIATE_TEST_SUITE_P(
3220 PerInstance, CameraAidlTest,
3221 testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)),
3222 android::hardware::PrintInstanceNameToString);
3223
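// A minimal sketch of how this VTS module is typically invoked (the module name is assumed
// from the standard VTS build target and may differ in a given tree):
//   atest VtsAidlHalCameraProvider_TargetTest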