1 /* 2 * Copyright (C) 2023 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.DeviceAsWebcam; 18 19 import android.content.Context; 20 import android.graphics.Bitmap; 21 import android.graphics.BitmapFactory; 22 import android.graphics.Canvas; 23 import android.graphics.Matrix; 24 import android.graphics.Point; 25 import android.graphics.Rect; 26 import android.graphics.SurfaceTexture; 27 import android.hardware.HardwareBuffer; 28 import android.hardware.camera2.CameraAccessException; 29 import android.hardware.camera2.CameraCaptureSession; 30 import android.hardware.camera2.CameraCharacteristics; 31 import android.hardware.camera2.CameraDevice; 32 import android.hardware.camera2.CameraManager; 33 import android.hardware.camera2.CameraMetadata; 34 import android.hardware.camera2.CaptureRequest; 35 import android.hardware.camera2.params.MeteringRectangle; 36 import android.hardware.camera2.params.OutputConfiguration; 37 import android.hardware.camera2.params.SessionConfiguration; 38 import android.hardware.camera2.params.StreamConfigurationMap; 39 import android.hardware.display.DisplayManager; 40 import android.media.Image; 41 import android.media.ImageReader; 42 import android.media.ImageWriter; 43 import android.os.ConditionVariable; 44 import android.os.Handler; 45 import android.os.HandlerThread; 46 import android.util.ArrayMap; 47 import android.util.Log; 48 import android.util.Range; 49 import android.util.Rational; 50 import android.util.Size; 51 import android.view.Display; 52 import android.view.Surface; 53 54 import androidx.annotation.NonNull; 55 import androidx.annotation.Nullable; 56 57 import com.android.DeviceAsWebcam.utils.UserPrefs; 58 import com.android.deviceaswebcam.flags.Flags; 59 60 import java.lang.ref.WeakReference; 61 import java.nio.ByteBuffer; 62 import java.util.ArrayList; 63 import java.util.Arrays; 64 import java.util.Comparator; 65 import java.util.List; 66 import java.util.Objects; 67 import java.util.concurrent.ConcurrentHashMap; 68 import java.util.concurrent.Executor; 69 import java.util.concurrent.Executors; 70 import java.util.concurrent.ScheduledExecutorService; 71 import java.util.concurrent.TimeUnit; 72 import java.util.concurrent.atomic.AtomicBoolean; 73 import java.util.function.Consumer; 74 75 /** 76 * This class controls the operation of the camera - primarily through the public calls 77 * - startPreviewStreaming 78 * - startWebcamStreaming 79 * - stopPreviewStreaming 80 * - stopWebcamStreaming 81 * These calls do what they suggest - that is start / stop preview and webcam streams. They 82 * internally book-keep whether they need to start a preview stream alongside a webcam stream or 83 * by itself, and vice-versa. 84 * For the webcam stream, it delegates the job of interacting with the native service 85 * code - used for encoding ImageReader image callbacks, to the Foreground service (it stores a weak 86 * reference to the foreground service during construction). 
87 */ 88 public class CameraController { 89 private static final String TAG = "CameraController"; 90 private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE); 91 92 // Camera session state - when camera is actually being used 93 enum CameraStreamingState { 94 NO_STREAMING, 95 WEBCAM_STREAMING, 96 PREVIEW_STREAMING, 97 PREVIEW_AND_WEBCAM_STREAMING 98 }; 99 100 // Camera availability states 101 enum CameraAvailabilityState { 102 AVAILABLE, 103 UNAVAILABLE 104 }; 105 106 private static final int MAX_BUFFERS = 4; 107 // The ratio to the active array size that will be used to determine the metering rectangle 108 // size. 109 private static final float METERING_RECTANGLE_SIZE_RATIO = 0.15f; 110 111 @Nullable 112 private CameraId mBackCameraId = null; 113 @Nullable 114 private CameraId mFrontCameraId = null; 115 116 // Tracks if Webcam should drop performance optimizations to get the best quality. 117 private boolean mHighQualityModeEnabled = false; 118 119 private ImageReader mImgReader; 120 private Object mImgReaderLock = new Object(); 121 private ImageWriter mImageWriter; 122 123 // current camera session state 124 private CameraStreamingState mCurrentState = CameraStreamingState.NO_STREAMING; 125 126 // current camera availability state - to be accessed only from camera related callbacks which 127 // execute on mCameraCallbacksExecutor. This isn't a part of mCameraInfo since that is static 128 // information about a camera and has looser thread access requirements. 129 private ArrayMap<String, CameraAvailabilityState> mCameraAvailabilityState = new ArrayMap<>(); 130 131 private Context mContext; 132 private WeakReference<DeviceAsWebcamFgService> mServiceWeak; 133 private CaptureRequest.Builder mPreviewRequestBuilder; 134 private CameraManager mCameraManager; 135 private CameraDevice mCameraDevice; 136 private Handler mImageReaderHandler; 137 private Executor mCameraCallbacksExecutor; 138 private Executor mServiceEventsExecutor; 139 private SurfaceTexture mPreviewSurfaceTexture; 140 /** 141 * Registered by the Preview Activity, and called by CameraController when preview size changes 142 * as a result of the webcam stream changing. 143 */ 144 private Consumer<Size> mPreviewSizeChangeListener; 145 private Surface mPreviewSurface; 146 private Size mDisplaySize; 147 private Size mPreviewSize; 148 // Executor for ImageWriter thread - used when camera is evicted and webcam is streaming. 149 private ScheduledExecutorService mImageWriterEventsExecutor; 150 151 // This is set up only when we need to show the camera access blocked logo and reset 152 // when camera is available again - since its going to be a rare occurrence that camera is 153 // actually evicted when webcam is streaming. 
154 private byte[] mCombinedBitmapBytes; 155 156 private OutputConfiguration mPreviewOutputConfiguration; 157 private OutputConfiguration mWebcamOutputConfiguration; 158 private List<OutputConfiguration> mOutputConfigurations; 159 private CameraCaptureSession mCaptureSession; 160 private ConditionVariable mReadyToStream = new ConditionVariable(); 161 private ConditionVariable mCaptureSessionReady = new ConditionVariable(); 162 private AtomicBoolean mStartCaptureWebcamStream = new AtomicBoolean(false); 163 private final Object mSerializationLock = new Object(); 164 // timestamp -> Image 165 private ConcurrentHashMap<Long, ImageAndBuffer> mImageMap = new ConcurrentHashMap<>(); 166 private List<CameraId> mAvailableCameraIds = new ArrayList<>(); 167 @Nullable 168 private CameraId mCameraId = null; 169 private ArrayMap<CameraId, CameraInfo> mCameraInfoMap = new ArrayMap<>(); 170 @Nullable 171 private float[] mTapToFocusPoints = null; 172 private static class StreamConfigs { StreamConfigs(boolean mjpegP, int widthP, int heightP, int fpsP)173 StreamConfigs(boolean mjpegP, int widthP, int heightP, int fpsP) { 174 isMjpeg = mjpegP; 175 width = widthP; 176 height = heightP; 177 fps = fpsP; 178 } 179 180 boolean isMjpeg; 181 int width; 182 int height; 183 int fps; 184 }; 185 private StreamConfigs mStreamConfigs; 186 private CameraDevice.StateCallback mCameraStateCallback = new CameraDevice.StateCallback() { 187 @Override 188 public void onOpened(@NonNull CameraDevice cameraDevice) { 189 if (VERBOSE) { 190 Log.v(TAG, "Camera device opened, creating capture session now"); 191 } 192 mCameraDevice = cameraDevice; 193 mReadyToStream.open(); 194 } 195 196 @Override 197 public void onDisconnected(CameraDevice cameraDevice) { 198 if (VERBOSE) { 199 Log.v(TAG, "onDisconnected: " + cameraDevice.getId() + 200 " camera available state " + 201 mCameraAvailabilityState.get(cameraDevice.getId())); 202 } 203 handleDisconnected(); 204 } 205 206 private void handleDisconnected() { 207 mServiceEventsExecutor.execute(() -> { 208 synchronized (mSerializationLock) { 209 mCameraDevice = null; 210 stopStreamingAltogetherLocked(/*closeImageReader*/false); 211 if (mStartCaptureWebcamStream.get()) { 212 startShowingCameraUnavailableLogo(); 213 } 214 } 215 }); 216 } 217 @Override 218 public void onError(@NonNull CameraDevice cameraDevice, int error) { 219 if (VERBOSE) { 220 Log.e(TAG, "Camera id " + cameraDevice.getId() + ": onError " + error); 221 } 222 mReadyToStream.open(); 223 if (mStartCaptureWebcamStream.get()) { 224 startShowingCameraUnavailableLogo(); 225 } 226 } 227 }; 228 private CameraCaptureSession.CaptureCallback mCaptureCallback = 229 new CameraCaptureSession.CaptureCallback() {}; 230 231 private CameraCaptureSession.StateCallback mCameraCaptureSessionCallback = 232 new CameraCaptureSession.StateCallback() { 233 @Override 234 public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { 235 if (mCameraDevice == null) { 236 return; 237 } 238 mCaptureSession = cameraCaptureSession; 239 try { 240 mCaptureSession.setSingleRepeatingRequest( 241 mPreviewRequestBuilder.build(), mCameraCallbacksExecutor, 242 mCaptureCallback); 243 } catch (CameraAccessException e) { 244 Log.e(TAG, "setSingleRepeatingRequest failed", e); 245 } 246 mCaptureSessionReady.open(); 247 } 248 249 @Override 250 public void onConfigureFailed(@NonNull CameraCaptureSession captureSession) { 251 Log.e(TAG, "Failed to configure CameraCaptureSession"); 252 } 253 }; 254 255 private CameraManager.AvailabilityCallback 
mCameraAvailabilityCallbacks = 256 new CameraManager.AvailabilityCallback() { 257 @Override 258 public void onCameraAvailable(String cameraId) { 259 mCameraAvailabilityState.put(cameraId, CameraAvailabilityState.AVAILABLE); 260 if (VERBOSE) { 261 Log.v(TAG, "onCameraAvailable: " + cameraId); 262 } 263 // We want to attempt to start webcam streaming when : 264 // webcam was already streaming and the camera that was streaming became available. 265 // The attempt to start streaming the camera may succeed or fail. If it fails, 266 // (for example: if the camera is available but another client is using a camera which 267 // cannot be opened concurrently with mCameraId), it'll be handled by the onError 268 // callback. 269 if (mStartCaptureWebcamStream.get() && 270 mCameraAvailabilityState.get(mCameraId.mainCameraId) == 271 CameraAvailabilityState.AVAILABLE) { 272 if (VERBOSE) { 273 Log.v(TAG, "Camera available : try starting webcam stream for camera id " 274 + mCameraId.mainCameraId); 275 } 276 handleOnCameraAvailable(); 277 } 278 279 } 280 281 @Override 282 public void onCameraUnavailable(String cameraId) { 283 // We're unconditionally waiting for available - mStartCaptureWebcamStream will decide 284 // whether we need to do anything about it. 285 if (VERBOSE) { 286 Log.v(TAG, "Camera id " + cameraId + " unavailable"); 287 } 288 mCameraAvailabilityState.put(cameraId, CameraAvailabilityState.UNAVAILABLE); 289 } 290 }; 291 292 private ImageReader.OnImageAvailableListener mOnImageAvailableListener = 293 new ImageReader.OnImageAvailableListener() { 294 @Override 295 public void onImageAvailable(ImageReader reader) { 296 Image image; 297 HardwareBuffer hardwareBuffer; 298 long ts; 299 DeviceAsWebcamFgService service = mServiceWeak.get(); 300 synchronized (mImgReaderLock) { 301 if (reader != mImgReader) { 302 return; 303 } 304 if (service == null) { 305 Log.e(TAG, "Service is dead, what ?"); 306 return; 307 } 308 if (mImageMap.size() >= MAX_BUFFERS) { 309 Log.w(TAG, "Too many buffers acquired in onImageAvailable, returning"); 310 return; 311 } 312 // Get native HardwareBuffer from the next image (we should never 313 // accumulate images since we're not doing any compute work on the 314 // imageReader thread) and 315 // send it to the native layer for the encoder to process. 316 // Acquire latest Image and get the HardwareBuffer 317 image = reader.acquireNextImage(); 318 if (VERBOSE) { 319 Log.v(TAG, "Got acquired Image in onImageAvailable callback for reader " 320 + reader); 321 } 322 if (image == null) { 323 if (VERBOSE) { 324 Log.e(TAG, "More images than MAX acquired ?"); 325 } 326 return; 327 } 328 ts = image.getTimestamp(); 329 hardwareBuffer = image.getHardwareBuffer(); 330 } 331 mImageMap.put(ts, new ImageAndBuffer(image, hardwareBuffer)); 332 // Callback into DeviceAsWebcamFgService to encode image 333 if ((!mStartCaptureWebcamStream.get()) || (service.nativeEncodeImage( 334 hardwareBuffer, ts, getCurrentRotation()) != 0)) { 335 if (VERBOSE) { 336 Log.v(TAG, 337 "Couldn't get buffer immediately, returning image images. 
" 338 + "acquired size " 339 + mImageMap.size()); 340 } 341 returnImage(ts); 342 } 343 } 344 }; 345 346 private volatile float mZoomRatio; 347 private RotationProvider mRotationProvider; 348 private RotationUpdateListener mRotationUpdateListener = null; 349 private CameraInfo mCameraInfo = null; 350 private UserPrefs mUserPrefs; 351 VendorCameraPrefs mRroCameraInfo; 352 CameraController(Context context, WeakReference<DeviceAsWebcamFgService> serviceWeak)353 public CameraController(Context context, WeakReference<DeviceAsWebcamFgService> serviceWeak) { 354 mContext = context; 355 mServiceWeak = serviceWeak; 356 if (mContext == null) { 357 Log.e(TAG, "Application context is null!, something is going to go wrong"); 358 return; 359 } 360 startBackgroundThread(); 361 mCameraManager = mContext.getSystemService(CameraManager.class); 362 mDisplaySize = getDisplayPreviewSize(); 363 mCameraManager.registerAvailabilityCallback( 364 mCameraCallbacksExecutor, mCameraAvailabilityCallbacks); 365 mUserPrefs = new UserPrefs(mContext); 366 mHighQualityModeEnabled = Flags.highQualityToggle() && 367 mUserPrefs.fetchHighQualityModeEnabled(/*defaultValue*/ false); 368 mRroCameraInfo = createVendorCameraPrefs(mHighQualityModeEnabled); 369 refreshAvailableCameraIdList(); 370 refreshLensFacingCameraIds(); 371 372 mCameraId = fetchCameraIdFromUserPrefs(/*defaultCameraId*/ mBackCameraId); 373 mCameraInfo = getOrCreateCameraInfo(mCameraId); 374 mZoomRatio = mUserPrefs.fetchZoomRatio(mCameraId.toString(), /*defaultZoom*/ 1.0f); 375 376 mRotationProvider = new RotationProvider(context.getApplicationContext(), 377 mCameraInfo.getSensorOrientation(), mCameraInfo.getLensFacing()); 378 // Adds a listener to enable the RotationProvider so that we can get the rotation 379 // degrees info to rotate the webcam stream images. 380 mRotationProvider.addListener(mCameraCallbacksExecutor, rotation -> { 381 if (mRotationUpdateListener != null) { 382 mRotationUpdateListener.onRotationUpdated(rotation); 383 } 384 }); 385 } 386 387 @Nullable fetchCameraIdFromUserPrefs(@ullable CameraId defaultCameraId)388 private CameraId fetchCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) { 389 String cameraIdString = mUserPrefs.fetchCameraId(null); 390 CameraId cameraId = convertAndValidateCameraIdString(cameraIdString); 391 return cameraId != null ? cameraId : defaultCameraId; 392 } 393 394 @Nullable fetchBackCameraIdFromUserPrefs(@ullable CameraId defaultCameraId)395 private CameraId fetchBackCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) { 396 String cameraIdString = mUserPrefs.fetchBackCameraId(null); 397 CameraId cameraId = convertAndValidateCameraIdString(cameraIdString); 398 return cameraId != null ? cameraId : defaultCameraId; 399 } 400 401 @Nullable fetchFrontCameraIdFromUserPrefs(@ullable CameraId defaultCameraId)402 private CameraId fetchFrontCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) { 403 String cameraIdString = mUserPrefs.fetchFrontCameraId(null); 404 CameraId cameraId = convertAndValidateCameraIdString(cameraIdString); 405 return cameraId != null ? cameraId : defaultCameraId; 406 } 407 408 /** 409 * Converts the camera id string to {@link CameraId} and returns it only when it is includes in 410 * the available camera id list. 
411 */ 412 @Nullable convertAndValidateCameraIdString(@ullable String cameraIdString)413 private CameraId convertAndValidateCameraIdString(@Nullable String cameraIdString) { 414 CameraId cameraId = CameraId.fromCameraIdString(cameraIdString); 415 if (cameraId != null && !mAvailableCameraIds.contains(cameraId)) { 416 cameraId = null; 417 } 418 return cameraId; 419 } 420 convertARGBToRGBA(ByteBuffer argb)421 private void convertARGBToRGBA(ByteBuffer argb) { 422 // Android Bitmap.Config.ARGB_8888 is laid out as RGBA in an int and java ByteBuffer by 423 // default is big endian. 424 for (int i = 0; i < argb.capacity(); i+= 4) { 425 byte r = argb.get(i); 426 byte g = argb.get(i + 1); 427 byte b = argb.get(i + 2); 428 byte a = argb.get(i + 3); 429 430 //libyuv expects BGRA 431 argb.put(i, b); 432 argb.put(i + 1, g); 433 argb.put(i + 2, r); 434 argb.put(i + 3, a); 435 } 436 } 437 setupBitmaps(int width, int height)438 private void setupBitmaps(int width, int height) { 439 // Initialize logoBitmap. Should fit 'in' enclosed by any webcam stream 440 BitmapFactory.Options options = new BitmapFactory.Options(); 441 options.inPreferredConfig = Bitmap.Config.ARGB_8888; 442 // We want 1/2 of the screen being covered by the camera blocked logo 443 Bitmap logoBitmap = 444 BitmapFactory.decodeResource(mContext.getResources(), 445 R.drawable.camera_access_blocked, options); 446 int scaledWidth, scaledHeight; 447 if (logoBitmap.getWidth() > logoBitmap.getHeight()) { 448 scaledWidth = (int)(0.5 * width); 449 scaledHeight = 450 (int)(scaledWidth * (float)logoBitmap.getHeight() / logoBitmap.getWidth()); 451 } else { 452 scaledHeight = (int)(0.5 * height); 453 scaledWidth = 454 (int)(scaledHeight * (float)logoBitmap.getWidth() / logoBitmap.getHeight()); 455 } 456 // Combined Bitmap which will hold background + camera access blocked image 457 Bitmap combinedBitmap = 458 Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 459 Canvas canvas = new Canvas(combinedBitmap); 460 // Offsets to start composed image from 461 int offsetX = (width - scaledWidth) / 2; 462 int offsetY = (height - scaledHeight)/ 2; 463 int endX = offsetX + scaledWidth; 464 int endY = offsetY + scaledHeight; 465 canvas.drawBitmap(logoBitmap, 466 new Rect(0, 0, logoBitmap.getWidth(), logoBitmap.getHeight()), 467 new Rect(offsetX, offsetY, endX, endY), null); 468 ByteBuffer byteBuffer = ByteBuffer.allocate(combinedBitmap.getByteCount()); 469 combinedBitmap.copyPixelsToBuffer(byteBuffer); 470 convertARGBToRGBA(byteBuffer); 471 mCombinedBitmapBytes = byteBuffer.array(); 472 } 473 refreshAvailableCameraIdList()474 private void refreshAvailableCameraIdList() { 475 mAvailableCameraIds.clear(); 476 String[] cameraIdList; 477 try { 478 cameraIdList = mCameraManager.getCameraIdList(); 479 } catch (CameraAccessException e) { 480 Log.e(TAG, "Failed to retrieve the camera id list from CameraManager!", e); 481 return; 482 } 483 484 List<String> ignoredCameraList = mRroCameraInfo.getIgnoredCameraList(); 485 486 for (String cameraId : cameraIdList) { 487 // Skips the ignored cameras 488 if (ignoredCameraList.contains(cameraId)) { 489 continue; 490 } 491 492 CameraCharacteristics characteristics = getCameraCharacteristicsOrNull(cameraId); 493 494 if (characteristics == null) { 495 continue; 496 } 497 498 // Only lists backward compatible cameras 499 if (!isBackwardCompatible(characteristics)) { 500 continue; 501 } 502 503 List<VendorCameraPrefs.PhysicalCameraInfo> physicalCameraInfos = 504 mRroCameraInfo.getPhysicalCameraInfos(cameraId); 505 506 if 
(physicalCameraInfos == null || physicalCameraInfos.isEmpty()) { 507 mAvailableCameraIds.add(new CameraId(cameraId, null)); 508 continue; 509 } 510 511 for (VendorCameraPrefs.PhysicalCameraInfo physicalCameraInfo : 512 physicalCameraInfos) { 513 // Only lists backward compatible cameras 514 CameraCharacteristics physChars = getCameraCharacteristicsOrNull( 515 physicalCameraInfo.physicalCameraId); 516 if (isBackwardCompatible(physChars)) { 517 mAvailableCameraIds.add( 518 new CameraId(cameraId, physicalCameraInfo.physicalCameraId)); 519 } 520 } 521 } 522 } 523 refreshLensFacingCameraIds()524 private void refreshLensFacingCameraIds() { 525 // Loads the default back and front camera from the user prefs. 526 mBackCameraId = fetchBackCameraIdFromUserPrefs(null); 527 mFrontCameraId = fetchFrontCameraIdFromUserPrefs(null); 528 529 if (mBackCameraId != null && mFrontCameraId != null) { 530 return; 531 } 532 533 for (CameraId cameraId : mAvailableCameraIds) { 534 CameraCharacteristics characteristics = getCameraCharacteristicsOrNull( 535 cameraId.mainCameraId); 536 if (characteristics == null) { 537 continue; 538 } 539 540 Integer lensFacing = getCameraCharacteristic(characteristics, 541 CameraCharacteristics.LENS_FACING); 542 if (lensFacing == null) { 543 continue; 544 } 545 if (mBackCameraId == null && lensFacing == CameraMetadata.LENS_FACING_BACK) { 546 mBackCameraId = cameraId; 547 } else if (mFrontCameraId == null 548 && lensFacing == CameraMetadata.LENS_FACING_FRONT) { 549 mFrontCameraId = cameraId; 550 } 551 } 552 } 553 554 /** 555 * Returns the available {@link CameraId} list. 556 */ getAvailableCameraIds()557 public List<CameraId> getAvailableCameraIds() { 558 return mAvailableCameraIds; 559 } 560 getOrCreateCameraInfo(CameraId cameraId)561 public CameraInfo getOrCreateCameraInfo(CameraId cameraId) { 562 CameraInfo cameraInfo = mCameraInfoMap.get(cameraId); 563 if (cameraInfo != null) { 564 return cameraInfo; 565 } 566 567 cameraInfo = createCameraInfo(cameraId); 568 mCameraInfoMap.put(cameraId, cameraInfo); 569 return cameraInfo; 570 } 571 createCameraInfo(CameraId cameraId)572 private CameraInfo createCameraInfo(CameraId cameraId) { 573 CameraCharacteristics chars = getCameraCharacteristicsOrNull(cameraId.mainCameraId); 574 CameraCharacteristics physicalChars = getCameraCharacteristicsOrNull( 575 cameraId.physicalCameraId != null ? cameraId.physicalCameraId 576 : cameraId.mainCameraId); 577 // Retrieves the physical camera zoom ratio range from the vendor camera prefs. 578 Range<Float> zoomRatioRange = mRroCameraInfo.getPhysicalCameraZoomRatioRange(cameraId); 579 // Retrieves the physical camera zoom ratio range if no custom data is found. 580 if (zoomRatioRange == null) { 581 zoomRatioRange = getCameraCharacteristic(physicalChars, 582 CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); 583 } 584 585 // Logical cameras will be STANDARD category by default. For physical cameras, their 586 // categories should be specified by the vendor. If the category is not provided, use 587 // focal lengths to determine the physical camera's category. 588 CameraCategory cameraCategory = CameraCategory.STANDARD; 589 if (cameraId.physicalCameraId != null) { 590 cameraCategory = mRroCameraInfo.getCameraCategory(cameraId); 591 if (cameraCategory == CameraCategory.UNKNOWN) { 592 if (physicalChars != null) { 593 cameraCategory = calculateCameraCategoryByFocalLengths(physicalChars); 594 } 595 } 596 } 597 // We should consider using a builder pattern here if the parameters grow a lot. 
598 return new CameraInfo( 599 new CameraId(cameraId.mainCameraId, cameraId.physicalCameraId), 600 getCameraCharacteristic(chars, CameraCharacteristics.LENS_FACING), 601 getCameraCharacteristic(chars, CameraCharacteristics.SENSOR_ORIENTATION), 602 zoomRatioRange, 603 getCameraCharacteristic(chars, 604 CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE), 605 isFacePrioritySupported(chars), 606 isStreamUseCaseSupported(chars), 607 cameraCategory 608 ); 609 } 610 calculateCameraCategoryByFocalLengths( CameraCharacteristics characteristics)611 private CameraCategory calculateCameraCategoryByFocalLengths( 612 CameraCharacteristics characteristics) { 613 float[] focalLengths = characteristics.get( 614 CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS); 615 616 if (focalLengths == null) { 617 return CameraCategory.UNKNOWN; 618 } 619 620 final int standardCamera = 0x1; 621 final int telephotoCamera = 0x2; 622 final int wideAngleCamera = 0x4; 623 final int ultraWideCamera = 0x8; 624 625 int cameraCategory = 0; 626 627 for (float focalLength : focalLengths) { 628 if (focalLength >= 50) { 629 cameraCategory |= telephotoCamera; 630 } else if (focalLength >= 30) { 631 cameraCategory |= standardCamera; 632 } else if (focalLength >= 20) { 633 cameraCategory |= wideAngleCamera; 634 } else { 635 cameraCategory |= ultraWideCamera; 636 } 637 } 638 639 return switch (cameraCategory) { 640 case telephotoCamera -> CameraCategory.TELEPHOTO; 641 case wideAngleCamera -> CameraCategory.WIDE_ANGLE; 642 case ultraWideCamera -> CameraCategory.ULTRA_WIDE; 643 default -> CameraCategory.STANDARD; 644 }; 645 } 646 647 @Nullable getCameraCharacteristic(CameraCharacteristics chars, CameraCharacteristics.Key<T> key)648 private static <T> T getCameraCharacteristic(CameraCharacteristics chars, 649 CameraCharacteristics.Key<T> key) { 650 return chars.get(key); 651 } 652 653 @Nullable getCameraCharacteristicsOrNull(String cameraId)654 private CameraCharacteristics getCameraCharacteristicsOrNull(String cameraId) { 655 try { 656 CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics( 657 cameraId); 658 return characteristics; 659 } catch (CameraAccessException e) { 660 Log.e(TAG, "Failed to get characteristics for camera " + cameraId 661 + ".", e); 662 } 663 return null; 664 } 665 666 @Nullable getCameraCharacteristic(String cameraId, CameraCharacteristics.Key<T> key)667 private <T> T getCameraCharacteristic(String cameraId, CameraCharacteristics.Key<T> key) { 668 CameraCharacteristics chars = getCameraCharacteristicsOrNull(cameraId); 669 if (chars != null) { 670 return chars.get(key); 671 } 672 return null; 673 } 674 setWebcamStreamConfig(boolean mjpeg, int width, int height, int fps)675 public void setWebcamStreamConfig(boolean mjpeg, int width, int height, int fps) { 676 if (VERBOSE) { 677 Log.v(TAG, "Set stream config service : mjpeg ? 
" + mjpeg + " width" + width + 678 " height " + height + " fps " + fps); 679 } 680 synchronized (mSerializationLock) { 681 long usage = HardwareBuffer.USAGE_CPU_READ_OFTEN | HardwareBuffer.USAGE_VIDEO_ENCODE; 682 mStreamConfigs = new StreamConfigs(mjpeg, width, height, fps); 683 synchronized (mImgReaderLock) { 684 if (mImgReader != null) { 685 mImgReader.close(); 686 } 687 mImgReader = new ImageReader.Builder(width, height) 688 .setMaxImages(MAX_BUFFERS) 689 .setDefaultHardwareBufferFormat(HardwareBuffer.YCBCR_420_888) 690 .setUsage(usage) 691 .build(); 692 mImgReader.setOnImageAvailableListener(mOnImageAvailableListener, 693 mImageReaderHandler); 694 } 695 } 696 } 697 fillImageWithCameraAccessBlockedLogo(Image img)698 private void fillImageWithCameraAccessBlockedLogo(Image img) { 699 Image.Plane[] planes = img.getPlanes(); 700 701 ByteBuffer rgbaBuffer = planes[0].getBuffer(); 702 // Copy the bitmap array 703 rgbaBuffer.put(mCombinedBitmapBytes); 704 } 705 handleOnCameraAvailable()706 private void handleOnCameraAvailable() { 707 // Offload to mServiceEventsExecutor since any camera operations which require 708 // mSerializationLock should be performed on mServiceEventsExecutor thread. 709 mServiceEventsExecutor.execute(() -> { 710 synchronized (mSerializationLock) { 711 if (mCameraDevice != null) { 712 return; 713 } 714 stopShowingCameraUnavailableLogo(); 715 setWebcamStreamConfig(mStreamConfigs.isMjpeg, mStreamConfigs.width, 716 mStreamConfigs.height, mStreamConfigs.fps); 717 startWebcamStreamingNoOffload(); 718 } 719 }); 720 } 721 722 /** 723 * Stops showing the camera unavailable logo. Should only be called on the 724 * mServiceEventsExecutor thread 725 */ stopShowingCameraUnavailableLogo()726 private void stopShowingCameraUnavailableLogo() { 727 // destroy the executor since camera getting evicted would be a rare occurrence 728 synchronized (mSerializationLock) { 729 if (mImageWriterEventsExecutor != null) { 730 mImageWriterEventsExecutor.shutdown(); 731 } 732 mImageWriterEventsExecutor = null; 733 mImageWriter = null; 734 mCombinedBitmapBytes = null; 735 } 736 } 737 startShowingCameraUnavailableLogo()738 private void startShowingCameraUnavailableLogo() { 739 mServiceEventsExecutor.execute(() -> { 740 startShowingCameraUnavailableLogoNoOffload(); 741 }); 742 } 743 744 /** 745 * Starts showing the camera unavailable logo. 
Should only be called on the 746 * mServiceEventsExecutor thread 747 */ startShowingCameraUnavailableLogoNoOffload()748 private void startShowingCameraUnavailableLogoNoOffload() { 749 synchronized (mSerializationLock) { 750 setupBitmaps(mStreamConfigs.width, mStreamConfigs.height); 751 long usage = HardwareBuffer.USAGE_CPU_READ_OFTEN; 752 synchronized (mImgReaderLock) { 753 if (mImgReader != null) { 754 mImgReader.close(); 755 } 756 mImgReader = new ImageReader.Builder( 757 mStreamConfigs.width, mStreamConfigs.height) 758 .setMaxImages(MAX_BUFFERS) 759 .setDefaultHardwareBufferFormat(HardwareBuffer.RGBA_8888) 760 .setUsage(usage) 761 .build(); 762 763 mImgReader.setOnImageAvailableListener(mOnImageAvailableListener, 764 mImageReaderHandler); 765 } 766 mImageWriter = ImageWriter.newInstance(mImgReader.getSurface(), MAX_BUFFERS); 767 // In effect, the webcam stream has started 768 mImageWriterEventsExecutor = Executors.newScheduledThreadPool(1); 769 mImageWriterEventsExecutor.scheduleAtFixedRate(new Runnable() { 770 @Override 771 public void run() { 772 Image img = mImageWriter.dequeueInputImage(); 773 // Fill in image 774 fillImageWithCameraAccessBlockedLogo(img); 775 mImageWriter.queueInputImage(img); 776 } 777 }, /*initialDelay*/0, /*fps period ms*/1000 / mStreamConfigs.fps, 778 TimeUnit.MILLISECONDS); 779 } 780 } 781 782 /** 783 * Must be called with mSerializationLock held on mServiceExecutor thread. 784 */ openCameraBlocking()785 private void openCameraBlocking() { 786 if (mCameraManager == null) { 787 Log.e(TAG, "CameraManager is not initialized, aborting"); 788 return; 789 } 790 if (mCameraId == null) { 791 Log.e(TAG, "No camera is found on the device, aborting"); 792 return; 793 } 794 if (mCameraDevice != null) { 795 mCameraDevice.close(); 796 mCameraDevice = null; 797 } 798 try { 799 mCameraManager.openCamera(mCameraId.mainCameraId, mCameraCallbacksExecutor, 800 mCameraStateCallback); 801 } catch (CameraAccessException e) { 802 Log.e(TAG, "openCamera failed for cameraId : " + mCameraId.mainCameraId, e); 803 startShowingCameraUnavailableLogo(); 804 } 805 mReadyToStream.block(); 806 mReadyToStream.close(); 807 } 808 setupPreviewOnlyStreamLocked(SurfaceTexture previewSurfaceTexture)809 private void setupPreviewOnlyStreamLocked(SurfaceTexture previewSurfaceTexture) { 810 setupPreviewOnlyStreamLocked(new Surface(previewSurfaceTexture)); 811 } 812 setupPreviewOnlyStreamLocked(Surface previewSurface)813 private void setupPreviewOnlyStreamLocked(Surface previewSurface) { 814 mPreviewSurface = previewSurface; 815 openCameraBlocking(); 816 mPreviewRequestBuilder = createInitialPreviewRequestBuilder(mPreviewSurface); 817 if (mPreviewRequestBuilder == null) { 818 return; 819 } 820 mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface); 821 if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) { 822 mPreviewOutputConfiguration.setStreamUseCase( 823 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW); 824 } 825 826 // So that we don't have to reconfigure if / when the preview activity is turned off / 827 // on again. 
828 mWebcamOutputConfiguration = null; 829 mOutputConfigurations = Arrays.asList(mPreviewOutputConfiguration); 830 mCurrentState = CameraStreamingState.PREVIEW_STREAMING; 831 createCaptureSessionBlocking(); 832 } 833 createInitialPreviewRequestBuilder(Surface targetSurface)834 private CaptureRequest.Builder createInitialPreviewRequestBuilder(Surface targetSurface) { 835 CaptureRequest.Builder captureRequestBuilder; 836 try { 837 captureRequestBuilder = 838 mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 839 } catch (CameraAccessException e) { 840 Log.e(TAG, "createCaptureRequest failed", e); 841 stopStreamingAltogetherLocked(); 842 startShowingCameraUnavailableLogoNoOffload(); 843 return null; 844 } 845 846 int currentFps = 30; 847 if (mStreamConfigs != null) { 848 currentFps = mStreamConfigs.fps; 849 } 850 Range<Integer> fpsRange; 851 if (currentFps != 0) { 852 fpsRange = new Range<>(currentFps, currentFps); 853 } else { 854 fpsRange = new Range<>(30, 30); 855 } 856 captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange); 857 captureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, mZoomRatio); 858 captureRequestBuilder.addTarget(targetSurface); 859 captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, 860 CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); 861 if (mCameraInfo.isFacePrioritySupported()) { 862 captureRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, 863 CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY); 864 } 865 866 return captureRequestBuilder; 867 } 868 checkArrayContains(@ullable int[] array, int value)869 private static boolean checkArrayContains(@Nullable int[] array, int value) { 870 if (array == null) { 871 return false; 872 } 873 for (int val : array) { 874 if (val == value) { 875 return true; 876 } 877 } 878 879 return false; 880 } 881 isBackwardCompatible(CameraCharacteristics chars)882 private static boolean isBackwardCompatible(CameraCharacteristics chars) { 883 int[] availableCapabilities = getCameraCharacteristic(chars, 884 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 885 return checkArrayContains(availableCapabilities, 886 CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE); 887 } 888 isFacePrioritySupported(CameraCharacteristics chars)889 private static boolean isFacePrioritySupported(CameraCharacteristics chars) { 890 int[] availableSceneModes = getCameraCharacteristic(chars, 891 CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES); 892 return checkArrayContains( 893 availableSceneModes, CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY); 894 } 895 isStreamUseCaseSupported(CameraCharacteristics chars)896 private static boolean isStreamUseCaseSupported(CameraCharacteristics chars) { 897 int[] caps = getCameraCharacteristic(chars, 898 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 899 return checkArrayContains( 900 caps, CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE); 901 } 902 903 // CameraManager which populates the mandatory streams uses the same computation. 
getDisplayPreviewSize()904 private Size getDisplayPreviewSize() { 905 Size ret = new Size(1920, 1080); 906 DisplayManager displayManager = 907 mContext.getSystemService(DisplayManager.class); 908 Display display = displayManager.getDisplay(Display.DEFAULT_DISPLAY); 909 if (display != null) { 910 Point sz = new Point(); 911 display.getRealSize(sz); 912 int width = sz.x; 913 int height = sz.y; 914 915 if (height > width) { 916 height = width; 917 width = sz.y; 918 } 919 ret = new Size(width, height); 920 } else { 921 Log.e(TAG, "Invalid default display!"); 922 } 923 return ret; 924 } 925 926 // Check whether we satisfy mandatory stream combinations for stream use use case shouldUseStreamUseCase()927 private boolean shouldUseStreamUseCase() { 928 if (mHighQualityModeEnabled) { 929 // Do not use streamusecase if high quality mode is enabled. 930 return false; 931 } 932 // Webcam stream - YUV should be <= 1440p 933 // Preview stream should be <= PREVIEW - which is already guaranteed by 934 // getSuitablePreviewSize() 935 if (mWebcamOutputConfiguration != null && mStreamConfigs != null && 936 (mStreamConfigs.width * mStreamConfigs.height) > (1920 * 1440)) { 937 return false; 938 } 939 return true; 940 } 941 setupPreviewStreamAlongsideWebcamStreamLocked( SurfaceTexture previewSurfaceTexture)942 private void setupPreviewStreamAlongsideWebcamStreamLocked( 943 SurfaceTexture previewSurfaceTexture) { 944 setupPreviewStreamAlongsideWebcamStreamLocked(new Surface(previewSurfaceTexture)); 945 } 946 setupPreviewStreamAlongsideWebcamStreamLocked(Surface previewSurface)947 private void setupPreviewStreamAlongsideWebcamStreamLocked(Surface previewSurface) { 948 if (VERBOSE) { 949 Log.v(TAG, "setupPreviewAlongsideWebcam"); 950 } 951 mPreviewSurface = previewSurface; 952 mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface); 953 if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) { 954 mPreviewOutputConfiguration.setStreamUseCase( 955 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW); 956 } 957 958 mPreviewRequestBuilder.addTarget(mPreviewSurface); 959 mOutputConfigurations = Arrays.asList(mPreviewOutputConfiguration, 960 mWebcamOutputConfiguration); 961 962 mCurrentState = CameraStreamingState.PREVIEW_AND_WEBCAM_STREAMING; 963 createCaptureSessionBlocking(); 964 } 965 startPreviewStreaming(SurfaceTexture surfaceTexture, Size previewSize, Consumer<Size> previewSizeChangeListener)966 public void startPreviewStreaming(SurfaceTexture surfaceTexture, Size previewSize, 967 Consumer<Size> previewSizeChangeListener) { 968 // Started on a background thread since we don't want to be blocking either the activity's 969 // or the service's main thread (we call blocking camera open in these methods internally) 970 mServiceEventsExecutor.execute(new Runnable() { 971 @Override 972 public void run() { 973 synchronized (mSerializationLock) { 974 mPreviewSurfaceTexture = surfaceTexture; 975 mPreviewSize = previewSize; 976 mPreviewSizeChangeListener = previewSizeChangeListener; 977 switch (mCurrentState) { 978 case NO_STREAMING: 979 setupPreviewOnlyStreamLocked(surfaceTexture); 980 break; 981 case WEBCAM_STREAMING: 982 setupPreviewStreamAlongsideWebcamStreamLocked(surfaceTexture); 983 break; 984 case PREVIEW_STREAMING: 985 case PREVIEW_AND_WEBCAM_STREAMING: 986 Log.e(TAG, "Incorrect current state for startPreviewStreaming " + 987 mCurrentState); 988 } 989 } 990 } 991 }); 992 } 993 setupWebcamOnlyStreamAndOpenCameraLocked()994 private void setupWebcamOnlyStreamAndOpenCameraLocked() { 
995 // Setup outputs 996 if (VERBOSE) { 997 Log.v(TAG, "setupWebcamOnly"); 998 } 999 Surface surface = mImgReader.getSurface(); 1000 openCameraBlocking(); 1001 mCurrentState = CameraStreamingState.WEBCAM_STREAMING; 1002 if (mCameraDevice != null) { 1003 mPreviewRequestBuilder = createInitialPreviewRequestBuilder(surface); 1004 if (mPreviewRequestBuilder == null) { 1005 Log.e(TAG, "Failed to create the webcam stream."); 1006 return; 1007 } 1008 mWebcamOutputConfiguration = new OutputConfiguration(surface); 1009 if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) { 1010 mWebcamOutputConfiguration.setStreamUseCase( 1011 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL); 1012 } 1013 mOutputConfigurations = Arrays.asList(mWebcamOutputConfiguration); 1014 createCaptureSessionBlocking(); 1015 } 1016 } 1017 setupWebcamStreamAndReconfigureSessionLocked()1018 private void setupWebcamStreamAndReconfigureSessionLocked() { 1019 // Setup outputs 1020 if (VERBOSE) { 1021 Log.v(TAG, "setupWebcamStreamAndReconfigureSession"); 1022 } 1023 Surface surface = mImgReader.getSurface(); 1024 mPreviewRequestBuilder.addTarget(surface); 1025 mWebcamOutputConfiguration = new OutputConfiguration(surface); 1026 if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) { 1027 mWebcamOutputConfiguration.setStreamUseCase( 1028 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL); 1029 } 1030 mCurrentState = CameraStreamingState.PREVIEW_AND_WEBCAM_STREAMING; 1031 mOutputConfigurations = 1032 Arrays.asList(mWebcamOutputConfiguration, mPreviewOutputConfiguration); 1033 createCaptureSessionBlocking(); 1034 } 1035 1036 /** 1037 * Adjust preview output configuration when preview size is changed. 1038 */ adjustPreviewOutputConfiguration()1039 private void adjustPreviewOutputConfiguration() { 1040 if (mPreviewSurfaceTexture == null || mPreviewSurface == null) { 1041 return; 1042 } 1043 1044 Size suitablePreviewSize = getSuitablePreviewSize(); 1045 // If the required preview size is the same, don't need to adjust the output configuration 1046 if (Objects.equals(suitablePreviewSize, mPreviewSize)) { 1047 return; 1048 } 1049 1050 // Removes the original preview surface 1051 mPreviewRequestBuilder.removeTarget(mPreviewSurface); 1052 // Adjusts the SurfaceTexture default buffer size to match the new preview size 1053 mPreviewSurfaceTexture.setDefaultBufferSize(suitablePreviewSize.getWidth(), 1054 suitablePreviewSize.getHeight()); 1055 mPreviewSize = suitablePreviewSize; 1056 mPreviewRequestBuilder.addTarget(mPreviewSurface); 1057 mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface); 1058 if (mCameraInfo.isStreamUseCaseSupported()) { 1059 mPreviewOutputConfiguration.setStreamUseCase( 1060 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW); 1061 } 1062 1063 mOutputConfigurations = mWebcamOutputConfiguration != null ? Arrays.asList( 1064 mWebcamOutputConfiguration, mPreviewOutputConfiguration) : Arrays.asList( 1065 mPreviewOutputConfiguration); 1066 1067 // Invokes the preview size change listener so that the preview activity can adjust its 1068 // size and scale to match the new size. 
1069 if (mPreviewSizeChangeListener != null) { 1070 mPreviewSizeChangeListener.accept(suitablePreviewSize); 1071 } 1072 } startWebcamStreaming()1073 public void startWebcamStreaming() { 1074 mServiceEventsExecutor.execute(() -> { 1075 // Started on a background thread since we don't want to be blocking the service's main 1076 // thread (we call blocking camera open in these methods internally) 1077 startWebcamStreamingNoOffload(); 1078 }); 1079 } 1080 1081 /** 1082 * Starts webcam streaming. This should only be called on the service events executor thread. 1083 */ startWebcamStreamingNoOffload()1084 public void startWebcamStreamingNoOffload() { 1085 mStartCaptureWebcamStream.set(true); 1086 synchronized (mSerializationLock) { 1087 synchronized (mImgReaderLock) { 1088 if (mImgReader == null) { 1089 Log.e(TAG, 1090 "Webcam streaming requested without ImageReader initialized"); 1091 return; 1092 } 1093 } 1094 switch (mCurrentState) { 1095 // Our current state could also be webcam streaming and we want to start the 1096 // camera again - example : we never had the camera and were streaming the 1097 // camera unavailable logo - when camera becomes available we actually want to 1098 // start streaming camera frames. 1099 case WEBCAM_STREAMING: 1100 case NO_STREAMING: 1101 setupWebcamOnlyStreamAndOpenCameraLocked(); 1102 break; 1103 case PREVIEW_STREAMING: 1104 adjustPreviewOutputConfiguration(); 1105 // Its okay to recreate an already running camera session with 1106 // preview since the 'glitch' that we see will not be on the webcam 1107 // stream. 1108 setupWebcamStreamAndReconfigureSessionLocked(); 1109 break; 1110 case PREVIEW_AND_WEBCAM_STREAMING: 1111 if (mCameraDevice == null) { 1112 // We had been evicted and were streaming fake webcam streams, 1113 // preview activity was selected, and then camera became available. 
1114 setupWebcamOnlyStreamAndOpenCameraLocked(); 1115 if (mPreviewSurface != null) { 1116 setupPreviewStreamAlongsideWebcamStreamLocked(mPreviewSurface); 1117 } 1118 } else { 1119 Log.e(TAG, "Incorrect current state for startWebcamStreaming " 1120 + mCurrentState + " since webcam and preview already streaming"); 1121 } 1122 } 1123 } 1124 } 1125 stopPreviewStreamOnlyLocked()1126 private void stopPreviewStreamOnlyLocked() { 1127 mPreviewRequestBuilder.removeTarget(mPreviewSurface); 1128 mOutputConfigurations = Arrays.asList(mWebcamOutputConfiguration); 1129 createCaptureSessionBlocking(); 1130 mPreviewSurfaceTexture = null; 1131 mPreviewSizeChangeListener = null; 1132 mPreviewSurface = null; 1133 mPreviewSize = null; 1134 mCurrentState = CameraStreamingState.WEBCAM_STREAMING; 1135 } 1136 stopPreviewStreaming()1137 public void stopPreviewStreaming() { 1138 // Started on a background thread since we don't want to be blocking either the activity's 1139 // or the service's main thread (we call blocking camera open in these methods internally) 1140 mServiceEventsExecutor.execute(new Runnable() { 1141 @Override 1142 public void run() { 1143 synchronized (mSerializationLock) { 1144 switch (mCurrentState) { 1145 case PREVIEW_AND_WEBCAM_STREAMING: 1146 stopPreviewStreamOnlyLocked(); 1147 break; 1148 case PREVIEW_STREAMING: 1149 stopStreamingAltogetherLocked(); 1150 break; 1151 case NO_STREAMING: 1152 case WEBCAM_STREAMING: 1153 Log.e(TAG, 1154 "Incorrect current state for stopPreviewStreaming " + 1155 mCurrentState); 1156 } 1157 } 1158 } 1159 }); 1160 } 1161 stopWebcamStreamOnlyLocked()1162 private void stopWebcamStreamOnlyLocked() { 1163 // Re-configure session to have only the preview stream 1164 // Setup outputs 1165 mPreviewRequestBuilder.removeTarget(mImgReader.getSurface()); 1166 mOutputConfigurations = 1167 Arrays.asList(mPreviewOutputConfiguration); 1168 mCurrentState = CameraStreamingState.PREVIEW_STREAMING; 1169 mWebcamOutputConfiguration = null; 1170 createCaptureSessionBlocking(); 1171 } 1172 stopStreamingAltogetherLocked()1173 private void stopStreamingAltogetherLocked() { 1174 stopStreamingAltogetherLocked(/*closeImageReader*/true); 1175 } 1176 stopStreamingAltogetherLocked(boolean closeImageReader)1177 private void stopStreamingAltogetherLocked(boolean closeImageReader) { 1178 if (VERBOSE) { 1179 Log.v(TAG, "StopStreamingAltogether"); 1180 } 1181 mCurrentState = CameraStreamingState.NO_STREAMING; 1182 synchronized (mImgReaderLock) { 1183 if (closeImageReader && mImgReader != null) { 1184 mImgReader.close(); 1185 mImgReader = null; 1186 } 1187 } 1188 if (mCameraDevice != null) { 1189 mCameraDevice.close(); 1190 } 1191 mCameraDevice = null; 1192 mWebcamOutputConfiguration = null; 1193 mPreviewOutputConfiguration = null; 1194 mTapToFocusPoints = null; 1195 mReadyToStream.close(); 1196 } 1197 stopWebcamStreaming()1198 public void stopWebcamStreaming() { 1199 // Started on a background thread since we don't want to be blocking the service's main 1200 // thread (we call blocking camera open in these methods internally) 1201 mServiceEventsExecutor.execute(new Runnable() { 1202 @Override 1203 public void run() { 1204 mStartCaptureWebcamStream.set(false); 1205 synchronized (mSerializationLock) { 1206 switch (mCurrentState) { 1207 case PREVIEW_AND_WEBCAM_STREAMING: 1208 stopWebcamStreamOnlyLocked(); 1209 break; 1210 case WEBCAM_STREAMING: 1211 stopStreamingAltogetherLocked(); 1212 break; 1213 case PREVIEW_STREAMING: 1214 Log.e(TAG, 1215 "Incorrect current state for stopWebcamStreaming " + 1216 
mCurrentState); 1217 return; 1218 } 1219 1220 if (mImageWriterEventsExecutor != null) { 1221 stopShowingCameraUnavailableLogo(); 1222 } 1223 } 1224 } 1225 }); 1226 } 1227 startBackgroundThread()1228 private void startBackgroundThread() { 1229 HandlerThread imageReaderThread = new HandlerThread("SdkCameraFrameProviderThread"); 1230 imageReaderThread.start(); 1231 mImageReaderHandler = new Handler(imageReaderThread.getLooper()); 1232 // We need two executor threads since the surface texture add / remove calls from the fg 1233 // service are going to be served on the main thread. To not wait on capture session 1234 // creation, onCaptureSequenceCompleted we need a new thread to cater to preview surface 1235 // addition / removal. 1236 // b/277099495 has additional context. 1237 mCameraCallbacksExecutor = Executors.newSingleThreadExecutor(); 1238 mServiceEventsExecutor = Executors.newSingleThreadExecutor(); 1239 } 1240 createCaptureSessionBlocking()1241 private void createCaptureSessionBlocking() { 1242 if (mCameraId.physicalCameraId != null) { 1243 for (OutputConfiguration config : mOutputConfigurations) { 1244 config.setPhysicalCameraId(mCameraId.physicalCameraId); 1245 } 1246 } 1247 // In case we're fake streaming camera frames. 1248 if (mCameraDevice == null) { 1249 return; 1250 } 1251 try { 1252 mCameraDevice.createCaptureSession( 1253 new SessionConfiguration( 1254 SessionConfiguration.SESSION_REGULAR, mOutputConfigurations, 1255 mCameraCallbacksExecutor, mCameraCaptureSessionCallback)); 1256 mCaptureSessionReady.block(); 1257 mCaptureSessionReady.close(); 1258 } catch (CameraAccessException e) { 1259 Log.e(TAG, "createCaptureSession failed", e); 1260 stopStreamingAltogetherLocked(); 1261 startShowingCameraUnavailableLogoNoOffload(); 1262 } 1263 } 1264 returnImage(long timestamp)1265 public void returnImage(long timestamp) { 1266 ImageAndBuffer imageAndBuffer = mImageMap.get(timestamp); 1267 if (imageAndBuffer == null) { 1268 Log.e(TAG, "Image with timestamp " + timestamp + 1269 " was never encoded / already returned"); 1270 return; 1271 } 1272 imageAndBuffer.buffer.close(); 1273 imageAndBuffer.image.close(); 1274 mImageMap.remove(timestamp); 1275 if (VERBOSE) { 1276 Log.v(TAG, "Returned image " + timestamp); 1277 } 1278 } 1279 1280 /** 1281 * Returns the {@link CameraInfo} of the working camera. 1282 */ getCameraInfo()1283 public CameraInfo getCameraInfo() { 1284 return mCameraInfo; 1285 } 1286 1287 /** 1288 * Sets the new zoom ratio setting to the working camera. 1289 */ setZoomRatio(float zoomRatio)1290 public void setZoomRatio(float zoomRatio) { 1291 mZoomRatio = zoomRatio; 1292 mServiceEventsExecutor.execute(() -> { 1293 synchronized (mSerializationLock) { 1294 if (mCameraDevice == null || mCaptureSession == null) { 1295 return; 1296 } 1297 1298 try { 1299 mPreviewRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomRatio); 1300 mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(), 1301 mCameraCallbacksExecutor, mCaptureCallback); 1302 mUserPrefs.storeZoomRatio(mCameraId.toString(), mZoomRatio); 1303 } catch (CameraAccessException e) { 1304 Log.e(TAG, "Failed to set zoom ratio to the working camera.", e); 1305 } 1306 } 1307 }); 1308 } 1309 1310 /** 1311 * Returns current zoom ratio setting. 1312 */ getZoomRatio()1313 public float getZoomRatio() { 1314 return mZoomRatio; 1315 } 1316 1317 /** 1318 * Returns true if High Quality Mode is enabled, false otherwise. 
1319 */ isHighQualityModeEnabled()1320 public boolean isHighQualityModeEnabled() { 1321 return mHighQualityModeEnabled; 1322 } 1323 1324 /** 1325 * Toggles camera between the back and front cameras. 1326 * 1327 * The new camera is set up and configured asynchronously, but the camera state (as queried by 1328 * other methods in {@code CameraController}) is updated synchronously. So querying camera 1329 * state and metadata immediately after this method returns, returns values associated with the 1330 * new camera, even if the new camera hasn't started streaming. 1331 */ toggleCamera()1332 public void toggleCamera() { 1333 synchronized (mSerializationLock) { 1334 CameraId newCameraId; 1335 1336 if (Objects.equals(mCameraId, mBackCameraId)) { 1337 newCameraId = mFrontCameraId; 1338 } else { 1339 newCameraId = mBackCameraId; 1340 } 1341 1342 switchCamera(newCameraId); 1343 } 1344 } 1345 1346 /** 1347 * Switches current working camera to specific one. 1348 */ switchCamera(CameraId cameraId)1349 public void switchCamera(CameraId cameraId) { 1350 synchronized (mSerializationLock) { 1351 mCameraId = cameraId; 1352 mUserPrefs.storeCameraId(cameraId.toString()); 1353 mCameraInfo = getOrCreateCameraInfo(mCameraId); 1354 mZoomRatio = mUserPrefs.fetchZoomRatio(mCameraId.toString(), /*defaultZoom*/ 1.0f); 1355 mTapToFocusPoints = null; 1356 1357 // Stores the preferred back or front camera options 1358 if (mCameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_BACK) { 1359 mBackCameraId = mCameraId; 1360 mUserPrefs.storeBackCameraId(mBackCameraId.toString()); 1361 } else if (mCameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_FRONT) { 1362 mFrontCameraId = mCameraId; 1363 mUserPrefs.storeFrontCameraId(mFrontCameraId.toString()); 1364 } 1365 } 1366 mServiceEventsExecutor.execute(() -> { 1367 synchronized (mSerializationLock) { 1368 if (mCameraDevice == null) { 1369 // Its possible the preview screen is up before the camera device is opened. 1370 return; 1371 } 1372 mCaptureSession.close(); 1373 if (mCameraInfo != null) { 1374 mRotationProvider.updateSensorOrientation(mCameraInfo.getSensorOrientation(), 1375 mCameraInfo.getLensFacing()); 1376 } 1377 switch (mCurrentState) { 1378 case WEBCAM_STREAMING: 1379 setupWebcamOnlyStreamAndOpenCameraLocked(); 1380 break; 1381 case PREVIEW_STREAMING: 1382 // Preview size might change after toggling the camera. 1383 adjustPreviewOutputConfiguration(); 1384 setupPreviewOnlyStreamLocked(mPreviewSurface); 1385 break; 1386 case PREVIEW_AND_WEBCAM_STREAMING: 1387 setupWebcamOnlyStreamAndOpenCameraLocked(); 1388 // Preview size might change after toggling the camera. 1389 adjustPreviewOutputConfiguration(); 1390 setupPreviewStreamAlongsideWebcamStreamLocked(mPreviewSurface); 1391 break; 1392 } 1393 } 1394 }); 1395 } 1396 1397 /** 1398 * Sets a {@link RotationUpdateListener} to monitor the rotation changes. 1399 */ setRotationUpdateListener(RotationUpdateListener listener)1400 public void setRotationUpdateListener(RotationUpdateListener listener) { 1401 mRotationUpdateListener = listener; 1402 } 1403 1404 /** 1405 * Returns current rotation degrees value. 1406 */ getCurrentRotation()1407 public int getCurrentRotation() { 1408 return mRotationProvider.getRotation(); 1409 } 1410 1411 /** 1412 * Returns the best suitable output size for preview. 1413 * 1414 * <p>If the webcam stream doesn't exist, find the largest 16:9 supported output size which is 1415 * not larger than 1080p. 
If the webcam stream exists, find the largest supported output size 1416 * which matches the aspect ratio of the webcam stream size and is not larger than the 1417 * display size, 1080p, or the webcam stream resolution, whichever is smallest. 1418 */ getSuitablePreviewSize()1419 public Size getSuitablePreviewSize() { 1420 if (mCameraId == null) { 1421 Log.e(TAG, "No camera is found on the device."); 1422 return null; 1423 } 1424 1425 final Size s1080p = new Size(1920, 1080); 1426 Size maxPreviewSize = s1080p; 1427 1428 // For PREVIEW, choose the smallest of webcam stream size, display size, and 1080p. This 1429 // is guaranteed to be supported with a YUV stream. 1430 if (mImgReader != null) { 1431 maxPreviewSize = new Size(mImgReader.getWidth(), mImgReader.getHeight()); 1432 } 1433 1434 if (numPixels(maxPreviewSize) > numPixels(s1080p)) { 1435 maxPreviewSize = s1080p; 1436 } 1437 1438 if (numPixels(maxPreviewSize) > numPixels(mDisplaySize)) { 1439 maxPreviewSize = mDisplaySize; 1440 } 1441 1442 // If webcam stream exists, find an output size matching its aspect ratio. Otherwise, find 1443 // an output size with 16:9 aspect ratio. 1444 final Rational targetAspectRatio; 1445 if (mImgReader != null) { 1446 targetAspectRatio = new Rational(mImgReader.getWidth(), mImgReader.getHeight()); 1447 } else { 1448 targetAspectRatio = new Rational(s1080p.getWidth(), s1080p.getHeight()); 1449 } 1450 1451 StreamConfigurationMap map = getCameraCharacteristic(mCameraId.mainCameraId, 1452 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 1453 1454 if (map == null) { 1455 Log.e(TAG, "Failed to retrieve StreamConfigurationMap. Return null preview size."); 1456 return null; 1457 } 1458 1459 Size[] outputSizes = map.getOutputSizes(SurfaceTexture.class); 1460 1461 if (outputSizes == null || outputSizes.length == 0) { 1462 Log.e(TAG, "Empty output sizes. Return null preview size."); 1463 return null; 1464 } 1465 1466 Size finalMaxPreviewSize = maxPreviewSize; 1467 Size previewSize = Arrays.stream(outputSizes) 1468 .filter(size -> targetAspectRatio.equals( 1469 new Rational(size.getWidth(), size.getHeight()))) 1470 .filter(size -> numPixels(size) <= numPixels(finalMaxPreviewSize)) 1471 .max(Comparator.comparingInt(CameraController::numPixels)) 1472 .orElse(null); 1473 1474 Log.d(TAG, "Suitable preview size is " + previewSize); 1475 return previewSize; 1476 } 1477 numPixels(Size size)1478 private static int numPixels(Size size) { 1479 return size.getWidth() * size.getHeight(); 1480 } 1481 1482 /** 1483 * Trigger tap-to-focus operation for the specified normalized points mapping to the FOV. 1484 * 1485 * <p>The specified normalized points will be used to calculate the corresponding metering 1486 * rectangles that will be applied for AF, AE and AWB. 
1487 */ tapToFocus(float[] normalizedPoint)1488 public void tapToFocus(float[] normalizedPoint) { 1489 mServiceEventsExecutor.execute(() -> { 1490 synchronized (mSerializationLock) { 1491 if (mCameraDevice == null || mCaptureSession == null) { 1492 return; 1493 } 1494 1495 try { 1496 mTapToFocusPoints = normalizedPoint; 1497 MeteringRectangle[] meteringRectangles = 1498 new MeteringRectangle[]{calculateMeteringRectangle(normalizedPoint)}; 1499 // Updates the metering rectangles to the repeating request 1500 updateTapToFocusParameters(mPreviewRequestBuilder, meteringRectangles, 1501 /* afTriggerStart */ false); 1502 mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(), 1503 mCameraCallbacksExecutor, mCaptureCallback); 1504 1505 // Creates a capture request to trigger AF start for the metering rectangles. 1506 CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest( 1507 CameraDevice.TEMPLATE_PREVIEW); 1508 CaptureRequest previewCaptureRequest = mPreviewRequestBuilder.build(); 1509 1510 for (CaptureRequest.Key<?> key : previewCaptureRequest.getKeys()) { 1511 builder.set((CaptureRequest.Key) key, previewCaptureRequest.get(key)); 1512 } 1513 1514 if (mImgReader != null && previewCaptureRequest.containsTarget( 1515 mImgReader.getSurface())) { 1516 builder.addTarget(mImgReader.getSurface()); 1517 } 1518 1519 if (mPreviewSurface != null && previewCaptureRequest.containsTarget( 1520 mPreviewSurface)) { 1521 builder.addTarget(mPreviewSurface); 1522 } 1523 1524 updateTapToFocusParameters(builder, meteringRectangles, 1525 /* afTriggerStart */ true); 1526 1527 mCaptureSession.captureSingleRequest(builder.build(), 1528 mCameraCallbacksExecutor, mCaptureCallback); 1529 } catch (CameraAccessException e) { 1530 Log.e(TAG, "Failed to execute tap-to-focus to the working camera.", e); 1531 } 1532 } 1533 }); 1534 } 1535 1536 /** 1537 * Enables or disables HighQuality mode. This will likely perform slow operations to commit the 1538 * changes. {@code callback} will be called once the changes have been committed. 1539 * 1540 * Note that there is no guarantee that {@code callback} will be called on the UI thread 1541 * and {@code callback} should not block the calling thread. 1542 * 1543 * @param enabled true if HighQualityMode should be enabled, false otherwise 1544 * @param callback Callback to be called once the session has been reconfigured. 1545 */ setHighQualityModeEnabled(boolean enabled, Runnable callback)1546 public void setHighQualityModeEnabled(boolean enabled, Runnable callback) { 1547 synchronized (mSerializationLock) { 1548 if (enabled == mHighQualityModeEnabled) { 1549 callback.run(); 1550 return; 1551 } 1552 1553 mHighQualityModeEnabled = enabled; 1554 mUserPrefs.storeHighQualityModeEnabled(mHighQualityModeEnabled); 1555 } 1556 mServiceEventsExecutor.execute(() -> { 1557 synchronized (mSerializationLock) { 1558 int currentCameraFacing = getCameraInfo().getLensFacing(); 1559 mRroCameraInfo = createVendorCameraPrefs(mHighQualityModeEnabled); 1560 refreshAvailableCameraIdList(); 1561 refreshLensFacingCameraIds(); 1562 1563 // Choose a camera that faces the same way as the current camera. 1564 CameraId targetCameraId = mBackCameraId; 1565 if (currentCameraFacing == CameraCharacteristics.LENS_FACING_FRONT) { 1566 targetCameraId = mFrontCameraId; 1567 } 1568 1569 switchCamera(targetCameraId); 1570 // Let the caller know that the changes have been committed. 1571 callback.run(); 1572 } 1573 }); 1574 } 1575 1576 /** 1577 * Resets to the auto-focus mode. 

    /**
     * Enables or disables HighQuality mode. This will likely perform slow operations to commit
     * the changes. {@code callback} will be called once the changes have been committed.
     *
     * <p>Note that there is no guarantee that {@code callback} will be called on the UI thread,
     * and {@code callback} should not block the calling thread.
     *
     * @param enabled  true if HighQuality mode should be enabled, false otherwise.
     * @param callback Callback to be called once the session has been reconfigured.
     */
    public void setHighQualityModeEnabled(boolean enabled, Runnable callback) {
        synchronized (mSerializationLock) {
            if (enabled == mHighQualityModeEnabled) {
                callback.run();
                return;
            }

            mHighQualityModeEnabled = enabled;
            mUserPrefs.storeHighQualityModeEnabled(mHighQualityModeEnabled);
        }
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                int currentCameraFacing = getCameraInfo().getLensFacing();
                mRroCameraInfo = createVendorCameraPrefs(mHighQualityModeEnabled);
                refreshAvailableCameraIdList();
                refreshLensFacingCameraIds();

                // Choose a camera that faces the same way as the current camera.
                CameraId targetCameraId = mBackCameraId;
                if (currentCameraFacing == CameraCharacteristics.LENS_FACING_FRONT) {
                    targetCameraId = mFrontCameraId;
                }

                switchCamera(targetCameraId);
                // Let the caller know that the changes have been committed.
                callback.run();
            }
        });
    }

    /**
     * Resets to the auto-focus mode.
     */
    public void resetToAutoFocus() {
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice == null || mCaptureSession == null) {
                    return;
                }
                mTapToFocusPoints = null;

                // Resets to CONTINUOUS_VIDEO mode.
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                // Clears the AF/AE/AWB regions.
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, null);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, null);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, null);

                try {
                    mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(),
                            mCameraCallbacksExecutor, mCaptureCallback);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Failed to reset to auto-focus mode to the working camera.", e);
                }
            }
        });
    }

    /**
     * Retrieves the current tap-to-focus points.
     *
     * @return the normalized points, or {@code null} if auto-focus mode is currently active.
     */
    public float[] getTapToFocusPoints() {
        synchronized (mSerializationLock) {
            return mTapToFocusPoints == null ? null
                    : new float[]{mTapToFocusPoints[0], mTapToFocusPoints[1]};
        }
    }

    /**
     * Calculates the metering rectangle for the specified normalized point.
     */
    private MeteringRectangle calculateMeteringRectangle(float[] normalizedPoint) {
        CameraInfo cameraInfo = getCameraInfo();
        Rect activeArraySize = cameraInfo.getActiveArraySize();
        float halfMeteringRectWidth = (METERING_RECTANGLE_SIZE_RATIO * activeArraySize.width()) / 2;
        float halfMeteringRectHeight =
                (METERING_RECTANGLE_SIZE_RATIO * activeArraySize.height()) / 2;

        Matrix matrix = new Matrix();
        matrix.postRotate(-cameraInfo.getSensorOrientation(), 0.5f, 0.5f);
        // Flips vertically if the current working camera is the front camera.
        if (cameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_FRONT) {
            matrix.postScale(1, -1, 0.5f, 0.5f);
        }
        matrix.postScale(activeArraySize.width(), activeArraySize.height());
        float[] mappingPoints = new float[]{normalizedPoint[0], normalizedPoint[1]};
        matrix.mapPoints(mappingPoints);

        Rect meteringRegion = new Rect(
                clamp((int) (mappingPoints[0] - halfMeteringRectWidth), 0,
                        activeArraySize.width()),
                clamp((int) (mappingPoints[1] - halfMeteringRectHeight), 0,
                        activeArraySize.height()),
                clamp((int) (mappingPoints[0] + halfMeteringRectWidth), 0,
                        activeArraySize.width()),
                clamp((int) (mappingPoints[1] + halfMeteringRectHeight), 0,
                        activeArraySize.height())
        );

        return new MeteringRectangle(meteringRegion, MeteringRectangle.METERING_WEIGHT_MAX);
    }

    private int clamp(int value, int min, int max) {
        return Math.min(Math.max(value, min), max);
    }
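
    // A worked example of calculateMeteringRectangle() with hypothetical values: for a back
    // camera with a sensor orientation of 90 degrees and a 4000x3000 active array, a normalized
    // tap at (0.25, 0.5) is rotated about the center to (0.5, 0.75) and scaled to the sensor
    // coordinate (2000, 2250); with METERING_RECTANGLE_SIZE_RATIO = 0.15, the resulting metering
    // rectangle is (1700, 2025, 2300, 2475), which needs no clamping.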

    /**
     * Updates the tap-to-focus parameters on the given capture request builder.
     *
     * @param builder            the capture request builder to apply the parameters to.
     * @param meteringRectangles the metering rectangles to apply to the capture request builder.
     * @param afTriggerStart     sets CONTROL_AF_TRIGGER to CONTROL_AF_TRIGGER_START if this
     *                           parameter is {@code true}. Otherwise, leaves CONTROL_AF_TRIGGER
     *                           unset.
     */
    private void updateTapToFocusParameters(CaptureRequest.Builder builder,
            MeteringRectangle[] meteringRectangles, boolean afTriggerStart) {
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangles);
        builder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, meteringRectangles);
        builder.set(CaptureRequest.CONTROL_AE_MODE,
                CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.CONTROL_AWB_REGIONS, meteringRectangles);

        if (afTriggerStart) {
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_START);
        }
    }

    private static class ImageAndBuffer {
        public Image image;
        public HardwareBuffer buffer;

        public ImageAndBuffer(Image i, HardwareBuffer b) {
            image = i;
            buffer = b;
        }
    }

    private VendorCameraPrefs createVendorCameraPrefs(boolean highQualityMode) {
        return highQualityMode
                ? VendorCameraPrefs.createEmptyVendorCameraPrefs(mContext)
                : VendorCameraPrefs.getVendorCameraPrefsFromJson(mContext);
    }

    /**
     * An interface to monitor rotation changes.
     */
    interface RotationUpdateListener {
        /**
         * Called when the physical rotation of the device changes, causing the corresponding
         * rotation degrees value to change.
         *
         * @param rotation the updated rotation degrees value.
         */
        void onRotationUpdated(int rotation);
    }
}