1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.example.android.hdrviewfinder; 18 19 import android.Manifest; 20 import android.annotation.SuppressLint; 21 import android.content.Intent; 22 import android.content.pm.PackageManager; 23 import android.hardware.camera2.CameraAccessException; 24 import android.hardware.camera2.CameraCaptureSession; 25 import android.hardware.camera2.CameraCharacteristics; 26 import android.hardware.camera2.CameraDevice; 27 import android.hardware.camera2.CameraManager; 28 import android.hardware.camera2.CaptureRequest; 29 import android.hardware.camera2.CaptureResult; 30 import android.hardware.camera2.TotalCaptureResult; 31 import android.hardware.camera2.params.StreamConfigurationMap; 32 import android.net.Uri; 33 import android.os.Bundle; 34 import android.os.Handler; 35 import android.os.Looper; 36 import android.provider.Settings; 37 import android.renderscript.RenderScript; 38 import android.support.annotation.NonNull; 39 import android.support.design.widget.Snackbar; 40 import android.support.v4.app.ActivityCompat; 41 import android.support.v7.app.AppCompatActivity; 42 import android.util.Log; 43 import android.util.Size; 44 import android.view.GestureDetector; 45 import android.view.Menu; 46 import android.view.MenuItem; 47 import android.view.MotionEvent; 48 import android.view.Surface; 49 import android.view.SurfaceHolder; 50 
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Objects;

/**
 * A small demo of advanced camera functionality with the Android camera2 API.
 *
 * <p>This demo implements a real-time high-dynamic-range camera viewfinder,
 * by alternating the sensor's exposure time between two exposure values on even and odd
 * frames, and then compositing together the latest two frames whenever a new frame is
 * captured.</p>
 *
 * <p>The demo has three modes: Regular auto-exposure viewfinder, split-screen manual exposure,
 * and the fused HDR viewfinder. The latter two use manual exposure controlled by the user,
 * by swiping up/down on the right and left halves of the viewfinder. The left half controls
 * the exposure time of even frames, and the right half controls the exposure time of odd frames.
 * </p>
 *
 * <p>In split-screen mode, the even frames are shown on the left and the odd frames on the right,
 * so the user can see two different exposures of the scene simultaneously. In fused HDR mode,
 * the even/odd frames are merged together into a single image. By selecting different exposure
 * values for the even/odd frames, the fused image has a higher dynamic range than the regular
 * viewfinder.</p>
 *
 * <p>The HDR fusion and the split-screen viewfinder processing is done with RenderScript; as is the
 * necessary YUV->RGB conversion. The camera subsystem outputs YUV images naturally, while the GPU
 * and display subsystems generally only accept RGB data. Therefore, after the images are
 * fused/composited, a standard YUV->RGB color transform is applied before the data is written
 * to the output Allocation.
The HDR fusion algorithm is very simple, and tends to result in 84 * lower-contrast scenes, but has very few artifacts and can run very fast.</p> 85 * 86 * <p>Data is passed between the subsystems (camera, RenderScript, and display) using the 87 * Android {@link android.view.Surface} class, which allows for zero-copy transport of large 88 * buffers between processes and subsystems.</p> 89 */ 90 public class HdrViewfinderActivity extends AppCompatActivity implements 91 SurfaceHolder.Callback, CameraOps.ErrorDisplayer, CameraOps.CameraReadyListener { 92 93 private static final String TAG = "HdrViewfinderDemo"; 94 95 private static final String FRAGMENT_DIALOG = "dialog"; 96 97 private static final int REQUEST_PERMISSIONS_REQUEST_CODE = 34; 98 99 /** 100 * View for the camera preview. 101 */ 102 private FixedAspectSurfaceView mPreviewView; 103 104 /** 105 * Root view of this activity. 106 */ 107 private View rootView; 108 109 /** 110 * This shows the current mode of the app. 111 */ 112 private TextView mModeText; 113 114 // These show lengths of exposure for even frames, exposure for odd frames, and auto exposure. 
115 private TextView mEvenExposureText, mOddExposureText, mAutoExposureText; 116 117 private Handler mUiHandler; 118 119 private CameraCharacteristics mCameraInfo; 120 121 private Surface mPreviewSurface; 122 private Surface mProcessingHdrSurface; 123 private Surface mProcessingNormalSurface; 124 CaptureRequest.Builder mHdrBuilder; 125 ArrayList<CaptureRequest> mHdrRequests = new ArrayList<>(2); 126 127 CaptureRequest mPreviewRequest; 128 129 RenderScript mRS; 130 ViewfinderProcessor mProcessor; 131 CameraManager mCameraManager; 132 CameraOps mCameraOps; 133 134 private int mRenderMode = ViewfinderProcessor.MODE_NORMAL; 135 136 // Durations in nanoseconds 137 private static final long MICRO_SECOND = 1000; 138 private static final long MILLI_SECOND = MICRO_SECOND * 1000; 139 private static final long ONE_SECOND = MILLI_SECOND * 1000; 140 141 private long mOddExposure = ONE_SECOND / 33; 142 private long mEvenExposure = ONE_SECOND / 33; 143 144 private Object mOddExposureTag = new Object(); 145 private Object mEvenExposureTag = new Object(); 146 private Object mAutoExposureTag = new Object(); 147 148 @Override onCreate(Bundle savedInstanceState)149 protected void onCreate(Bundle savedInstanceState) { 150 super.onCreate(savedInstanceState); 151 setContentView(R.layout.main); 152 153 rootView = findViewById(R.id.panels); 154 155 mPreviewView = (FixedAspectSurfaceView) findViewById(R.id.preview); 156 mPreviewView.getHolder().addCallback(this); 157 mPreviewView.setGestureListener(this, mViewListener); 158 159 Button helpButton = (Button) findViewById(R.id.help_button); 160 helpButton.setOnClickListener(mHelpButtonListener); 161 162 mModeText = (TextView) findViewById(R.id.mode_label); 163 mEvenExposureText = (TextView) findViewById(R.id.even_exposure); 164 mOddExposureText = (TextView) findViewById(R.id.odd_exposure); 165 mAutoExposureText = (TextView) findViewById(R.id.auto_exposure); 166 167 mUiHandler = new Handler(Looper.getMainLooper()); 168 169 mRS = 
RenderScript.create(this); 170 171 // When permissions are revoked the app is restarted so onCreate is sufficient to check for 172 // permissions core to the Activity's functionality. 173 if (!checkCameraPermissions()) { 174 requestCameraPermissions(); 175 } else { 176 findAndOpenCamera(); 177 } 178 } 179 180 @Override onResume()181 protected void onResume() { 182 super.onResume(); 183 } 184 185 @Override onPause()186 protected void onPause() { 187 super.onPause(); 188 189 // Wait until camera is closed to ensure the next application can open it 190 if (mCameraOps != null) { 191 mCameraOps.closeCameraAndWait(); 192 mCameraOps = null; 193 } 194 } 195 196 @Override onCreateOptionsMenu(Menu menu)197 public boolean onCreateOptionsMenu(Menu menu) { 198 getMenuInflater().inflate(R.menu.main, menu); 199 return super.onCreateOptionsMenu(menu); 200 } 201 202 @Override onOptionsItemSelected(MenuItem item)203 public boolean onOptionsItemSelected(MenuItem item) { 204 switch (item.getItemId()) { 205 case R.id.info: { 206 MessageDialogFragment.newInstance(R.string.intro_message) 207 .show(getSupportFragmentManager(), FRAGMENT_DIALOG); 208 break; 209 } 210 } 211 return super.onOptionsItemSelected(item); 212 } 213 214 private GestureDetector.OnGestureListener mViewListener 215 = new GestureDetector.SimpleOnGestureListener() { 216 217 @Override 218 public boolean onDown(MotionEvent e) { 219 return true; 220 } 221 222 @Override 223 public boolean onSingleTapUp(MotionEvent e) { 224 switchRenderMode(1); 225 return true; 226 } 227 228 @Override 229 public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { 230 if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) return false; 231 232 float xPosition = e1.getAxisValue(MotionEvent.AXIS_X); 233 float width = mPreviewView.getWidth(); 234 float height = mPreviewView.getHeight(); 235 236 float xPosNorm = xPosition / width; 237 float yDistNorm = distanceY / height; 238 239 final float ACCELERATION_FACTOR = 8; 
240 double scaleFactor = Math.pow(2.f, yDistNorm * ACCELERATION_FACTOR); 241 242 // Even on left, odd on right 243 if (xPosNorm > 0.5) { 244 mOddExposure *= scaleFactor; 245 } else { 246 mEvenExposure *= scaleFactor; 247 } 248 249 setHdrBurst(); 250 251 return true; 252 } 253 }; 254 255 /** 256 * Show help dialogs. 257 */ 258 private View.OnClickListener mHelpButtonListener = new View.OnClickListener() { 259 public void onClick(View v) { 260 MessageDialogFragment.newInstance(R.string.help_text) 261 .show(getSupportFragmentManager(), FRAGMENT_DIALOG); 262 } 263 }; 264 265 /** 266 * Return the current state of the camera permissions. 267 */ checkCameraPermissions()268 private boolean checkCameraPermissions() { 269 int permissionState = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA); 270 271 // Check if the Camera permission is already available. 272 if (permissionState != PackageManager.PERMISSION_GRANTED) { 273 // Camera permission has not been granted. 274 Log.i(TAG, "CAMERA permission has NOT been granted."); 275 return false; 276 } else { 277 // Camera permissions are available. 278 Log.i(TAG, "CAMERA permission has already been granted."); 279 return true; 280 } 281 } 282 283 /** 284 * Attempt to initialize the camera. 285 */ initializeCamera()286 private void initializeCamera() { 287 mCameraManager = (CameraManager) getSystemService(CAMERA_SERVICE); 288 if (mCameraManager != null) { 289 mCameraOps = new CameraOps(mCameraManager, 290 /*errorDisplayer*/ this, 291 /*readyListener*/ this, 292 /*readyHandler*/ mUiHandler); 293 294 mHdrRequests.add(null); 295 mHdrRequests.add(null); 296 } else { 297 Log.e(TAG, "Couldn't initialize the camera"); 298 } 299 } 300 requestCameraPermissions()301 private void requestCameraPermissions() { 302 // Provide an additional rationale to the user. This would happen if the user denied the 303 // request previously, but didn't check the "Don't ask again" checkbox. 
304 if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) { 305 Log.i(TAG, "Displaying camera permission rationale to provide additional context."); 306 Snackbar.make(rootView, R.string.camera_permission_rationale, Snackbar 307 .LENGTH_INDEFINITE) 308 .setAction(R.string.ok, new View.OnClickListener() { 309 @Override 310 public void onClick(View view) { 311 // Request Camera permission 312 ActivityCompat.requestPermissions(HdrViewfinderActivity.this, 313 new String[]{Manifest.permission.CAMERA}, 314 REQUEST_PERMISSIONS_REQUEST_CODE); 315 } 316 }) 317 .show(); 318 } else { 319 Log.i(TAG, "Requesting camera permission"); 320 // Request Camera permission. It's possible this can be auto answered if device policy 321 // sets the permission in a given state or the user denied the permission 322 // previously and checked "Never ask again". 323 ActivityCompat.requestPermissions(HdrViewfinderActivity.this, 324 new String[]{Manifest.permission.CAMERA}, 325 REQUEST_PERMISSIONS_REQUEST_CODE); 326 } 327 } 328 329 /** 330 * Callback received when a permissions request has been completed. 331 */ 332 @Override onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults)333 public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, 334 @NonNull int[] grantResults) { 335 Log.i(TAG, "onRequestPermissionResult"); 336 if (requestCode == REQUEST_PERMISSIONS_REQUEST_CODE) { 337 if (grantResults.length <= 0) { 338 // If user interaction was interrupted, the permission request is cancelled and you 339 // receive empty arrays. 340 Log.i(TAG, "User interaction was cancelled."); 341 } else if (grantResults[0] == PackageManager.PERMISSION_GRANTED) { 342 // Permission was granted. 343 findAndOpenCamera(); 344 } else { 345 // Permission denied. 
346 347 // In this Activity we've chosen to notify the user that they 348 // have rejected a core permission for the app since it makes the Activity useless. 349 // We're communicating this message in a Snackbar since this is a sample app, but 350 // core permissions would typically be best requested during a welcome-screen flow. 351 352 // Additionally, it is important to remember that a permission might have been 353 // rejected without asking the user for permission (device policy or "Never ask 354 // again" prompts). Therefore, a user interface affordance is typically implemented 355 // when permissions are denied. Otherwise, your app could appear unresponsive to 356 // touches or interactions which have required permissions. 357 Snackbar.make(rootView, R.string.camera_permission_denied_explanation, Snackbar 358 .LENGTH_INDEFINITE) 359 .setAction(R.string.settings, new View.OnClickListener() { 360 @Override 361 public void onClick(View view) { 362 // Build intent that displays the App settings screen. 363 Intent intent = new Intent(); 364 intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS); 365 Uri uri = Uri.fromParts("package", BuildConfig.APPLICATION_ID, null); 366 intent.setData(uri); 367 intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); 368 startActivity(intent); 369 } 370 }) 371 .show(); 372 } 373 } 374 } 375 findAndOpenCamera()376 private void findAndOpenCamera() { 377 boolean cameraPermissions = checkCameraPermissions(); 378 if (!cameraPermissions) { 379 return; 380 } 381 String errorMessage = "Unknown error"; 382 boolean foundCamera = false; 383 initializeCamera(); 384 if (mCameraOps != null) { 385 try { 386 // Find first back-facing camera that has necessary capability. 
387 String[] cameraIds = mCameraManager.getCameraIdList(); 388 for (String id : cameraIds) { 389 CameraCharacteristics info = mCameraManager.getCameraCharacteristics(id); 390 Integer facing = info.get(CameraCharacteristics.LENS_FACING); 391 Integer level = info.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 392 boolean hasFullLevel = Objects.equals(level, 393 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); 394 395 int[] capabilities = info 396 .get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 397 Integer syncLatency = info.get(CameraCharacteristics.SYNC_MAX_LATENCY); 398 boolean hasManualControl = hasCapability(capabilities, 399 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR); 400 boolean hasEnoughCapability = hasManualControl && Objects.equals(syncLatency, 401 CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL); 402 403 // All these are guaranteed by 404 // CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, but checking 405 // for only the things we care about expands range of devices we can run on. 
406 // We want: 407 // - Back-facing camera 408 // - Manual sensor control 409 // - Per-frame synchronization (so that exposure can be changed every frame) 410 if (Objects.equals(facing, CameraCharacteristics.LENS_FACING_BACK) && 411 (hasFullLevel || hasEnoughCapability)) { 412 // Found suitable camera - get info, open, and set up outputs 413 mCameraInfo = info; 414 mCameraOps.openCamera(id); 415 configureSurfaces(); 416 foundCamera = true; 417 break; 418 } 419 } 420 if (!foundCamera) { 421 errorMessage = getString(R.string.camera_no_good); 422 } 423 } catch (CameraAccessException e) { 424 errorMessage = getErrorString(e); 425 } 426 if (!foundCamera) { 427 showErrorDialog(errorMessage); 428 } 429 } 430 } 431 hasCapability(int[] capabilities, int capability)432 private boolean hasCapability(int[] capabilities, int capability) { 433 for (int c : capabilities) { 434 if (c == capability) return true; 435 } 436 return false; 437 } 438 switchRenderMode(int direction)439 private void switchRenderMode(int direction) { 440 if (mCameraOps != null) { 441 mRenderMode = (mRenderMode + direction) % 3; 442 443 mModeText.setText(getResources().getStringArray(R.array.mode_label_array)[mRenderMode]); 444 445 if (mProcessor != null) { 446 mProcessor.setRenderMode(mRenderMode); 447 } 448 if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) { 449 mCameraOps.setRepeatingRequest(mPreviewRequest, 450 mCaptureCallback, mUiHandler); 451 } else { 452 setHdrBurst(); 453 } 454 } 455 } 456 457 /** 458 * Configure the surfaceview and RS processing. 
459 */ configureSurfaces()460 private void configureSurfaces() { 461 // Find a good size for output - largest 16:9 aspect ratio that's less than 720p 462 final int MAX_WIDTH = 1280; 463 final float TARGET_ASPECT = 16.f / 9.f; 464 final float ASPECT_TOLERANCE = 0.1f; 465 466 StreamConfigurationMap configs = 467 mCameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 468 if (configs == null) { 469 throw new RuntimeException("Cannot get available picture/preview sizes."); 470 } 471 Size[] outputSizes = configs.getOutputSizes(SurfaceHolder.class); 472 473 Size outputSize = outputSizes[0]; 474 float outputAspect = (float) outputSize.getWidth() / outputSize.getHeight(); 475 for (Size candidateSize : outputSizes) { 476 if (candidateSize.getWidth() > MAX_WIDTH) continue; 477 float candidateAspect = (float) candidateSize.getWidth() / candidateSize.getHeight(); 478 boolean goodCandidateAspect = 479 Math.abs(candidateAspect - TARGET_ASPECT) < ASPECT_TOLERANCE; 480 boolean goodOutputAspect = 481 Math.abs(outputAspect - TARGET_ASPECT) < ASPECT_TOLERANCE; 482 if ((goodCandidateAspect && !goodOutputAspect) || 483 candidateSize.getWidth() > outputSize.getWidth()) { 484 outputSize = candidateSize; 485 outputAspect = candidateAspect; 486 } 487 } 488 Log.i(TAG, "Resolution chosen: " + outputSize); 489 490 // Configure processing 491 mProcessor = new ViewfinderProcessor(mRS, outputSize); 492 setupProcessor(); 493 494 // Configure the output view - this will fire surfaceChanged 495 mPreviewView.setAspectRatio(outputAspect); 496 mPreviewView.getHolder().setFixedSize(outputSize.getWidth(), outputSize.getHeight()); 497 } 498 499 /** 500 * Once camera is open and output surfaces are ready, configure the RS processing 501 * and the camera device inputs/outputs. 
502 */ 503 private void setupProcessor() { 504 if (mProcessor == null || mPreviewSurface == null) return; 505 506 mProcessor.setOutputSurface(mPreviewSurface); 507 mProcessingHdrSurface = mProcessor.getInputHdrSurface(); 508 mProcessingNormalSurface = mProcessor.getInputNormalSurface(); 509 510 List<Surface> cameraOutputSurfaces = new ArrayList<>(); 511 cameraOutputSurfaces.add(mProcessingHdrSurface); 512 cameraOutputSurfaces.add(mProcessingNormalSurface); 513 514 mCameraOps.setSurfaces(cameraOutputSurfaces); 515 } 516 517 /** 518 * Start running an HDR burst on a configured camera session 519 */ 520 public void setHdrBurst() { 521 522 mHdrBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 1600); 523 mHdrBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, ONE_SECOND / 30); 524 525 mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mEvenExposure); 526 mHdrBuilder.setTag(mEvenExposureTag); 527 mHdrRequests.set(0, mHdrBuilder.build()); 528 529 mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mOddExposure); 530 mHdrBuilder.setTag(mOddExposureTag); 531 mHdrRequests.set(1, mHdrBuilder.build()); 532 533 mCameraOps.setRepeatingBurst(mHdrRequests, mCaptureCallback, mUiHandler); 534 } 535 536 /** 537 * Listener for completed captures 538 * Invoked on UI thread 539 */ 540 private CameraCaptureSession.CaptureCallback mCaptureCallback 541 = new CameraCaptureSession.CaptureCallback() { 542 543 public void onCaptureCompleted(@NonNull CameraCaptureSession session, 544 @NonNull CaptureRequest request, 545 @NonNull TotalCaptureResult result) { 546 547 // Only update UI every so many frames 548 // Use an odd number here to ensure both even and odd exposures get an occasional update 549 long frameNumber = result.getFrameNumber(); 550 if (frameNumber % 3 != 0) return; 551 552 final Long exposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 553 if (exposureTime == null) { 554 throw new RuntimeException("Cannot get exposure time."); 555 } 556 557 // Format exposure time nicely 
558 String exposureText; 559 if (exposureTime > ONE_SECOND) { 560 exposureText = String.format(Locale.US, "%.2f s", exposureTime / 1e9); 561 } else if (exposureTime > MILLI_SECOND) { 562 exposureText = String.format(Locale.US, "%.2f ms", exposureTime / 1e6); 563 } else if (exposureTime > MICRO_SECOND) { 564 exposureText = String.format(Locale.US, "%.2f us", exposureTime / 1e3); 565 } else { 566 exposureText = String.format(Locale.US, "%d ns", exposureTime); 567 } 568 569 Object tag = request.getTag(); 570 Log.i(TAG, "Exposure: " + exposureText); 571 572 if (tag == mEvenExposureTag) { 573 mEvenExposureText.setText(exposureText); 574 575 mEvenExposureText.setEnabled(true); 576 mOddExposureText.setEnabled(true); 577 mAutoExposureText.setEnabled(false); 578 } else if (tag == mOddExposureTag) { 579 mOddExposureText.setText(exposureText); 580 581 mEvenExposureText.setEnabled(true); 582 mOddExposureText.setEnabled(true); 583 mAutoExposureText.setEnabled(false); 584 } else { 585 mAutoExposureText.setText(exposureText); 586 587 mEvenExposureText.setEnabled(false); 588 mOddExposureText.setEnabled(false); 589 mAutoExposureText.setEnabled(true); 590 } 591 } 592 }; 593 594 /** 595 * Callbacks for the FixedAspectSurfaceView 596 */ 597 598 @Override surfaceChanged(SurfaceHolder holder, int format, int width, int height)599 public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 600 mPreviewSurface = holder.getSurface(); 601 602 setupProcessor(); 603 } 604 605 @Override surfaceCreated(SurfaceHolder holder)606 public void surfaceCreated(SurfaceHolder holder) { 607 // ignored 608 } 609 610 @Override surfaceDestroyed(SurfaceHolder holder)611 public void surfaceDestroyed(SurfaceHolder holder) { 612 mPreviewSurface = null; 613 } 614 615 /** 616 * Callbacks for CameraOps 617 */ 618 @Override onCameraReady()619 public void onCameraReady() { 620 // Ready to send requests in, so set them up 621 try { 622 CaptureRequest.Builder previewBuilder = 623 
mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 624 previewBuilder.addTarget(mProcessingNormalSurface); 625 previewBuilder.setTag(mAutoExposureTag); 626 mPreviewRequest = previewBuilder.build(); 627 628 mHdrBuilder = 629 mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 630 mHdrBuilder.set(CaptureRequest.CONTROL_AE_MODE, 631 CaptureRequest.CONTROL_AE_MODE_OFF); 632 mHdrBuilder.addTarget(mProcessingHdrSurface); 633 634 switchRenderMode(0); 635 636 } catch (CameraAccessException e) { 637 String errorMessage = getErrorString(e); 638 showErrorDialog(errorMessage); 639 } 640 } 641 642 /** 643 * Utility methods 644 */ 645 @Override showErrorDialog(String errorMessage)646 public void showErrorDialog(String errorMessage) { 647 MessageDialogFragment.newInstance(errorMessage) 648 .show(getSupportFragmentManager(), FRAGMENT_DIALOG); 649 } 650 651 @SuppressLint("SwitchIntDef") 652 @Override getErrorString(CameraAccessException e)653 public String getErrorString(CameraAccessException e) { 654 String errorMessage; 655 switch (e.getReason()) { 656 case CameraAccessException.CAMERA_DISABLED: 657 errorMessage = getString(R.string.camera_disabled); 658 break; 659 case CameraAccessException.CAMERA_DISCONNECTED: 660 errorMessage = getString(R.string.camera_disconnected); 661 break; 662 case CameraAccessException.CAMERA_ERROR: 663 errorMessage = getString(R.string.camera_error); 664 break; 665 default: 666 errorMessage = getString(R.string.camera_unknown, e.getReason()); 667 break; 668 } 669 return errorMessage; 670 } 671 672 } 673