1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 
18 package android.hardware.camera2.params;
19 
20 import static com.android.internal.util.Preconditions.*;
21 
22 import android.annotation.FlaggedApi;
23 import android.annotation.IntDef;
24 import android.annotation.NonNull;
25 import android.annotation.Nullable;
26 import android.annotation.SuppressLint;
27 import android.annotation.SystemApi;
28 import android.annotation.TestApi;
29 import android.graphics.ColorSpace;
30 import android.graphics.ImageFormat;
31 import android.graphics.ImageFormat.Format;
32 import android.hardware.HardwareBuffer;
33 import android.hardware.HardwareBuffer.Usage;
34 import android.hardware.camera2.CameraCaptureSession;
35 import android.hardware.camera2.CameraCharacteristics;
36 import android.hardware.camera2.CameraDevice;
37 import android.hardware.camera2.CameraMetadata;
38 import android.hardware.camera2.MultiResolutionImageReader;
39 import android.hardware.camera2.params.DynamicRangeProfiles;
40 import android.hardware.camera2.params.MultiResolutionStreamInfo;
41 import android.hardware.camera2.utils.HashCodeHelpers;
42 import android.hardware.camera2.utils.SurfaceUtils;
43 import android.media.ImageReader;
44 import android.os.Parcel;
45 import android.os.Parcelable;
46 import android.util.Log;
47 import android.util.Size;
48 import android.view.Surface;
49 
50 import com.android.internal.camera.flags.Flags;
51 
52 import java.lang.annotation.Retention;
53 import java.lang.annotation.RetentionPolicy;
54 import java.util.ArrayList;
55 import java.util.Collection;
56 import java.util.Collections;
57 import java.util.List;
58 import java.util.Objects;
59 import java.util.concurrent.atomic.AtomicInteger;
60 
61 /**
62  * A class for describing camera output, which contains a {@link Surface} and its specific
63  * configuration for creating capture session.
64  *
65  * <p>There are several ways to instantiate, modify and use OutputConfigurations. The most common
66  * and recommended usage patterns are summarized in the following list:</p>
67  *<ul>
68  * <li>Passing a {@link Surface} to the constructor and using the OutputConfiguration instance as
69  * argument to {@link CameraDevice#createCaptureSessionByOutputConfigurations}. This is the most
70  * frequent usage and clients should consider it first before other more complicated alternatives.
71  * </li>
72  *
 * <li>Passing only a surface source class as an argument to the constructor. This is usually
 * followed by a call to create a capture session
 * (see {@link CameraDevice#createCaptureSessionByOutputConfigurations}) and a subsequent
 * {@link #addSurface} call with a valid {@link Surface}. The sequence completes with
77  * {@link CameraCaptureSession#finalizeOutputConfigurations}. This is the deferred usage case which
78  * aims to enhance performance by allowing the resource-intensive capture session create call to
79  * execute in parallel with any {@link Surface} initialization, such as waiting for a
80  * {@link android.view.SurfaceView} to be ready as part of the UI initialization.</li>
81  *
82  * <li>The third and most complex usage pattern involves surface sharing. Once instantiated an
83  * OutputConfiguration can be enabled for surface sharing via {@link #enableSurfaceSharing}. This
84  * must be done before creating a new capture session and enables calls to
85  * {@link CameraCaptureSession#updateOutputConfiguration}. An OutputConfiguration with enabled
86  * surface sharing can be modified via {@link #addSurface} or {@link #removeSurface}. The updates
87  * to this OutputConfiguration will only come into effect after
88  * {@link CameraCaptureSession#updateOutputConfiguration} returns without throwing exceptions.
89  * Such updates can be done as long as the session is active. Clients should always consider the
90  * additional requirements and limitations placed on the output surfaces (for more details see
91  * {@link #enableSurfaceSharing}, {@link #addSurface}, {@link #removeSurface},
92  * {@link CameraCaptureSession#updateOutputConfiguration}). A trade-off exists between additional
93  * complexity and flexibility. If exercised correctly surface sharing can switch between different
94  * output surfaces without interrupting any ongoing repeating capture requests. This saves time and
95  * can significantly improve the user experience.</li>
96  *
97  * <li>Surface sharing can be used in combination with deferred surfaces. The rules from both cases
98  * are combined and clients must call {@link #enableSurfaceSharing} before creating a capture
99  * session. Attach and/or remove output surfaces via  {@link #addSurface}/{@link #removeSurface} and
100  * finalize the configuration using {@link CameraCaptureSession#finalizeOutputConfigurations}.
101  * {@link CameraCaptureSession#updateOutputConfiguration} can be called after the configuration
102  * finalize method returns without exceptions.</li>
103  *
104  * <li>If the camera device supports multi-resolution output streams, {@link
105  * CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP} will contain the
106  * formats and their corresponding stream info. The application can use an OutputConfiguration
107  * created with the multi-resolution stream info queried from {@link
108  * MultiResolutionStreamConfigurationMap#getOutputInfo} and
109  * {@link android.hardware.camera2.MultiResolutionImageReader} to capture variable size images.
110  *
111  * </ul>
112  *
113  * <p> As of {@link android.os.Build.VERSION_CODES#P Android P}, all formats except
114  * {@link ImageFormat#JPEG} and {@link ImageFormat#RAW_PRIVATE} can be used for sharing, subject to
115  * device support. On prior API levels, only {@link ImageFormat#PRIVATE} format may be used.</p>
116  *
117  * @see CameraDevice#createCaptureSessionByOutputConfigurations
118  * @see CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP
119  *
120  */
121 public final class OutputConfiguration implements Parcelable {
122 
    /**
     * Rotation constant: 0 degree rotation (no rotation)
     *
     * <p>The four ROTATION_* values form a contiguous range [ROTATION_0, ROTATION_270]
     * which is used for argument validation in the constructors.</p>
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_0 = 0;

    /**
     * Rotation constant: 90 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_90 = 1;

    /**
     * Rotation constant: 180 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_180 = 2;

    /**
     * Rotation constant: 270 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_270 = 3;

    /**
     * Invalid surface group ID.
     *
     * <p>An {@link OutputConfiguration} with this value indicates that the included surface
     * doesn't belong to any surface group.</p>
     */
    public static final int SURFACE_GROUP_ID_NONE = -1;
162 
    /**
     * Default timestamp base.
     *
     * <p>The camera device decides the timestamp based on the properties of the
     * output surface.</p>
     *
     * <ul>
     * <li> For a SurfaceView output surface, the timestamp base is {@link
     * #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}. The timestamp is overridden with choreographer
     * pulses from the display subsystem for smoother display of camera frames when the camera
     * device runs in fixed frame rate. The timestamp is roughly in the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For an output surface of MediaRecorder, MediaCodec, or ImageReader with {@link
     * android.hardware.HardwareBuffer#USAGE_VIDEO_ENCODE} usage flag, the timestamp base is
     * {@link #TIMESTAMP_BASE_MONOTONIC}, which is roughly the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For all other cases, the timestamp base is {@link #TIMESTAMP_BASE_SENSOR}, the same
     * as what's specified by {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     * <ul><li> For a SurfaceTexture output surface, the camera system re-spaces the delivery
     * of output frames based on image readout intervals, reducing viewfinder jitter. The timestamps
     * of images remain to be {@link #TIMESTAMP_BASE_SENSOR}.</li></ul></li>
     * </ul>
     *
     * <p>Note that the reduction of frame jitter for SurfaceView and SurfaceTexture comes with
     * slight increase in photon-to-photon latency, which is the time from when photons hit the
     * scene to when the corresponding pixels show up on the screen. If the photon-to-photon latency
     * is more important than the smoothness of viewfinder, {@link #TIMESTAMP_BASE_SENSOR} should be
     * used instead.</p>
     *
     * @see #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED
     * @see #TIMESTAMP_BASE_MONOTONIC
     * @see #TIMESTAMP_BASE_SENSOR
     */
    public static final int TIMESTAMP_BASE_DEFAULT = 0;

    /**
     * Timestamp base of {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     *
     * <p>The timestamps of the output images are in the time base as specified by {@link
     * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}. The application can look up the
     * corresponding result metadata by matching the timestamp with a {@link
     * CameraCaptureSession.CaptureCallback#onCaptureStarted}, or with a {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted} if readout timestamp is used.</p>
     */
    public static final int TIMESTAMP_BASE_SENSOR = 1;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#uptimeMillis}.
     *
     * <p>The timestamps of the output images are monotonically increasing, and are roughly in the
     * same time base as {@link android.os.SystemClock#uptimeMillis}. The timestamps with this
     * time base can be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * REALTIME, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     */
    public static final int TIMESTAMP_BASE_MONOTONIC = 2;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#elapsedRealtime}.
     *
     * <p>The timestamps of the output images are roughly in the
     * same time base as {@link android.os.SystemClock#elapsedRealtime}. The timestamps with this
     * time base cannot be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * UNKNOWN, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     *
     * <p>If using a REALTIME timestamp base on a device that supports only
     * TIMESTAMP_SOURCE_UNKNOWN, the accuracy of timestamps is only what is guaranteed in the
     * documentation for UNKNOWN. In particular, they have no guarantees about being accurate
     * enough to use in fusing image data with the output of inertial sensors, for features such as
     * image stabilization or augmented reality.</p>
     */
    public static final int TIMESTAMP_BASE_REALTIME = 3;

    /**
     * Timestamp is synchronized to choreographer.
     *
     * <p>The timestamp of the output images are overridden with choreographer pulses from the
     * display subsystem for smoother display of camera frames. An output target of SurfaceView
     * uses this time base by default. Note that the timestamp override is done for fixed camera
     * frame rate only.</p>
     *
     * <p>This timestamp base isn't applicable to SurfaceTexture targets. SurfaceTexture's
     * {@link android.graphics.SurfaceTexture#updateTexImage updateTexImage} function always
     * uses the latest image from the camera stream. In the case of a TextureView, the image is
     * displayed right away.</p>
     *
     * <p>Timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}. This timestamp base shouldn't be used if the
     * timestamp needs to be used for audio-video synchronization.</p>
     */
    public static final int TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4;

    /**
     * Timestamp is the start of readout in the same time domain as TIMESTAMP_BASE_SENSOR.
     *
     * <p>NOTE: do not use! Use setReadoutTimestampEnabled instead.</p>
     *
     * @hide
     */
    public static final int TIMESTAMP_BASE_READOUT_SENSOR = 5;
272 
273     /** @hide */
274     @Retention(RetentionPolicy.SOURCE)
275     @IntDef(prefix = {"TIMESTAMP_BASE_"}, value =
276         {TIMESTAMP_BASE_DEFAULT,
277          TIMESTAMP_BASE_SENSOR,
278          TIMESTAMP_BASE_MONOTONIC,
279          TIMESTAMP_BASE_REALTIME,
280          TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
281          TIMESTAMP_BASE_READOUT_SENSOR})
282     public @interface TimestampBase {};
283 
284     /** @hide */
285      @Retention(RetentionPolicy.SOURCE)
286      @IntDef(prefix = {"SENSOR_PIXEL_MODE_"}, value =
287          {CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT,
288           CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION})
289      public @interface SensorPixelMode {};
290 
291     /** @hide */
292     @Retention(RetentionPolicy.SOURCE)
293     @IntDef(prefix = {"STREAM_USE_CASE_"}, value =
294         {CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
295          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
296          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
297          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
298          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
299          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL,
300          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW})
301     public @interface StreamUseCase {};
302 
303     /**
304      * Automatic mirroring based on camera facing
305      *
306      * <p>This is the default mirroring mode for the camera device. With this mode,
307      * the camera output is mirrored horizontally for front-facing cameras. There is
308      * no mirroring for rear-facing and external cameras.</p>
309      */
310     public static final int MIRROR_MODE_AUTO = 0;
311 
312     /**
313      * No mirror transform is applied
314      *
315      * <p>No mirroring is applied to the camera output regardless of the camera facing.</p>
316      */
317     public static final int MIRROR_MODE_NONE = 1;
318 
319     /**
320      * Camera output is mirrored horizontally
321      *
322      * <p>The camera output is mirrored horizontally, the same behavior as in AUTO mode for
323      * front facing camera.</p>
324      */
325     public static final int MIRROR_MODE_H = 2;
326 
327     /**
328      * Camera output is mirrored vertically
329      */
330     public static final int MIRROR_MODE_V = 3;
331 
332     /** @hide */
333     @Retention(RetentionPolicy.SOURCE)
334     @IntDef(prefix = {"MIRROR_MODE_"}, value =
335         {MIRROR_MODE_AUTO,
336           MIRROR_MODE_NONE,
337           MIRROR_MODE_H,
338           MIRROR_MODE_V})
339     public @interface MirrorMode {};
340 
341     /**
342      * Create a new {@link OutputConfiguration} instance with a {@link Surface}.
343      *
344      * @param surface
345      *          A Surface for camera to output to.
346      *
347      * <p>This constructor creates a default configuration, with a surface group ID of
348      * {@value #SURFACE_GROUP_ID_NONE}.</p>
349      *
350      */
OutputConfiguration(@onNull Surface surface)351     public OutputConfiguration(@NonNull Surface surface) {
352         this(SURFACE_GROUP_ID_NONE, surface, ROTATION_0);
353     }
354 
355     /**
356      * Unknown surface source type.
357      */
358     private final int SURFACE_TYPE_UNKNOWN = -1;
359 
360     /**
361      * The surface is obtained from {@link android.view.SurfaceView}.
362      */
363     private final int SURFACE_TYPE_SURFACE_VIEW = 0;
364 
365     /**
366      * The surface is obtained from {@link android.graphics.SurfaceTexture}.
367      */
368     private final int SURFACE_TYPE_SURFACE_TEXTURE = 1;
369 
370     /**
371      * The surface is obtained from {@link android.media.MediaRecorder}.
372      */
373     private static final int SURFACE_TYPE_MEDIA_RECORDER = 2;
374 
375     /**
376      * The surface is obtained from {@link android.media.MediaCodec}.
377      */
378     private static final int SURFACE_TYPE_MEDIA_CODEC = 3;
379 
380     /**
381      * The surface is obtained from {@link android.media.ImageReader}.
382      */
383     private static final int SURFACE_TYPE_IMAGE_READER = 4;
384 
385     /**
386      * Maximum number of surfaces supported by one {@link OutputConfiguration}.
387      *
388      * <p>The combined number of surfaces added by the constructor and
389      * {@link OutputConfiguration#addSurface} should not exceed this value.</p>
390      *
391      */
392     private static final int MAX_SURFACES_COUNT = 4;
393 
394     /**
395      * Create a new {@link OutputConfiguration} instance with a {@link Surface},
396      * with a surface group ID.
397      *
398      * <p>
399      * A surface group ID is used to identify which surface group this output surface belongs to. A
400      * surface group is a group of output surfaces that are not intended to receive camera output
401      * buffer streams simultaneously. The {@link CameraDevice} may be able to share the buffers used
402      * by all the surfaces from the same surface group, therefore may reduce the overall memory
403      * footprint. The application should only set the same set ID for the streams that are not
404      * simultaneously streaming. A negative ID indicates that this surface doesn't belong to any
405      * surface group. The default value is {@value #SURFACE_GROUP_ID_NONE}.</p>
406      *
407      * <p>For example, a video chat application that has an adaptive output resolution feature would
408      * need two (or more) output resolutions, to switch resolutions without any output glitches.
409      * However, at any given time, only one output is active to minimize outgoing network bandwidth
410      * and encoding overhead.  To save memory, the application should set the video outputs to have
411      * the same non-negative group ID, so that the camera device can share the same memory region
412      * for the alternating outputs.</p>
413      *
414      * <p>It is not an error to include output streams with the same group ID in the same capture
415      * request, but the resulting memory consumption may be higher than if the two streams were
416      * not in the same surface group to begin with, especially if the outputs have substantially
417      * different dimensions.</p>
418      *
419      * @param surfaceGroupId
420      *          A group ID for this output, used for sharing memory between multiple outputs.
421      * @param surface
422      *          A Surface for camera to output to.
423      *
424      */
OutputConfiguration(int surfaceGroupId, @NonNull Surface surface)425     public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface) {
426         this(surfaceGroupId, surface, ROTATION_0);
427     }
428 
429     /**
430      * Set the multi-resolution output flag.
431      *
432      * <p>Specify that this OutputConfiguration is part of a multi-resolution output stream group
433      * used by {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
434      *
435      * <p>This function must only be called for an OutputConfiguration with a non-negative
436      * group ID. And all OutputConfigurations of a MultiResolutionImageReader will have the same
437      * group ID and have this flag set.</p>
438      *
439      * @throws IllegalStateException If surface sharing is enabled via {@link #enableSurfaceSharing}
440      *         call, or no non-negative group ID has been set.
441      * @hide
442      */
setMultiResolutionOutput()443     public void setMultiResolutionOutput() {
444         if (mIsShared) {
445             throw new IllegalStateException("Multi-resolution output flag must not be set for " +
446                     "configuration with surface sharing");
447         }
448         if (mSurfaceGroupId == SURFACE_GROUP_ID_NONE) {
449             throw new IllegalStateException("Multi-resolution output flag should only be set for " +
450                     "surface with non-negative group ID");
451         }
452 
453         mIsMultiResolution = true;
454     }
455 
456     /**
457      * Set a specific device supported dynamic range profile.
458      *
459      * <p>Clients can choose from any profile advertised as supported in
460      * CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES
461      * queried using {@link DynamicRangeProfiles#getSupportedProfiles()}.
462      * If this is not explicitly set, then the default profile will be
463      * {@link DynamicRangeProfiles#STANDARD}.</p>
464      *
465      * <p>Do note that invalid combinations between the registered output
466      * surface pixel format and the configured dynamic range profile will
467      * cause capture session initialization failure. Invalid combinations
468      * include any 10-bit dynamic range profile advertised in
469      * {@link DynamicRangeProfiles#getSupportedProfiles()} combined with
470      * an output Surface pixel format different from {@link ImageFormat#PRIVATE}
471      * (the default for Surfaces initialized by {@link android.view.SurfaceView},
472      * {@link android.view.TextureView}, {@link android.media.MediaRecorder},
473      * {@link android.media.MediaCodec} etc.)
474      * or {@link ImageFormat#YCBCR_P010}.</p>
475      */
setDynamicRangeProfile(@ynamicRangeProfiles.Profile long profile)476     public void setDynamicRangeProfile(@DynamicRangeProfiles.Profile long profile) {
477         mDynamicRangeProfile = profile;
478     }
479 
480     /**
481      * Return current dynamic range profile.
482      *
483      * @return the currently set dynamic range profile
484      */
getDynamicRangeProfile()485     public @DynamicRangeProfiles.Profile long getDynamicRangeProfile() {
486         return mDynamicRangeProfile;
487     }
488 
489     /**
490      * Set a specific device-supported color space.
491      *
492      * <p>Clients can choose from any profile advertised as supported in
493      * {@link CameraCharacteristics#REQUEST_AVAILABLE_COLOR_SPACE_PROFILES}
494      * queried using {@link ColorSpaceProfiles#getSupportedColorSpaces}.
495      * When set, the colorSpace will override the default color spaces of the output targets,
496      * or the color space implied by the dataSpace passed into an {@link ImageReader}'s
497      * constructor.</p>
498      *
499      * @hide
500      */
501     @TestApi
setColorSpace(@onNull ColorSpace.Named colorSpace)502     public void setColorSpace(@NonNull ColorSpace.Named colorSpace) {
503         mColorSpace = colorSpace.ordinal();
504     }
505 
506     /**
507      * Clear the color space, such that the default color space will be used.
508      *
509      * @hide
510      */
511     @TestApi
clearColorSpace()512     public void clearColorSpace() {
513         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
514     }
515 
516     /**
517      * Return the current color space.
518      *
519      * @return the currently set color space
520      * @hide
521      */
522     @TestApi
523     @SuppressLint("MethodNameUnits")
getColorSpace()524     public @Nullable ColorSpace getColorSpace() {
525         if (mColorSpace != ColorSpaceProfiles.UNSPECIFIED) {
526             return ColorSpace.get(ColorSpace.Named.values()[mColorSpace]);
527         } else {
528             return null;
529         }
530     }
531 
532     /**
533      * Create a new {@link OutputConfiguration} instance.
534      *
535      * <p>This constructor takes an argument for desired camera rotation</p>
536      *
537      * @param surface
538      *          A Surface for camera to output to.
539      * @param rotation
540      *          The desired rotation to be applied on camera output. Value must be one of
541      *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
542      *          application should make sure corresponding surface size has width and height
543      *          transposed relative to the width and height without rotation. For example,
544      *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
545      *          application should set rotation to {@code ROTATION_90} and make sure the
546      *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
547      *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
548      * @hide
549      */
550     @SystemApi
OutputConfiguration(@onNull Surface surface, int rotation)551     public OutputConfiguration(@NonNull Surface surface, int rotation) {
552         this(SURFACE_GROUP_ID_NONE, surface, rotation);
553     }
554 
555     /**
556      * Create a new {@link OutputConfiguration} instance, with rotation and a group ID.
557      *
558      * <p>This constructor takes an argument for desired camera rotation and for the surface group
559      * ID.  See {@link #OutputConfiguration(int, Surface)} for details of the group ID.</p>
560      *
561      * @param surfaceGroupId
562      *          A group ID for this output, used for sharing memory between multiple outputs.
563      * @param surface
564      *          A Surface for camera to output to.
565      * @param rotation
566      *          The desired rotation to be applied on camera output. Value must be one of
567      *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
568      *          application should make sure corresponding surface size has width and height
569      *          transposed relative to the width and height without rotation. For example,
570      *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
571      *          application should set rotation to {@code ROTATION_90} and make sure the
572      *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
573      *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
574      * @hide
575      */
576     @SystemApi
OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation)577     public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation) {
578         checkNotNull(surface, "Surface must not be null");
579         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
580         mSurfaceGroupId = surfaceGroupId;
581         mSurfaceType = SURFACE_TYPE_UNKNOWN;
582         mSurfaces = new ArrayList<Surface>();
583         mSurfaces.add(surface);
584         mRotation = rotation;
585         mConfiguredSize = SurfaceUtils.getSurfaceSize(surface);
586         mConfiguredFormat = SurfaceUtils.getSurfaceFormat(surface);
587         mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(surface);
588         mConfiguredGenerationId = surface.getGenerationId();
589         mIsDeferredConfig = false;
590         mIsShared = false;
591         mPhysicalCameraId = null;
592         mIsMultiResolution = false;
593         mSensorPixelModesUsed = new ArrayList<Integer>();
594         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
595         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
596         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
597         mTimestampBase = TIMESTAMP_BASE_DEFAULT;
598         mMirrorMode = MIRROR_MODE_AUTO;
599         mReadoutTimestampEnabled = false;
600         mIsReadoutSensorTimestampBase = false;
601         mUsage = 0;
602     }
603 
604     /**
605      * Create a list of {@link OutputConfiguration} instances for the outputs used by a
606      * {@link android.hardware.camera2.MultiResolutionImageReader}.
607      *
608      * <p>This constructor takes an argument for a
609      * {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
610      *
611      * @param multiResolutionImageReader
612      *          The multi-resolution image reader object.
613      */
createInstancesForMultiResolutionOutput( @onNull MultiResolutionImageReader multiResolutionImageReader)614     public static @NonNull Collection<OutputConfiguration> createInstancesForMultiResolutionOutput(
615             @NonNull MultiResolutionImageReader multiResolutionImageReader)  {
616         checkNotNull(multiResolutionImageReader, "Multi-resolution image reader must not be null");
617 
618         int groupId = getAndIncreaseMultiResolutionGroupId();
619         ImageReader[] imageReaders = multiResolutionImageReader.getReaders();
620         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
621         for (int i = 0; i < imageReaders.length; i++) {
622             MultiResolutionStreamInfo streamInfo =
623                     multiResolutionImageReader.getStreamInfoForImageReader(imageReaders[i]);
624 
625             OutputConfiguration config = new OutputConfiguration(
626                     groupId, imageReaders[i].getSurface());
627             config.setPhysicalCameraId(streamInfo.getPhysicalCameraId());
628             config.setMultiResolutionOutput();
629             configs.add(config);
630 
631             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
632             // because regular and max resolution output configurations are used for DEFAULT mode
633             // and MAX_RESOLUTION mode respectively by default.
634         }
635 
636         return configs;
637     }
638 
639     /**
640      * Create a list of {@link OutputConfiguration} instances for a
641      * {@link MultiResolutionImageReader}.
642      *
643      * <p>This method can be used to create query OutputConfigurations for a
644      * MultiResolutionImageReader that can be included in a SessionConfiguration passed into
645      * {@link
646      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
647      * before opening and setting up a camera device in full, at which point {@link
648      * #setSurfacesForMultiResolutionOutput} can be used to link to the actual
649      * MultiResolutionImageReader.</p>
650      *
     * <p>This method takes the same arguments used to create a {@link
     * MultiResolutionImageReader}: a collection of {@link MultiResolutionStreamInfo}
653      * objects and the format.</p>
654      *
655      * @param streams The group of multi-resolution stream info objects, which are used to create a
656      *                multi-resolution image reader containing a number of ImageReaders.
657      * @param format The format of the MultiResolutionImageReader. This must be one of the {@link
658      *               android.graphics.ImageFormat} or {@link android.graphics.PixelFormat} constants
659      *               supported by the camera device. Note that not all formats are supported, like
660      *               {@link ImageFormat#NV21}. The supported multi-resolution reader format can be
661      *               queried by {@link MultiResolutionStreamConfigurationMap#getOutputFormats}.
662      *
663      * @return The list of {@link OutputConfiguration} objects for a MultiResolutionImageReader.
664      *
665      * @throws IllegalArgumentException If the {@code streams} is null or doesn't contain
666      *                                 at least 2 items, or if {@code format} isn't a valid camera
667      *                                 format.
668      *
669      * @see MultiResolutionImageReader
670      * @see MultiResolutionStreamInfo
671      */
672     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
createInstancesForMultiResolutionOutput( @onNull Collection<MultiResolutionStreamInfo> streams, @Format int format)673     public static @NonNull List<OutputConfiguration> createInstancesForMultiResolutionOutput(
674             @NonNull Collection<MultiResolutionStreamInfo> streams,
675             @Format int format)  {
676         if (streams == null || streams.size() <= 1) {
677             throw new IllegalArgumentException(
678                 "The streams list must contain at least 2 entries");
679         }
680         if (format == ImageFormat.NV21) {
681             throw new IllegalArgumentException(
682                     "NV21 format is not supported");
683         }
684 
685         int groupId = getAndIncreaseMultiResolutionGroupId();
686         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
687         for (MultiResolutionStreamInfo stream : streams) {
688             Size surfaceSize = new Size(stream.getWidth(), stream.getHeight());
689             OutputConfiguration config = new OutputConfiguration(
690                     groupId, format, surfaceSize);
691             config.setPhysicalCameraId(stream.getPhysicalCameraId());
692             config.setMultiResolutionOutput();
693             configs.add(config);
694 
695             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
696             // because regular and max resolution output configurations are used for DEFAULT mode
697             // and MAX_RESOLUTION mode respectively by default.
698         }
699 
700         return configs;
701     }
702 
703     /**
704      * Set the OutputConfiguration surfaces corresponding to the {@link MultiResolutionImageReader}.
705      *
706      * <p>This function should be used together with {@link
707      * #createInstancesForMultiResolutionOutput}. The application calls {@link
708      * #createInstancesForMultiResolutionOutput} first to create a list of
709      * OutputConfiguration objects without the actual MultiResolutionImageReader.
710      * Once the MultiResolutionImageReader is created later during full camera setup, the
711      * application then calls this function to assign the surfaces to the OutputConfiguration
712      * instances.</p>
713      *
714      * @param outputConfigurations The OutputConfiguration objects created by {@link
715      *                             #createInstancesForMultiResolutionOutput}
716      * @param multiResolutionImageReader The MultiResolutionImageReader object created from the same
717      *                                   MultiResolutionStreamInfo parameters as
718      *                                   {@code outputConfigurations}.
719      * @throws IllegalArgumentException If {@code outputConfigurations} or {@code
720      *                                  multiResolutionImageReader} is {@code null}, the {@code
721      *                                  outputConfigurations} and {@code multiResolutionImageReader}
722      *                                  sizes don't match, or if the
723      *                                  {@code multiResolutionImageReader}'s surfaces don't match
724      *                                  with the {@code outputConfigurations}.
725      * @throws IllegalStateException If {@code outputConfigurations} already contains valid output
726      *                               surfaces.
727      */
728     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
setSurfacesForMultiResolutionOutput( @onNull Collection<OutputConfiguration> outputConfigurations, @NonNull MultiResolutionImageReader multiResolutionImageReader)729     public static void setSurfacesForMultiResolutionOutput(
730             @NonNull Collection<OutputConfiguration> outputConfigurations,
731             @NonNull MultiResolutionImageReader multiResolutionImageReader) {
732         checkNotNull(outputConfigurations, "outputConfigurations must not be null");
733         checkNotNull(multiResolutionImageReader, "multiResolutionImageReader must not be null");
734         if (outputConfigurations.size() != multiResolutionImageReader.getReaders().length) {
735             throw new IllegalArgumentException(
736                     "outputConfigurations and multiResolutionImageReader sizes must match");
737         }
738 
739         for (OutputConfiguration config : outputConfigurations) {
740             String physicalCameraId = config.getPhysicalCameraId();
741             if (physicalCameraId == null) {
742                 physicalCameraId = "";
743             }
744             Surface surface = multiResolutionImageReader.getSurface(config.getConfiguredSize(),
745                     physicalCameraId);
746             config.addSurface(surface);
747         }
748     }
749 
750     /**
751      * Create a new {@link OutputConfiguration} instance, with desired Surface size and Surface
752      * source class.
753      * <p>
754      * This constructor takes an argument for desired Surface size and the Surface source class
755      * without providing the actual output Surface. This is used to setup an output configuration
756      * with a deferred Surface. The application can use this output configuration to create a
757      * session.
758      * </p>
759      *
760      * <p>Starting from {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V},
761      * the deferred Surface can be obtained: (1) from {@link android.view.SurfaceView}
762      * by calling {@link android.view.SurfaceHolder#getSurface}, (2) from
763      * {@link android.graphics.SurfaceTexture} via
764      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}, (3) from
765      * {@link android.media.MediaRecorder} via {@link android.media.MediaRecorder#getSurface} or
766      * {@link android.media.MediaCodec#createPersistentInputSurface}, or (4) from
767      * {@link android.media.MediaCodec} via {@link android.media.MediaCodec#createInputSurface} or
768      * {@link android.media.MediaCodec#createPersistentInputSurface}.</p>
769      *
770      * <ul>
771      * <li>Surfaces for {@link android.view.SurfaceView} and {@link android.graphics.SurfaceTexture}
772      * can be deferred until after {@link CameraDevice#createCaptureSession}. In that case, the
773      * output Surface must be set via {@link #addSurface}, and the Surface configuration must be
774      * finalized via {@link CameraCaptureSession#finalizeOutputConfigurations} before submitting
775      * a request with the Surface target.</li>
776      * <li>For all other target types, the output Surface must be set by {@link #addSurface},
777      * and {@link CameraCaptureSession#finalizeOutputConfigurations} is not needed because the
778      * OutputConfiguration used to create the session will contain the actual Surface.</li>
779      * </ul>
780      *
781      * <p>Before {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V}, only {@link
782      * android.view.SurfaceView} and {@link android.graphics.SurfaceTexture} are supported. Both
783      * kind of outputs can be deferred until after {@link
784      * CameraDevice#createCaptureSessionByOutputConfigurations}.</p>
785      *
786      * <p>An OutputConfiguration object created by this constructor can be used for {@link
787      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
788      * and {@link
789      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
790      * having called {@link #addSurface}.</p>
791      *
792      * @param surfaceSize Size for the deferred surface.
793      * @param klass a non-{@code null} {@link Class} object reference that indicates the source of
794      *            this surface. Only {@link android.view.SurfaceHolder SurfaceHolder.class},
795      *            {@link android.graphics.SurfaceTexture SurfaceTexture.class}, {@link
796      *            android.media.MediaRecorder MediaRecorder.class}, and
797      *            {@link android.media.MediaCodec MediaCodec.class} are supported.
798      *            Before {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V}, only
799      *            {@link android.view.SurfaceHolder SurfaceHolder.class} and {@link
800      *            android.graphics.SurfaceTexture SurfaceTexture.class} are supported.
801      * @throws IllegalArgumentException if the Surface source class is not supported, or Surface
802      *         size is zero.
803      */
OutputConfiguration(@onNull Size surfaceSize, @NonNull Class<T> klass)804     public <T> OutputConfiguration(@NonNull Size surfaceSize, @NonNull Class<T> klass) {
805         checkNotNull(surfaceSize, "surfaceSize must not be null");
806         checkNotNull(klass, "klass must not be null");
807         if (klass == android.view.SurfaceHolder.class) {
808             mSurfaceType = SURFACE_TYPE_SURFACE_VIEW;
809             mIsDeferredConfig = true;
810         } else if (klass == android.graphics.SurfaceTexture.class) {
811             mSurfaceType = SURFACE_TYPE_SURFACE_TEXTURE;
812             mIsDeferredConfig = true;
813         } else if (klass == android.media.MediaRecorder.class) {
814             mSurfaceType = SURFACE_TYPE_MEDIA_RECORDER;
815             mIsDeferredConfig = false;
816         } else if (klass == android.media.MediaCodec.class) {
817             mSurfaceType = SURFACE_TYPE_MEDIA_CODEC;
818             mIsDeferredConfig = false;
819         } else {
820             mSurfaceType = SURFACE_TYPE_UNKNOWN;
821             throw new IllegalArgumentException("Unknown surface source class type");
822         }
823 
824         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
825             throw new IllegalArgumentException("Surface size needs to be non-zero");
826         }
827 
828         mSurfaceGroupId = SURFACE_GROUP_ID_NONE;
829         mSurfaces = new ArrayList<Surface>();
830         mRotation = ROTATION_0;
831         mConfiguredSize = surfaceSize;
832         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
833         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
834         mConfiguredGenerationId = 0;
835         mIsShared = false;
836         mPhysicalCameraId = null;
837         mIsMultiResolution = false;
838         mSensorPixelModesUsed = new ArrayList<Integer>();
839         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
840         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
841         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
842         mReadoutTimestampEnabled = false;
843         mIsReadoutSensorTimestampBase = false;
844         mUsage = 0;
845     }
846 
847     /**
848      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
849      * format and size.
850      *
851      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
852      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
853      * before creating the capture session.</p>
854      *
855      * <p>An OutputConfiguration object created by this constructor can be used for {@link
856      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
857      * and {@link
858      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
859      * having called {@link #addSurface}.</p>
860      *
861      * @param format The format of the ImageReader output. This must be one of the
862      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
863      *               constants. Note that not all formats are supported by the camera device.
864      * @param surfaceSize Size for the ImageReader surface.
865      * @throws IllegalArgumentException if the Surface size is null or zero.
866      */
867     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
OutputConfiguration(@ormat int format, @NonNull Size surfaceSize)868     public OutputConfiguration(@Format int format, @NonNull Size surfaceSize) {
869         this(format, surfaceSize,
870                 format == ImageFormat.PRIVATE ? 0 : HardwareBuffer.USAGE_CPU_READ_OFTEN);
871     }
872 
873     /**
874      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
875      * surfaceGroupId, format, and size.
876      *
877      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
878      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
879      * before creating the capture session.</p>
880      *
881      * <p>An OutputConfiguration object created by this constructor can be used for {@link
882      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
883      * and {@link
884      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
885      * having called {@link #addSurface}.</p>
886      *
887      * @param surfaceGroupId A group ID for this output, used for sharing memory between multiple
888      *                       outputs.
889      * @param format The format of the ImageReader output. This must be one of the
890      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
891      *               constants. Note that not all formats are supported by the camera device.
892      * @param surfaceSize Size for the ImageReader surface.
893      * @throws IllegalArgumentException if the Surface size is null or zero.
894      */
895     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
OutputConfiguration(int surfaceGroupId, @Format int format, @NonNull Size surfaceSize)896     public OutputConfiguration(int surfaceGroupId, @Format int format, @NonNull Size surfaceSize) {
897         this(surfaceGroupId, format, surfaceSize,
898                 format == ImageFormat.PRIVATE ? 0 : HardwareBuffer.USAGE_CPU_READ_OFTEN);
899     }
900 
901     /**
902      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
903      * format, size, and usage flags.
904      *
905      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
906      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
907      * before creating the capture session.</p>
908      *
909      * <p>An OutputConfiguration object created by this constructor can be used for {@link
910      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
911      * and {@link
912      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
913      * having called {@link #addSurface}.</p>
914      *
915      * @param format The format of the ImageReader output. This must be one of the
916      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
917      *               constants. Note that not all formats are supported by the camera device.
918      * @param surfaceSize Size for the ImageReader surface.
919      * @param usage The usage flags of the ImageReader output surface.
920      * @throws IllegalArgumentException if the Surface size is null or zero.
921      */
    @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
    public OutputConfiguration(@Format int format, @NonNull Size surfaceSize, @Usage long usage) {
        // Delegate with SURFACE_GROUP_ID_NONE: this output is not part of any surface group
        // (the group id is only used for memory sharing between multiple outputs).
        this(SURFACE_GROUP_ID_NONE, format, surfaceSize, usage);
    }
926 
927     /**
928      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
929      * surface group id, format, size, and usage flags.
930      *
931      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
932      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
933      * before creating the capture session.</p>
934      *
935      * <p>An OutputConfiguration object created by this constructor can be used for {@link
936      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
937      * and {@link
938      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
939      * having called {@link #addSurface}.</p>
940      *
941      * @param surfaceGroupId A group ID for this output, used for sharing memory between multiple
942      *                       outputs.
943      * @param format The format of the ImageReader output. This must be one of the
944      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
945      *               constants. Note that not all formats are supported by the camera device.
946      * @param surfaceSize Size for the ImageReader surface.
947      * @param usage The usage flags of the ImageReader output surface.
948      * @throws IllegalArgumentException if the Surface size is null or zero.
949      */
950     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
OutputConfiguration(int surfaceGroupId, @Format int format, @NonNull Size surfaceSize, @Usage long usage)951     public OutputConfiguration(int surfaceGroupId, @Format int format,
952             @NonNull Size surfaceSize, @Usage long usage) {
953         checkNotNull(surfaceSize, "surfaceSize must not be null");
954         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
955             throw new IllegalArgumentException("Surface size needs to be non-zero");
956         }
957 
958         mSurfaceType = SURFACE_TYPE_IMAGE_READER;
959         mSurfaceGroupId = surfaceGroupId;
960         mSurfaces = new ArrayList<Surface>();
961         mRotation = ROTATION_0;
962         mConfiguredSize = surfaceSize;
963         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(format);
964         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(format);
965         mConfiguredGenerationId = 0;
966         mIsDeferredConfig = false;
967         mIsShared = false;
968         mPhysicalCameraId = null;
969         mIsMultiResolution = false;
970         mSensorPixelModesUsed = new ArrayList<Integer>();
971         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
972         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
973         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
974         mReadoutTimestampEnabled = false;
975         mIsReadoutSensorTimestampBase = false;
976         mUsage = usage;
977     }
978 
979     /**
980      * Enable multiple surfaces sharing the same OutputConfiguration
981      *
982      * <p>For advanced use cases, a camera application may require more streams than the combination
983      * guaranteed by {@link CameraDevice#createCaptureSession}. In this case, more than one
984      * compatible surface can be attached to an OutputConfiguration so that they map to one
985      * camera stream, and the outputs share memory buffers when possible. Due to buffer sharing
986      * clients should be careful when adding surface outputs that modify their input data. If such
987      * case exists, camera clients should have an additional mechanism to synchronize read and write
988      * access between individual consumers.</p>
989      *
     * <p>Two surfaces are compatible in the below cases:</p>
     *
     * <ul>
     *
     * <li> Surfaces with the same size, format, dataSpace, and Surface source class. In this case,
     * {@link CameraDevice#createCaptureSessionByOutputConfigurations} is guaranteed to succeed.
     *
     * <li> Surfaces with the same size, format, and dataSpace, but different Surface source classes
     * that are generally not compatible. However, on some devices, the underlying camera device is
     * able to use the same buffer layout for both surfaces. The only way to discover if this is the
     * case is to create a capture session with that output configuration. For example, if the
     * camera device uses the same private buffer format between a SurfaceView/SurfaceTexture and a
     * MediaRecorder/MediaCodec, {@link CameraDevice#createCaptureSessionByOutputConfigurations}
     * will succeed. Otherwise, it fails with {@link
     * CameraCaptureSession.StateCallback#onConfigureFailed}.
     * </ul>
1004      *
1005      * <p>To enable surface sharing, this function must be called before {@link
1006      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1007      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function after
1008      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect.</p>
1009      *
1010      * <p>Up to {@link #getMaxSharedSurfaceCount} surfaces can be shared for an OutputConfiguration.
1011      * The supported surfaces for sharing must be of type SurfaceTexture, SurfaceView,
1012      * MediaRecorder, MediaCodec, or implementation defined ImageReader.</p>
1013      *
1014      * <p>This function must not be called from OutputConfigurations created by {@link
1015      * #createInstancesForMultiResolutionOutput}.</p>
1016      *
1017      * @throws IllegalStateException If this OutputConfiguration is created via {@link
1018      * #createInstancesForMultiResolutionOutput} to back a MultiResolutionImageReader.
1019      */
enableSurfaceSharing()1020     public void enableSurfaceSharing() {
1021         if (mIsMultiResolution) {
1022             throw new IllegalStateException("Cannot enable surface sharing on "
1023                     + "multi-resolution output configurations");
1024         }
1025         mIsShared = true;
1026     }
1027 
1028     /**
1029      * Set the id of the physical camera for this OutputConfiguration
1030      *
1031      * <p>In the case one logical camera is made up of multiple physical cameras, it could be
1032      * desirable for the camera application to request streams from individual physical cameras.
1033      * This call achieves it by mapping the OutputConfiguration to the physical camera id.</p>
1034      *
1035      * <p>The valid physical camera ids can be queried by {@link
1036      * CameraCharacteristics#getPhysicalCameraIds}.</p>
1037      *
1038      * <p>Passing in a null physicalCameraId means that the OutputConfiguration is for a logical
1039      * stream.</p>
1040      *
1041      * <p>This function must be called before {@link
1042      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1043      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function
1044      * after {@link CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1045      * CameraDevice#createReprocessableCaptureSessionByConfigurations} has no effect.</p>
1046      *
1047      * <p>As of {@link android.os.Build.VERSION_CODES#S Android 12}, an image buffer from a
1048      * physical camera stream can be used for reprocessing to logical camera streams and streams
1049      * from the same physical camera if the camera device supports multi-resolution input and output
1050      * streams. See {@link CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP}
1051      * for details. The behaviors of reprocessing from a non-physical camera stream to a physical
1052      * camera stream, and from a physical camera stream to a physical camera stream of different
1053      * physical camera, are device-specific and not guaranteed to be supported.</p>
1054      *
1055      * <p>On prior API levels, the surface belonging to a physical camera OutputConfiguration must
1056      * not be used as input or output of a reprocessing request. </p>
1057      */
    public void setPhysicalCameraId(@Nullable String physicalCameraId) {
        // Null means this configuration targets the logical camera stream rather than a
        // specific physical sub-camera (see the javadoc above).
        mPhysicalCameraId = physicalCameraId;
    }
1061 
1062     /**
1063      * Add a sensor pixel mode that this OutputConfiguration will be used in.
1064      *
1065      * <p> In the case that this output stream configuration (format, width, height) is
1066      * available through {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
1067      * configurations and
     * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION}
1069      * configurations, the camera sub-system will assume that this {@link OutputConfiguration} will
1070      * be used only with {@link android.hardware.camera2.CaptureRequest}s which has
1071      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1072      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT}.
1073      * In such cases, if clients intend to use the
1074      * {@link OutputConfiguration}(s) in a {@link android.hardware.camera2.CaptureRequest} with
1075      * other sensor pixel modes, they must specify which
1076      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE}(s) they will use this
1077      * {@link OutputConfiguration} with, by calling this method.
1078      *
1079      * In case this output stream configuration (format, width, height) is only in
     * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION}
1081      * configurations, this output target must only be used with
1082      * {@link android.hardware.camera2.CaptureRequest}s which has
1083      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1084      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} and that
1085      * is what the camera sub-system will assume. If clients add
1086      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} in this
1087      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
1088      *
1089      * In case this output stream configuration (format, width, height) is only in
     * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
1091      * configurations, this output target must only be used with
1092      * {@link android.hardware.camera2.CaptureRequest}s which has
1093      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1094      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} and that is what
1095      * the camera sub-system will assume. If clients add
1096      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} in this
1097      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
1098      *
1099      * @param sensorPixelModeUsed The sensor pixel mode this OutputConfiguration will be used with
1100      * </p>
1101      *
1102      */
addSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)1103     public void addSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
1104         // Verify that the values are in range.
1105         if (sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT &&
1106                 sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
1107             throw new IllegalArgumentException("Not a valid sensor pixel mode " +
1108                     sensorPixelModeUsed);
1109         }
1110 
1111         if (mSensorPixelModesUsed.contains(sensorPixelModeUsed)) {
1112             // Already added, ignore;
1113             return;
1114         }
1115         mSensorPixelModesUsed.add(sensorPixelModeUsed);
1116     }
1117 
1118     /**
1119      * Remove a sensor pixel mode, previously added through addSensorPixelModeUsed, from this
1120      * OutputConfiguration.
1121      *
1122      * <p> Sensor pixel modes added via calls to {@link #addSensorPixelModeUsed} can also be removed
1123      * from the OutputConfiguration.</p>
1124      *
1125      * @param sensorPixelModeUsed The sensor pixel mode to be removed.
1126      *
1127      * @throws IllegalArgumentException If the sensor pixel mode wasn't previously added
1128      *                                  through {@link #addSensorPixelModeUsed}.
1129      */
removeSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)1130     public void removeSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
1131       if (!mSensorPixelModesUsed.remove(Integer.valueOf(sensorPixelModeUsed))) {
1132             throw new IllegalArgumentException("sensorPixelMode " + sensorPixelModeUsed +
1133                     "is not part of this output configuration");
1134       }
1135     }
1136 
1137     /**
1138      * Check if this configuration is for a physical camera.
1139      *
1140      * <p>This returns true if the output configuration was for a physical camera making up a
1141      * logical multi camera via {@link OutputConfiguration#setPhysicalCameraId}.</p>
1142      * @hide
1143      */
isForPhysicalCamera()1144     public boolean isForPhysicalCamera() {
1145         return (mPhysicalCameraId != null);
1146     }
1147 
1148     /**
1149      * Check if this configuration has deferred configuration.
1150      *
1151      * <p>This will return true if the output configuration was constructed with {@link
1152      * android.view.SurfaceView} or {@link android.graphics.SurfaceTexture} deferred by
1153      * {@link OutputConfiguration#OutputConfiguration(Size, Class)}. It will return true even after
1154      * the deferred surface is added later by {@link OutputConfiguration#addSurface}.</p>
1155      *
1156      * @return true if this configuration has deferred surface.
1157      * @hide
1158      */
isDeferredConfiguration()1159     public boolean isDeferredConfiguration() {
1160         return mIsDeferredConfig;
1161     }
1162 
1163     /**
1164      * Add a surface to this OutputConfiguration.
1165      *
1166      * <p> This function can be called before or after {@link
1167      * CameraDevice#createCaptureSessionByOutputConfigurations}. If it's called after,
1168      * the application must finalize the capture session with
1169      * {@link CameraCaptureSession#finalizeOutputConfigurations}. It is possible to call this method
1170      * after the output configurations have been finalized only in cases of enabled surface sharing
1171      * see {@link #enableSurfaceSharing}. The modified output configuration must be updated with
1172      * {@link CameraCaptureSession#updateOutputConfiguration}. If this function is called before
1173      * session creation, {@link CameraCaptureSession#finalizeOutputConfigurations} doesn't need to
1174      * be called.</p>
1175      *
1176      * <p> If the OutputConfiguration was constructed by {@link
1177      * OutputConfiguration#OutputConfiguration(Size, Class)}, the added surface must be obtained:
1178      * <ul>
1179      * <li>from {@link android.view.SurfaceView} by calling
1180      * {@link android.view.SurfaceHolder#getSurface}</li>
1181      * <li>from {@link android.graphics.SurfaceTexture} by calling
1182      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}</li>
1183      * <li>from {@link android.media.MediaRecorder} by calling
1184      * {@link android.media.MediaRecorder#getSurface} or {@link
1185      * android.media.MediaCodec#createPersistentInputSurface}</li>
1186      * <li>from {@link android.media.MediaCodec} by calling
1187      * {@link android.media.MediaCodec#createInputSurface} or
1188      * {@link android.media.MediaCodec#createPersistentInputSurface()}</li>
1189      * </ul>
1190      *
1191      * <p> If the OutputConfiguration was constructed by {@link #OutputConfiguration(int, Size)}
1192      * or its variants, the added surface must be obtained from {@link android.media.ImageReader}
1193      * by calling {@link android.media.ImageReader#getSurface}.</p>
1194      *
1195      * <p> If the OutputConfiguration was constructed by other constructors, the added
1196      * surface must be compatible with the existing surface. See {@link #enableSurfaceSharing} for
1197      * details of compatible surfaces.</p>
1198      *
1199      * <p> If the OutputConfiguration already contains a Surface, {@link #enableSurfaceSharing} must
1200      * be called before calling this function to add a new Surface.</p>
1201      *
1202      * @param surface The surface to be added.
1203      * @throws IllegalArgumentException if the Surface is invalid, the Surface's
1204      *         dataspace/format doesn't match, or adding the Surface would exceed number of
1205      *         shared surfaces supported.
1206      * @throws IllegalStateException if the Surface was already added to this OutputConfiguration,
1207      *         or if the OutputConfiguration is not shared and it already has a surface associated
1208      *         with it.
1209      */
addSurface(@onNull Surface surface)1210     public void addSurface(@NonNull Surface surface) {
1211         checkNotNull(surface, "Surface must not be null");
1212         if (mSurfaces.contains(surface)) {
1213             throw new IllegalStateException("Surface is already added!");
1214         }
1215         if (mSurfaces.size() == 1 && !mIsShared) {
1216             throw new IllegalStateException("Cannot have 2 surfaces for a non-sharing configuration");
1217         }
1218         if (mSurfaces.size() + 1 > MAX_SURFACES_COUNT) {
1219             throw new IllegalArgumentException("Exceeds maximum number of surfaces");
1220         }
1221 
1222         // This will throw IAE is the surface was abandoned.
1223         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
1224         if (!surfaceSize.equals(mConfiguredSize)) {
1225             Log.w(TAG, "Added surface size " + surfaceSize +
1226                     " is different than pre-configured size " + mConfiguredSize +
1227                     ", the pre-configured size will be used.");
1228         }
1229 
1230         if (mConfiguredFormat != SurfaceUtils.getSurfaceFormat(surface)) {
1231             throw new IllegalArgumentException("The format of added surface format doesn't match");
1232         }
1233 
1234         // If the surface format is PRIVATE, do not enforce dataSpace because camera device may
1235         // override it.
1236         if (mConfiguredFormat != ImageFormat.PRIVATE &&
1237                 mConfiguredDataspace != SurfaceUtils.getSurfaceDataspace(surface)) {
1238             throw new IllegalArgumentException("The dataspace of added surface doesn't match");
1239         }
1240 
1241         mSurfaces.add(surface);
1242     }
1243 
1244     /**
1245      * Remove a surface from this OutputConfiguration.
1246      *
1247      * <p> Surfaces added via calls to {@link #addSurface} can also be removed from the
1248      *  OutputConfiguration. The only notable exception is the surface associated with
1249      *  the OutputConfiguration (see {@link #getSurface}) which was passed as part of the
1250      *  constructor or was added first in the case of
1251      *  {@link OutputConfiguration#OutputConfiguration(Size, Class)}, {@link
1252      *  OutputConfiguration#OutputConfiguration(int, Size)}, {@link
1253      *  OutputConfiguration#OutputConfiguration(int, Size, long)}, {@link
1254      *  OutputConfiguration#OutputConfiguration(int, int, Size)}, {@link
1255      *  OutputConfiguration#OutputConfiguration(int, int, Size, long)}.</p>
1256      *
1257      * @param surface The surface to be removed.
1258      *
1259      * @throws IllegalArgumentException If the surface is associated with this OutputConfiguration
1260      *                                  (see {@link #getSurface}) or the surface didn't get added
1261      *                                  with {@link #addSurface}.
1262      */
removeSurface(@onNull Surface surface)1263     public void removeSurface(@NonNull Surface surface) {
1264         checkNotNull(surface, "Surface must not be null");
1265         if (getSurface() == surface) {
1266             throw new IllegalArgumentException(
1267                     "Cannot remove surface associated with this output configuration");
1268         }
1269         if (!mSurfaces.remove(surface)) {
1270             throw new IllegalArgumentException("Surface is not part of this output configuration");
1271         }
1272     }
1273 
1274     /**
1275      * Set stream use case for this OutputConfiguration
1276      *
1277      * <p>Stream use case is used to describe the purpose of the stream, whether it's for live
1278      * preview, still image capture, video recording, or their combinations. This flag is useful
1279      * for scenarios where the immediate consumer target isn't sufficient to indicate the stream's
1280      * usage.</p>
1281      *
1282      * <p>The main difference between stream use case and capture intent is that the former
1283      * enables the camera device to optimize camera hardware and software pipelines based on user
1284      * scenarios for each stream, whereas the latter is mainly a hint to camera to decide
1285      * optimal 3A strategy that's applicable to the whole session. The camera device carries out
1286      * configurations such as selecting tuning parameters, choosing camera sensor mode, and
1287      * constructing image processing pipeline based on the streams's use cases. Capture intents are
1288      * then used to fine tune 3A behaviors such as adjusting AE/AF convergence speed, and capture
1289      * intents may change during the lifetime of a session. For example, for a session with a
1290      * PREVIEW_VIDEO_STILL use case stream and a STILL_CAPTURE use case stream, the capture intents
1291      * may be PREVIEW with fast 3A convergence speed and flash metering with automatic control for
1292      * live preview, STILL_CAPTURE with best 3A parameters for still photo capture, or VIDEO_RECORD
1293      * with slower 3A convergence speed for better video playback experience.</p>
1294      *
1295      * <p>The supported stream use cases supported by a camera device can be queried by
1296      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}.</p>
1297      *
1298      * <p>The mandatory stream combinations involving stream use cases can be found at {@link
1299      * android.hardware.camera2.CameraDevice#createCaptureSession}, as well as queried via
1300      * {@link android.hardware.camera2.params.MandatoryStreamCombination}. The application is
1301      * strongly recommended to select one of the guaranteed stream combinations where all streams'
1302      * use cases are set to non-DEFAULT values. If the application chooses a stream combination
1303      * not in the mandatory list, the camera device may ignore some use case flags due to
1304      * hardware constraints or implementation details.</p>
1305      *
1306      * <p>This function must be called before {@link CameraDevice#createCaptureSession} or {@link
1307      * CameraDevice#createCaptureSessionByOutputConfigurations}. Calling this function after
1308      * {@link CameraDevice#createCaptureSession} or
1309      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect to the camera
1310      * session.</p>
1311      *
1312      * @param streamUseCase The stream use case to be set.
1313      *
1314      * @throws IllegalArgumentException If the streamUseCase isn't within the range of valid
1315      *                                  values.
1316      */
setStreamUseCase(@treamUseCase long streamUseCase)1317     public void setStreamUseCase(@StreamUseCase long streamUseCase) {
1318         // Verify that the value is in range
1319         long maxUseCaseValue = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
1320         if (streamUseCase > maxUseCaseValue &&
1321                 streamUseCase < CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
1322             throw new IllegalArgumentException("Not a valid stream use case value " +
1323                     streamUseCase);
1324         }
1325 
1326         mStreamUseCase = streamUseCase;
1327     }
1328 
1329     /**
1330      * Get the current stream use case
1331      *
1332      * <p>If no {@link #setStreamUseCase} is called first, this function returns
1333      * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT DEFAULT}.</p>
1334      *
1335      * @return the currently set stream use case
1336      */
getStreamUseCase()1337     public long getStreamUseCase() {
1338         return mStreamUseCase;
1339     }
1340 
1341     /**
1342      * Set timestamp base for this output target
1343      *
1344      * <p>Timestamp base describes the time domain of images from this
1345      * camera output and its relationship with {@link
1346      * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.</p>
1347      *
1348      * <p>If this function is not called, the timestamp base for this output
1349      * is {@link #TIMESTAMP_BASE_DEFAULT}, with which the camera device adjusts
1350      * timestamps based on the output target.</p>
1351      *
1352      * <p>See {@link #TIMESTAMP_BASE_DEFAULT}, {@link #TIMESTAMP_BASE_SENSOR},
1353      * and {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED} for details of each timestamp base.</p>
1354      *
1355      * @param timestampBase The timestamp base to be set.
1356      *
1357      * @throws IllegalArgumentException If the timestamp base isn't within the range of valid
1358      *                                  values.
1359      */
setTimestampBase(@imestampBase int timestampBase)1360     public void setTimestampBase(@TimestampBase int timestampBase) {
1361         // Verify that the value is in range
1362         if (timestampBase < TIMESTAMP_BASE_DEFAULT ||
1363                 timestampBase > TIMESTAMP_BASE_READOUT_SENSOR) {
1364             throw new IllegalArgumentException("Not a valid timestamp base value " +
1365                     timestampBase);
1366         }
1367 
1368         if (timestampBase == TIMESTAMP_BASE_READOUT_SENSOR) {
1369             mTimestampBase = TIMESTAMP_BASE_SENSOR;
1370             mReadoutTimestampEnabled = true;
1371             mIsReadoutSensorTimestampBase = true;
1372         } else {
1373             mTimestampBase = timestampBase;
1374             mIsReadoutSensorTimestampBase = false;
1375         }
1376     }
1377 
1378     /**
1379      * Get the current timestamp base
1380      *
1381      * <p>If no {@link #setTimestampBase} is called first, this function returns
1382      * {@link #TIMESTAMP_BASE_DEFAULT}.</p>
1383      *
1384      * @return The currently set timestamp base
1385      */
getTimestampBase()1386     public @TimestampBase int getTimestampBase() {
1387         if (mIsReadoutSensorTimestampBase) {
1388             return TIMESTAMP_BASE_READOUT_SENSOR;
1389         } else {
1390             return mTimestampBase;
1391         }
1392     }
1393 
1394     /**
1395      * Set the mirroring mode for this output target
1396      *
1397      * <p>If this function is not called, the mirroring mode for this output is
1398      * {@link #MIRROR_MODE_AUTO}, with which the camera API will mirror the output images
1399      * horizontally for front facing camera.</p>
1400      *
1401      * <p>For efficiency, the mirror effect is applied as a transform flag, so it is only effective
1402      * in some outputs. It works automatically for SurfaceView and TextureView outputs. For manual
1403      * use of SurfaceTexture, it is reflected in the value of
1404      * {@link android.graphics.SurfaceTexture#getTransformMatrix}. For other end points, such as
1405      * ImageReader, MediaRecorder, or MediaCodec, the mirror mode has no effect. If mirroring is
1406      * needed for such outputs, the application needs to mirror the image buffers itself before
1407      * passing them onward.</p>
1408      */
setMirrorMode(@irrorMode int mirrorMode)1409     public void setMirrorMode(@MirrorMode int mirrorMode) {
1410         // Verify that the value is in range
1411         if (mirrorMode < MIRROR_MODE_AUTO ||
1412                 mirrorMode > MIRROR_MODE_V) {
1413             throw new IllegalArgumentException("Not a valid mirror mode " + mirrorMode);
1414         }
1415         mMirrorMode = mirrorMode;
1416     }
1417 
1418     /**
1419      * Get the current mirroring mode
1420      *
1421      * <p>If no {@link #setMirrorMode} is called first, this function returns
1422      * {@link #MIRROR_MODE_AUTO}.</p>
1423      *
1424      * @return The currently set mirroring mode
1425      */
getMirrorMode()1426     public @MirrorMode int getMirrorMode() {
1427         return mMirrorMode;
1428     }
1429 
1430     /**
1431      * Use the camera sensor's readout time for the image timestamp.
1432      *
1433      * <p>The start of the camera sensor readout after exposure. For a rolling shutter camera
1434      * sensor, the timestamp is typically equal to {@code (the start of exposure time) +
1435      * (exposure time) + (certain fixed offset)}. The fixed offset can vary per session, depending
1436      * on the underlying sensor configuration. The benefit of using readout time is that when
1437      * camera runs in a fixed frame rate, the timestamp intervals between frames are constant.</p>
1438      *
1439      * <p>Readout timestamp is supported only if {@link
1440      * CameraCharacteristics#SENSOR_READOUT_TIMESTAMP} is
1441      * {@link CameraMetadata#SENSOR_READOUT_TIMESTAMP_HARDWARE}.</p>
1442      *
1443      * <p>As long as readout timestamp is supported, if the timestamp base is
1444      * {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}, or if the timestamp base is DEFAULT for a
1445      * SurfaceView output, the image timestamps for the output are always readout time regardless
1446      * of whether this function is called.</p>
1447      *
1448      * @param on The output image timestamp is the start of exposure time if false, and
1449      *           the start of readout time if true.
1450      */
setReadoutTimestampEnabled(boolean on)1451     public void setReadoutTimestampEnabled(boolean on) {
1452         mReadoutTimestampEnabled = on;
1453     }
1454 
1455     /** Whether readout timestamp is used for this OutputConfiguration.
1456      *
1457      * @see #setReadoutTimestampEnabled
1458      */
isReadoutTimestampEnabled()1459     public boolean isReadoutTimestampEnabled() {
1460         return mReadoutTimestampEnabled;
1461     }
1462 
    /**
     * Create a new {@link OutputConfiguration} instance with another {@link OutputConfiguration}
     * instance.
     *
     * @param other Another {@link OutputConfiguration} instance to be copied.
     *
     * @hide
     */
    public OutputConfiguration(@NonNull OutputConfiguration other) {
        // Explicit null check kept despite @NonNull: the annotation is not enforced at runtime.
        if (other == null) {
            throw new IllegalArgumentException("OutputConfiguration shouldn't be null");
        }

        // NOTE(review): this is a shallow copy — mSurfaces and mSensorPixelModesUsed are the
        // same list objects as other's, so mutating either instance affects both. Presumably
        // intentional; confirm before changing.
        this.mSurfaces = other.mSurfaces;
        this.mRotation = other.mRotation;
        this.mSurfaceGroupId = other.mSurfaceGroupId;
        this.mSurfaceType = other.mSurfaceType;
        this.mConfiguredDataspace = other.mConfiguredDataspace;
        this.mConfiguredFormat = other.mConfiguredFormat;
        this.mConfiguredSize = other.mConfiguredSize;
        this.mConfiguredGenerationId = other.mConfiguredGenerationId;
        this.mIsDeferredConfig = other.mIsDeferredConfig;
        this.mIsShared = other.mIsShared;
        this.mPhysicalCameraId = other.mPhysicalCameraId;
        this.mIsMultiResolution = other.mIsMultiResolution;
        this.mSensorPixelModesUsed = other.mSensorPixelModesUsed;
        this.mDynamicRangeProfile = other.mDynamicRangeProfile;
        this.mColorSpace = other.mColorSpace;
        this.mStreamUseCase = other.mStreamUseCase;
        this.mTimestampBase = other.mTimestampBase;
        this.mMirrorMode = other.mMirrorMode;
        this.mReadoutTimestampEnabled = other.mReadoutTimestampEnabled;
        this.mUsage = other.mUsage;
    }
1497 
1498     /**
1499      * Create an OutputConfiguration from Parcel.
1500      */
OutputConfiguration(@onNull Parcel source)1501     private OutputConfiguration(@NonNull Parcel source) {
1502         int rotation = source.readInt();
1503         int surfaceSetId = source.readInt();
1504         int surfaceType = source.readInt();
1505         int width = source.readInt();
1506         int height = source.readInt();
1507         boolean isDeferred = source.readInt() == 1;
1508         boolean isShared = source.readInt() == 1;
1509         ArrayList<Surface> surfaces = new ArrayList<Surface>();
1510         source.readTypedList(surfaces, Surface.CREATOR);
1511         String physicalCameraId = source.readString();
1512         boolean isMultiResolutionOutput = source.readInt() == 1;
1513         int[] sensorPixelModesUsed = source.createIntArray();
1514 
1515         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
1516         long dynamicRangeProfile = source.readLong();
1517         DynamicRangeProfiles.checkProfileValue(dynamicRangeProfile);
1518         int colorSpace = source.readInt();
1519         long streamUseCase = source.readLong();
1520 
1521         int timestampBase = source.readInt();
1522         int mirrorMode = source.readInt();
1523         boolean readoutTimestampEnabled = source.readInt() == 1;
1524         int format = source.readInt();
1525         int dataSpace = source.readInt();
1526         long usage = source.readLong();
1527 
1528         mSurfaceGroupId = surfaceSetId;
1529         mRotation = rotation;
1530         mSurfaces = surfaces;
1531         mConfiguredSize = new Size(width, height);
1532         mIsDeferredConfig = isDeferred;
1533         mIsShared = isShared;
1534         mSurfaces = surfaces;
1535         mUsage = 0;
1536         if (mSurfaces.size() > 0) {
1537             mSurfaceType = SURFACE_TYPE_UNKNOWN;
1538             mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
1539             mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
1540             mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
1541         } else {
1542             mSurfaceType = surfaceType;
1543             if (mSurfaceType != SURFACE_TYPE_IMAGE_READER) {
1544                 mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(
1545                         ImageFormat.PRIVATE);
1546                 mConfiguredDataspace =
1547                         StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
1548             } else {
1549                 mConfiguredFormat = format;
1550                 mConfiguredDataspace = dataSpace;
1551                 mUsage = usage;
1552             }
1553             mConfiguredGenerationId = 0;
1554         }
1555         mPhysicalCameraId = physicalCameraId;
1556         mIsMultiResolution = isMultiResolutionOutput;
1557         mSensorPixelModesUsed = convertIntArrayToIntegerList(sensorPixelModesUsed);
1558         mDynamicRangeProfile = dynamicRangeProfile;
1559         mColorSpace = colorSpace;
1560         mStreamUseCase = streamUseCase;
1561         mTimestampBase = timestampBase;
1562         mMirrorMode = mirrorMode;
1563         mReadoutTimestampEnabled = readoutTimestampEnabled;
1564     }
1565 
1566     /**
1567      * Get the maximum supported shared {@link Surface} count.
1568      *
1569      * @return the maximum number of surfaces that can be added per each OutputConfiguration.
1570      *
1571      * @see #enableSurfaceSharing
1572      */
getMaxSharedSurfaceCount()1573     public int getMaxSharedSurfaceCount() {
1574         return MAX_SURFACES_COUNT;
1575     }
1576 
1577     /**
1578      * Get the {@link Surface} associated with this {@link OutputConfiguration}.
1579      *
1580      * If more than one surface is associated with this {@link OutputConfiguration}, return the
1581      * first one as specified in the constructor or {@link OutputConfiguration#addSurface}.
1582      */
getSurface()1583     public @Nullable Surface getSurface() {
1584         if (mSurfaces.size() == 0) {
1585             return null;
1586         }
1587 
1588         return mSurfaces.get(0);
1589     }
1590 
1591     /**
1592      * Get the immutable list of surfaces associated with this {@link OutputConfiguration}.
1593      *
1594      * @return the list of surfaces associated with this {@link OutputConfiguration} as specified in
1595      * the constructor and {@link OutputConfiguration#addSurface}. The list should not be modified.
1596      */
1597     @NonNull
getSurfaces()1598     public List<Surface> getSurfaces() {
1599         return Collections.unmodifiableList(mSurfaces);
1600     }
1601 
1602     /**
1603      * Get the rotation associated with this {@link OutputConfiguration}.
1604      *
1605      * @return the rotation associated with this {@link OutputConfiguration}.
1606      *         Value will be one of ROTATION_[0, 90, 180, 270]
1607      *
1608      * @hide
1609      */
1610     @SystemApi
getRotation()1611     public int getRotation() {
1612         return mRotation;
1613     }
1614 
1615     /**
1616      * Get the surface group ID associated with this {@link OutputConfiguration}.
1617      *
1618      * @return the surface group ID associated with this {@link OutputConfiguration}.
1619      *         The default value is {@value #SURFACE_GROUP_ID_NONE}.
1620      */
getSurfaceGroupId()1621     public int getSurfaceGroupId() {
1622         return mSurfaceGroupId;
1623     }
1624 
1625     /**
1626      * Get the configured size associated with this {@link OutputConfiguration}.
1627      *
1628      * @return The configured size associated with this {@link OutputConfiguration}.
1629      *
1630      * @hide
1631      */
getConfiguredSize()1632     public Size getConfiguredSize() {
1633         return mConfiguredSize;
1634     }
1635 
1636     /**
1637      * Get the physical camera ID associated with this {@link OutputConfiguration}.
1638      *
1639      * <p>If this OutputConfiguration isn't targeting a physical camera of a logical
1640      * multi-camera, this function returns {@code null}.</p>
1641      *
1642      * @return The physical camera Id associated with this {@link OutputConfiguration}.
1643      *
1644      * @hide
1645      */
getPhysicalCameraId()1646     public @Nullable String getPhysicalCameraId() {
1647         return mPhysicalCameraId;
1648     }
1649 
    // Parcelable creator; deserializes via the private OutputConfiguration(Parcel) constructor,
    // whose read order mirrors writeToParcel.
    public static final @android.annotation.NonNull Parcelable.Creator<OutputConfiguration> CREATOR =
            new Parcelable.Creator<OutputConfiguration>() {
        @Override
        public OutputConfiguration createFromParcel(Parcel source) {
            return new OutputConfiguration(source);
        }

        @Override
        public OutputConfiguration[] newArray(int size) {
            return new OutputConfiguration[size];
        }
    };
1662 
    @Override
    public int describeContents() {
        // No special marshaled objects (e.g. file descriptors) in this Parcelable.
        return 0;
    }
1667 
convertIntegerToIntList(List<Integer> integerList)1668     private static int[] convertIntegerToIntList(List<Integer> integerList) {
1669         int[] integerArray = new int[integerList.size()];
1670         for (int i = 0; i < integerList.size(); i++) {
1671             integerArray[i] = integerList.get(i);
1672         }
1673         return integerArray;
1674     }
1675 
convertIntArrayToIntegerList(int[] intArray)1676     private static ArrayList<Integer> convertIntArrayToIntegerList(int[] intArray) {
1677         ArrayList<Integer> integerList = new ArrayList<Integer>();
1678         if (intArray == null) {
1679             return integerList;
1680         }
1681         for (int i = 0; i < intArray.length; i++) {
1682             integerList.add(intArray[i]);
1683         }
1684         return integerList;
1685     }
1686 
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        if (dest == null) {
            throw new IllegalArgumentException("dest must not be null");
        }
        // NOTE: the write order below must stay in sync with the read order in
        // OutputConfiguration(Parcel).
        dest.writeInt(mRotation);
        dest.writeInt(mSurfaceGroupId);
        dest.writeInt(mSurfaceType);
        dest.writeInt(mConfiguredSize.getWidth());
        dest.writeInt(mConfiguredSize.getHeight());
        dest.writeInt(mIsDeferredConfig ? 1 : 0);
        dest.writeInt(mIsShared ? 1 : 0);
        dest.writeTypedList(mSurfaces);
        dest.writeString(mPhysicalCameraId);
        dest.writeInt(mIsMultiResolution ? 1 : 0);
        // writeList doesn't seem to work well with Integer list.
        dest.writeIntArray(convertIntegerToIntList(mSensorPixelModesUsed));
        dest.writeLong(mDynamicRangeProfile);
        dest.writeInt(mColorSpace);
        dest.writeLong(mStreamUseCase);
        dest.writeInt(mTimestampBase);
        dest.writeInt(mMirrorMode);
        dest.writeInt(mReadoutTimestampEnabled ? 1 : 0);
        dest.writeInt(mConfiguredFormat);
        dest.writeInt(mConfiguredDataspace);
        dest.writeLong(mUsage);
    }
1714 
    /**
     * Check if this {@link OutputConfiguration} is equal to another {@link OutputConfiguration}.
     *
     * <p>Two output configurations are only equal if and only if the underlying surfaces, surface
     * properties (width, height, format, dataspace) when the output configurations are created,
     * and all other configuration parameters are equal. </p>
     *
     * @return {@code true} if the objects were equal, {@code false} otherwise
     */
    @Override
    public boolean equals(@Nullable Object obj) {
        if (obj == null) {
            return false;
        } else if (this == obj) {
            return true;
        } else if (obj instanceof OutputConfiguration) {
            final OutputConfiguration other = (OutputConfiguration) obj;
            // Cheap scalar comparisons first; physical camera id may be null on either side.
            if (mRotation != other.mRotation
                    || !mConfiguredSize.equals(other.mConfiguredSize)
                    || mConfiguredFormat != other.mConfiguredFormat
                    || mSurfaceGroupId != other.mSurfaceGroupId
                    || mSurfaceType != other.mSurfaceType
                    || mIsDeferredConfig != other.mIsDeferredConfig
                    || mIsShared != other.mIsShared
                    || mConfiguredDataspace != other.mConfiguredDataspace
                    || mConfiguredGenerationId != other.mConfiguredGenerationId
                    || !Objects.equals(mPhysicalCameraId, other.mPhysicalCameraId)
                    || mIsMultiResolution != other.mIsMultiResolution
                    || mStreamUseCase != other.mStreamUseCase
                    || mTimestampBase != other.mTimestampBase
                    || mMirrorMode != other.mMirrorMode
                    || mReadoutTimestampEnabled != other.mReadoutTimestampEnabled
                    || mUsage != other.mUsage) {
                return false;
            }
            // Sensor pixel mode lists must match element-by-element, in order.
            if (mSensorPixelModesUsed.size() != other.mSensorPixelModesUsed.size()) {
                return false;
            }
            for (int j = 0; j < mSensorPixelModesUsed.size(); j++) {
                if (!Objects.equals(
                        mSensorPixelModesUsed.get(j), other.mSensorPixelModesUsed.get(j))) {
                    return false;
                }
            }
            // Surfaces are compared by reference identity (!=), not equals(); only the common
            // prefix is compared here.
            int minLen = Math.min(mSurfaces.size(), other.mSurfaces.size());
            for (int i = 0;  i < minLen; i++) {
                if (mSurfaces.get(i) != other.mSurfaces.get(i))
                    return false;
            }
            // Deferred configurations may differ in surface count (the deferred surface can be
            // added later) and still compare equal; non-deferred ones must match exactly.
            if (!mIsDeferredConfig && mSurfaces.size() != other.mSurfaces.size()) return false;
            if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
                return false;
            }
            if (mColorSpace != other.mColorSpace) {
                return false;
            }

            return true;
        }
        return false;
    }
1776 
1777     /**
1778      * Get and increase the next MultiResolution group id.
1779      *
1780      * If the ID reaches -1, skip it.
1781      */
getAndIncreaseMultiResolutionGroupId()1782     private static int getAndIncreaseMultiResolutionGroupId() {
1783         return sNextMultiResolutionGroupId.getAndUpdate(i ->
1784                 i + 1 == SURFACE_GROUP_ID_NONE ? i + 2 : i + 1);
1785     }
1786 
    /**
     * {@inheritDoc}
     *
     * <p>Kept consistent with {@link #equals}: for deferred configurations the hash excludes
     * the surface list and generation id, since those change when the deferred surface is
     * supplied.</p>
     */
    @Override
    public int hashCode() {
        // Need ensure that the hashcode remains unchanged after adding a deferred surface.
        // Otherwise the deferred output configuration will be lost in the camera stream map
        // after the deferred surface is set.
        if (mIsDeferredConfig) {
            return HashCodeHelpers.hashCode(
                    mRotation, mConfiguredSize.hashCode(), mConfiguredFormat, mConfiguredDataspace,
                    mSurfaceGroupId, mSurfaceType, mIsShared ? 1 : 0,
                    mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
                    mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
                    mDynamicRangeProfile, mColorSpace, mStreamUseCase,
                    mTimestampBase, mMirrorMode, mReadoutTimestampEnabled ? 1 : 0,
                    Long.hashCode(mUsage));
        }

        // Non-deferred: include the surface list and generation id as well.
        return HashCodeHelpers.hashCode(
                mRotation, mSurfaces.hashCode(), mConfiguredGenerationId,
                mConfiguredSize.hashCode(), mConfiguredFormat,
                mConfiguredDataspace, mSurfaceGroupId, mIsShared ? 1 : 0,
                mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
                mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
                mDynamicRangeProfile, mColorSpace, mStreamUseCase, mTimestampBase,
                mMirrorMode, mReadoutTimestampEnabled ? 1 : 0, Long.hashCode(mUsage));
    }
1815 
    private static final String TAG = "OutputConfiguration";

    // A surfaceGroupId counter used for MultiResolutionImageReader. Its value is
    // incremented every time {@link createInstancesForMultiResolutionOutput} is called.
    private static AtomicInteger sNextMultiResolutionGroupId = new AtomicInteger(0);

    // The output surface(s) for this configuration; mutable because a deferred
    // surface can be attached later, and shared configs may hold several surfaces.
    private ArrayList<Surface> mSurfaces;
    // Rotation applied to this output (compared and hashed as an int constant).
    private final int mRotation;
    // Surface group id; SURFACE_GROUP_ID_NONE (-1) is reserved — see
    // getAndIncreaseMultiResolutionGroupId().
    private final int mSurfaceGroupId;
    // Surface source type, this is only used by the deferred surface configuration objects.
    private final int mSurfaceType;

    // The size, format, and dataspace of the surface when OutputConfiguration is created.
    private final Size mConfiguredSize;
    private final int mConfiguredFormat;
    private final int mConfiguredDataspace;
    // Surface generation ID to distinguish changes to Surface native internals
    private final int mConfiguredGenerationId;
    // Flag indicating if this config has deferred surface. When true, equals() and
    // hashCode() deliberately ignore the surface list so the config's identity
    // survives the later attachment of the surface.
    private final boolean mIsDeferredConfig;
    // Flag indicating if this config has shared surfaces
    private boolean mIsShared;
    // The physical camera id that this output configuration is for. Null when the
    // output is not tied to a specific physical camera.
    private String mPhysicalCameraId;
    // Flag indicating if this config is for a multi-resolution output with a
    // MultiResolutionImageReader
    private boolean mIsMultiResolution;
    // The sensor pixel modes that this OutputConfiguration will use
    private ArrayList<Integer> mSensorPixelModesUsed;
    // Dynamic range profile
    private long mDynamicRangeProfile;
    // Color space
    private int mColorSpace;
    // Stream use case
    private long mStreamUseCase;
    // Timestamp base
    private int mTimestampBase;
    // Mirroring mode
    private int mMirrorMode;
    // Whether readout timestamps are enabled for this output.
    private boolean mReadoutTimestampEnabled;
    // Whether the timestamp base is set to READOUT_SENSOR
    private boolean mIsReadoutSensorTimestampBase;
    // The usage flags. Only set for instances created for ImageReader without specifying surface.
    private long mUsage;
1861 }
1862