1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 
18 package android.hardware.camera2.params;
19 
20 import static com.android.internal.util.Preconditions.*;
21 
22 import android.annotation.IntDef;
23 import android.annotation.NonNull;
24 import android.annotation.Nullable;
25 import android.annotation.SuppressLint;
26 import android.annotation.SystemApi;
27 import android.annotation.TestApi;
28 import android.graphics.ColorSpace;
29 import android.graphics.ImageFormat;
30 import android.hardware.camera2.CameraCaptureSession;
31 import android.hardware.camera2.CameraCharacteristics;
32 import android.hardware.camera2.CameraDevice;
33 import android.hardware.camera2.CameraMetadata;
34 import android.hardware.camera2.MultiResolutionImageReader;
35 import android.hardware.camera2.params.DynamicRangeProfiles;
36 import android.hardware.camera2.params.MultiResolutionStreamInfo;
37 import android.hardware.camera2.utils.HashCodeHelpers;
38 import android.hardware.camera2.utils.SurfaceUtils;
39 import android.media.ImageReader;
40 import android.os.Parcel;
41 import android.os.Parcelable;
42 import android.util.Log;
43 import android.util.Size;
44 import android.view.Surface;
45 
46 import java.lang.annotation.Retention;
47 import java.lang.annotation.RetentionPolicy;
48 import java.util.ArrayList;
49 import java.util.Collection;
50 import java.util.Collections;
51 import java.util.List;
52 import java.util.Objects;
53 
54 /**
55  * A class for describing camera output, which contains a {@link Surface} and its specific
56  * configuration for creating capture session.
57  *
58  * <p>There are several ways to instantiate, modify and use OutputConfigurations. The most common
59  * and recommended usage patterns are summarized in the following list:</p>
60  *<ul>
61  * <li>Passing a {@link Surface} to the constructor and using the OutputConfiguration instance as
62  * argument to {@link CameraDevice#createCaptureSessionByOutputConfigurations}. This is the most
63  * frequent usage and clients should consider it first before other more complicated alternatives.
64  * </li>
65  *
 * <li>Passing only a surface source class as an argument to the constructor. This is usually
 * followed by a call to create a capture session
 * (see {@link CameraDevice#createCaptureSessionByOutputConfigurations}) and a subsequent
 * call to {@link #addSurface} with a valid {@link Surface}. The sequence completes with
70  * {@link CameraCaptureSession#finalizeOutputConfigurations}. This is the deferred usage case which
71  * aims to enhance performance by allowing the resource-intensive capture session create call to
72  * execute in parallel with any {@link Surface} initialization, such as waiting for a
73  * {@link android.view.SurfaceView} to be ready as part of the UI initialization.</li>
74  *
75  * <li>The third and most complex usage pattern involves surface sharing. Once instantiated an
76  * OutputConfiguration can be enabled for surface sharing via {@link #enableSurfaceSharing}. This
77  * must be done before creating a new capture session and enables calls to
78  * {@link CameraCaptureSession#updateOutputConfiguration}. An OutputConfiguration with enabled
79  * surface sharing can be modified via {@link #addSurface} or {@link #removeSurface}. The updates
80  * to this OutputConfiguration will only come into effect after
81  * {@link CameraCaptureSession#updateOutputConfiguration} returns without throwing exceptions.
82  * Such updates can be done as long as the session is active. Clients should always consider the
83  * additional requirements and limitations placed on the output surfaces (for more details see
84  * {@link #enableSurfaceSharing}, {@link #addSurface}, {@link #removeSurface},
85  * {@link CameraCaptureSession#updateOutputConfiguration}). A trade-off exists between additional
86  * complexity and flexibility. If exercised correctly surface sharing can switch between different
87  * output surfaces without interrupting any ongoing repeating capture requests. This saves time and
88  * can significantly improve the user experience.</li>
89  *
90  * <li>Surface sharing can be used in combination with deferred surfaces. The rules from both cases
91  * are combined and clients must call {@link #enableSurfaceSharing} before creating a capture
92  * session. Attach and/or remove output surfaces via  {@link #addSurface}/{@link #removeSurface} and
93  * finalize the configuration using {@link CameraCaptureSession#finalizeOutputConfigurations}.
94  * {@link CameraCaptureSession#updateOutputConfiguration} can be called after the configuration
95  * finalize method returns without exceptions.</li>
96  *
97  * <li>If the camera device supports multi-resolution output streams, {@link
98  * CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP} will contain the
99  * formats and their corresponding stream info. The application can use an OutputConfiguration
100  * created with the multi-resolution stream info queried from {@link
101  * MultiResolutionStreamConfigurationMap#getOutputInfo} and
 * {@link android.hardware.camera2.MultiResolutionImageReader} to capture variable size
 * images.</li>
 *
 * </ul>
105  *
106  * <p> As of {@link android.os.Build.VERSION_CODES#P Android P}, all formats except
107  * {@link ImageFormat#JPEG} and {@link ImageFormat#RAW_PRIVATE} can be used for sharing, subject to
108  * device support. On prior API levels, only {@link ImageFormat#PRIVATE} format may be used.</p>
109  *
110  * @see CameraDevice#createCaptureSessionByOutputConfigurations
111  * @see CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP
112  *
113  */
114 public final class OutputConfiguration implements Parcelable {
115 
    /**
     * Rotation constant: 0 degree rotation (no rotation)
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_0 = 0;

    /**
     * Rotation constant: 90 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_90 = 1;

    /**
     * Rotation constant: 180 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_180 = 2;

    /**
     * Rotation constant: 270 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_270 = 3;

    /**
     * Invalid surface group ID.
     *
     * <p>An {@link OutputConfiguration} with this value indicates that the included surface
     * doesn't belong to any surface group.</p>
     *
     * @see #OutputConfiguration(int, Surface)
     */
    public static final int SURFACE_GROUP_ID_NONE = -1;
155 
    /**
     * Default timestamp base.
     *
     * <p>The camera device decides the timestamp based on the properties of the
     * output surface.</p>
     *
     * <ul>
     * <li> For a SurfaceView output surface, the timestamp base is {@link
     * #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}. The timestamp is overridden with choreographer
     * pulses from the display subsystem for smoother display of camera frames when the camera
     * device runs in fixed frame rate. The timestamp is roughly in the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For an output surface of MediaRecorder, MediaCodec, or ImageReader with {@link
     * android.hardware.HardwareBuffer#USAGE_VIDEO_ENCODE} usage flag, the timestamp base is
     * {@link #TIMESTAMP_BASE_MONOTONIC}, which is roughly the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For all other cases, the timestamp base is {@link #TIMESTAMP_BASE_SENSOR}, the same
     * as what's specified by {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     * <ul><li> For a SurfaceTexture output surface, the camera system re-spaces the delivery
     * of output frames based on image readout intervals, reducing viewfinder jitter. The timestamps
     * of images remain to be {@link #TIMESTAMP_BASE_SENSOR}.</li></ul></li>
     * </ul>
     *
     * <p>Note that the reduction of frame jitter for SurfaceView and SurfaceTexture comes with
     * slight increase in photon-to-photon latency, which is the time from when photons hit the
     * scene to when the corresponding pixels show up on the screen. If the photon-to-photon latency
     * is more important than the smoothness of viewfinder, {@link #TIMESTAMP_BASE_SENSOR} should be
     * used instead.</p>
     *
     * @see #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED
     * @see #TIMESTAMP_BASE_MONOTONIC
     * @see #TIMESTAMP_BASE_SENSOR
     */
    public static final int TIMESTAMP_BASE_DEFAULT = 0;

    /**
     * Timestamp base of {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     *
     * <p>The timestamps of the output images are in the time base as specified by {@link
     * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}. The application can look up the
     * corresponding result metadata by matching the timestamp with a {@link
     * CameraCaptureSession.CaptureCallback#onCaptureStarted}, or with a {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted} if readout timestamp is used.</p>
     */
    public static final int TIMESTAMP_BASE_SENSOR = 1;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#uptimeMillis}.
     *
     * <p>The timestamps of the output images are monotonically increasing, and are roughly in the
     * same time base as {@link android.os.SystemClock#uptimeMillis}. The timestamps with this
     * time base can be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * REALTIME, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     */
    public static final int TIMESTAMP_BASE_MONOTONIC = 2;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#elapsedRealtime}.
     *
     * <p>The timestamps of the output images are roughly in the
     * same time base as {@link android.os.SystemClock#elapsedRealtime}. The timestamps with this
     * time base cannot be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * UNKNOWN, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     *
     * <p>If using a REALTIME timestamp base on a device that supports only
     * TIMESTAMP_SOURCE_UNKNOWN, the accuracy of timestamps is only what is guaranteed in the
     * documentation for UNKNOWN. In particular, they have no guarantees about being accurate
     * enough to use in fusing image data with the output of inertial sensors, for features such as
     * image stabilization or augmented reality.</p>
     */
    public static final int TIMESTAMP_BASE_REALTIME = 3;

    /**
     * Timestamp is synchronized to choreographer.
     *
     * <p>The timestamp of the output images are overridden with choreographer pulses from the
     * display subsystem for smoother display of camera frames. An output target of SurfaceView
     * uses this time base by default. Note that the timestamp override is done for fixed camera
     * frame rate only.</p>
     *
     * <p>This timestamp base isn't applicable to SurfaceTexture targets. SurfaceTexture's
     * {@link android.graphics.SurfaceTexture#updateTexImage updateTexImage} function always
     * uses the latest image from the camera stream. In the case of a TextureView, the image is
     * displayed right away.</p>
     *
     * <p>Timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
     * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}. This timestamp base shouldn't be used if the
     * timestamp needs to be used for audio-video synchronization.</p>
     */
    public static final int TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4;

    /**
     * Timestamp is the start of readout in the same time domain as TIMESTAMP_BASE_SENSOR.
     *
     * <p>NOTE: do not use! Use {@link #setReadoutTimestampEnabled} instead.</p>
     *
     * @hide
     */
    public static final int TIMESTAMP_BASE_READOUT_SENSOR = 5;
265 
266     /** @hide */
267     @Retention(RetentionPolicy.SOURCE)
268     @IntDef(prefix = {"TIMESTAMP_BASE_"}, value =
269         {TIMESTAMP_BASE_DEFAULT,
270          TIMESTAMP_BASE_SENSOR,
271          TIMESTAMP_BASE_MONOTONIC,
272          TIMESTAMP_BASE_REALTIME,
273          TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
274          TIMESTAMP_BASE_READOUT_SENSOR})
275     public @interface TimestampBase {};
276 
277     /** @hide */
278      @Retention(RetentionPolicy.SOURCE)
279      @IntDef(prefix = {"SENSOR_PIXEL_MODE_"}, value =
280          {CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT,
281           CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION})
282      public @interface SensorPixelMode {};
283 
284     /** @hide */
285     @Retention(RetentionPolicy.SOURCE)
286     @IntDef(prefix = {"STREAM_USE_CASE_"}, value =
287         {CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
288          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
289          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
290          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
291          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
292          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL,
293          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW})
294     public @interface StreamUseCase {};
295 
296     /**
297      * Automatic mirroring based on camera facing
298      *
299      * <p>This is the default mirroring mode for the camera device. With this mode,
300      * the camera output is mirrored horizontally for front-facing cameras. There is
301      * no mirroring for rear-facing and external cameras.</p>
302      */
303     public static final int MIRROR_MODE_AUTO = 0;
304 
305     /**
306      * No mirror transform is applied
307      *
308      * <p>No mirroring is applied to the camera output regardless of the camera facing.</p>
309      */
310     public static final int MIRROR_MODE_NONE = 1;
311 
312     /**
313      * Camera output is mirrored horizontally
314      *
315      * <p>The camera output is mirrored horizontally, the same behavior as in AUTO mode for
316      * front facing camera.</p>
317      */
318     public static final int MIRROR_MODE_H = 2;
319 
320     /**
321      * Camera output is mirrored vertically
322      */
323     public static final int MIRROR_MODE_V = 3;
324 
325     /** @hide */
326     @Retention(RetentionPolicy.SOURCE)
327     @IntDef(prefix = {"MIRROR_MODE_"}, value =
328         {MIRROR_MODE_AUTO,
329           MIRROR_MODE_NONE,
330           MIRROR_MODE_H,
331           MIRROR_MODE_V})
332     public @interface MirrorMode {};
333 
    /**
     * Create a new {@link OutputConfiguration} instance with a {@link Surface}.
     *
     * <p>This constructor creates a default configuration, with a surface group ID of
     * {@value #SURFACE_GROUP_ID_NONE} and no rotation.</p>
     *
     * @param surface
     *          A Surface for camera to output to.
     */
    public OutputConfiguration(@NonNull Surface surface) {
        this(SURFACE_GROUP_ID_NONE, surface, ROTATION_0);
    }
347 
348     /**
349      * Unknown surface source type.
350      */
351     private final int SURFACE_TYPE_UNKNOWN = -1;
352 
353     /**
354      * The surface is obtained from {@link android.view.SurfaceView}.
355      */
356     private final int SURFACE_TYPE_SURFACE_VIEW = 0;
357 
358     /**
359      * The surface is obtained from {@link android.graphics.SurfaceTexture}.
360      */
361     private final int SURFACE_TYPE_SURFACE_TEXTURE = 1;
362 
363     /**
364      * Maximum number of surfaces supported by one {@link OutputConfiguration}.
365      *
366      * <p>The combined number of surfaces added by the constructor and
367      * {@link OutputConfiguration#addSurface} should not exceed this value.</p>
368      *
369      */
370     private static final int MAX_SURFACES_COUNT = 4;
371 
    /**
     * Create a new {@link OutputConfiguration} instance with a {@link Surface},
     * with a surface group ID.
     *
     * <p>
     * A surface group ID is used to identify which surface group this output surface belongs to. A
     * surface group is a group of output surfaces that are not intended to receive camera output
     * buffer streams simultaneously. The {@link CameraDevice} may be able to share the buffers used
     * by all the surfaces from the same surface group, therefore may reduce the overall memory
     * footprint. The application should only set the same set ID for the streams that are not
     * simultaneously streaming. A negative ID indicates that this surface doesn't belong to any
     * surface group. The default value is {@value #SURFACE_GROUP_ID_NONE}.</p>
     *
     * <p>For example, a video chat application that has an adaptive output resolution feature would
     * need two (or more) output resolutions, to switch resolutions without any output glitches.
     * However, at any given time, only one output is active to minimize outgoing network bandwidth
     * and encoding overhead.  To save memory, the application should set the video outputs to have
     * the same non-negative group ID, so that the camera device can share the same memory region
     * for the alternating outputs.</p>
     *
     * <p>It is not an error to include output streams with the same group ID in the same capture
     * request, but the resulting memory consumption may be higher than if the two streams were
     * not in the same surface group to begin with, especially if the outputs have substantially
     * different dimensions.</p>
     *
     * @param surfaceGroupId
     *          A group ID for this output, used for sharing memory between multiple outputs.
     * @param surface
     *          A Surface for camera to output to.
     */
    public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface) {
        this(surfaceGroupId, surface, ROTATION_0);
    }
406 
407     /**
408      * Set the multi-resolution output flag.
409      *
410      * <p>Specify that this OutputConfiguration is part of a multi-resolution output stream group
411      * used by {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
412      *
413      * <p>This function must only be called for an OutputConfiguration with a non-negative
414      * group ID. And all OutputConfigurations of a MultiResolutionImageReader will have the same
415      * group ID and have this flag set.</p>
416      *
417      * @throws IllegalStateException If surface sharing is enabled via {@link #enableSurfaceSharing}
418      *         call, or no non-negative group ID has been set.
419      * @hide
420      */
setMultiResolutionOutput()421     public void setMultiResolutionOutput() {
422         if (mIsShared) {
423             throw new IllegalStateException("Multi-resolution output flag must not be set for " +
424                     "configuration with surface sharing");
425         }
426         if (mSurfaceGroupId == SURFACE_GROUP_ID_NONE) {
427             throw new IllegalStateException("Multi-resolution output flag should only be set for " +
428                     "surface with non-negative group ID");
429         }
430 
431         mIsMultiResolution = true;
432     }
433 
    /**
     * Set a specific device supported dynamic range profile.
     *
     * <p>Clients can choose from any profile advertised as supported in
     * CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES
     * queried using {@link DynamicRangeProfiles#getSupportedProfiles()}.
     * If this is not explicitly set, then the default profile will be
     * {@link DynamicRangeProfiles#STANDARD}.</p>
     *
     * <p>Do note that invalid combinations between the registered output
     * surface pixel format and the configured dynamic range profile will
     * cause capture session initialization failure. Invalid combinations
     * include any 10-bit dynamic range profile advertised in
     * {@link DynamicRangeProfiles#getSupportedProfiles()} combined with
     * an output Surface pixel format different from {@link ImageFormat#PRIVATE}
     * (the default for Surfaces initialized by {@link android.view.SurfaceView},
     * {@link android.view.TextureView}, {@link android.media.MediaRecorder},
     * {@link android.media.MediaCodec} etc.)
     * or {@link ImageFormat#YCBCR_P010}.</p>
     *
     * @param profile the dynamic range profile to use for this output
     */
    public void setDynamicRangeProfile(@DynamicRangeProfiles.Profile long profile) {
        mDynamicRangeProfile = profile;
    }
457 
    /**
     * Return the current dynamic range profile.
     *
     * @return the currently set dynamic range profile
     * @see #setDynamicRangeProfile
     */
    public @DynamicRangeProfiles.Profile long getDynamicRangeProfile() {
        return mDynamicRangeProfile;
    }
466 
    /**
     * Set a specific device-supported color space.
     *
     * <p>Clients can choose from any profile advertised as supported in
     * {@link CameraCharacteristics#REQUEST_AVAILABLE_COLOR_SPACE_PROFILES}
     * queried using {@link ColorSpaceProfiles#getSupportedColorSpaces}.
     * When set, the colorSpace will override the default color spaces of the output targets,
     * or the color space implied by the dataSpace passed into an {@link ImageReader}'s
     * constructor.</p>
     *
     * @param colorSpace the named color space to use for this output
     * @hide
     */
    @TestApi
    public void setColorSpace(@NonNull ColorSpace.Named colorSpace) {
        // The ordinal is stored so it round-trips through getColorSpace() via Named.values().
        mColorSpace = colorSpace.ordinal();
    }
483 
    /**
     * Clear the color space, such that the default color space will be used.
     *
     * <p>After this call, {@link #getColorSpace} returns {@code null}.</p>
     *
     * @hide
     */
    @TestApi
    public void clearColorSpace() {
        mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
    }
493 
494     /**
495      * Return the current color space.
496      *
497      * @return the currently set color space
498      * @hide
499      */
500     @TestApi
501     @SuppressLint("MethodNameUnits")
getColorSpace()502     public @Nullable ColorSpace getColorSpace() {
503         if (mColorSpace != ColorSpaceProfiles.UNSPECIFIED) {
504             return ColorSpace.get(ColorSpace.Named.values()[mColorSpace]);
505         } else {
506             return null;
507         }
508     }
509 
    /**
     * Create a new {@link OutputConfiguration} instance.
     *
     * <p>This constructor takes an argument for desired camera rotation.</p>
     *
     * @param surface
     *          A Surface for camera to output to.
     * @param rotation
     *          The desired rotation to be applied on camera output. Value must be one of
     *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
     *          application should make sure corresponding surface size has width and height
     *          transposed relative to the width and height without rotation. For example,
     *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
     *          application should set rotation to {@code ROTATION_90} and make sure the
     *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
     *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
     * @hide
     */
    @SystemApi
    public OutputConfiguration(@NonNull Surface surface, int rotation) {
        // Delegate to the full constructor with no surface group.
        this(SURFACE_GROUP_ID_NONE, surface, rotation);
    }
532 
    /**
     * Create a new {@link OutputConfiguration} instance, with rotation and a group ID.
     *
     * <p>This constructor takes an argument for desired camera rotation and for the surface group
     * ID.  See {@link #OutputConfiguration(int, Surface)} for details of the group ID.</p>
     *
     * @param surfaceGroupId
     *          A group ID for this output, used for sharing memory between multiple outputs.
     * @param surface
     *          A Surface for camera to output to.
     * @param rotation
     *          The desired rotation to be applied on camera output. Value must be one of
     *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
     *          application should make sure corresponding surface size has width and height
     *          transposed relative to the width and height without rotation. For example,
     *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
     *          application should set rotation to {@code ROTATION_90} and make sure the
     *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
     *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
     * @throws NullPointerException if surface is null
     * @throws IllegalArgumentException if rotation is not one of the ROTATION_* constants
     * @hide
     */
    @SystemApi
    public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation) {
        checkNotNull(surface, "Surface must not be null");
        checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
        mSurfaceGroupId = surfaceGroupId;
        // The concrete Surface is supplied up front, so its source class isn't tracked.
        mSurfaceType = SURFACE_TYPE_UNKNOWN;
        mSurfaces = new ArrayList<Surface>();
        mSurfaces.add(surface);
        mRotation = rotation;
        // Record the surface's size/format/dataspace as seen at configuration time.
        mConfiguredSize = SurfaceUtils.getSurfaceSize(surface);
        mConfiguredFormat = SurfaceUtils.getSurfaceFormat(surface);
        mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(surface);
        // NOTE(review): generation ID presumably detects buffer-queue replacement later — confirm
        // against the equality/finalize logic elsewhere in this class.
        mConfiguredGenerationId = surface.getGenerationId();
        // Not a deferred configuration: the output Surface already exists.
        mIsDeferredConfig = false;
        mIsShared = false;
        mPhysicalCameraId = null;
        mIsMultiResolution = false;
        mSensorPixelModesUsed = new ArrayList<Integer>();
        // Remaining options start at their documented defaults.
        mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
        mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
        mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
        mTimestampBase = TIMESTAMP_BASE_DEFAULT;
        mMirrorMode = MIRROR_MODE_AUTO;
        mReadoutTimestampEnabled = false;
        mIsReadoutSensorTimestampBase = false;
    }
580 
581     /**
582      * Create a list of {@link OutputConfiguration} instances for the outputs used by a
583      * {@link android.hardware.camera2.MultiResolutionImageReader}.
584      *
585      * <p>This constructor takes an argument for a
586      * {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
587      *
588      * @param multiResolutionImageReader
589      *          The multi-resolution image reader object.
590      */
createInstancesForMultiResolutionOutput( @onNull MultiResolutionImageReader multiResolutionImageReader)591     public static @NonNull Collection<OutputConfiguration> createInstancesForMultiResolutionOutput(
592             @NonNull MultiResolutionImageReader multiResolutionImageReader)  {
593         checkNotNull(multiResolutionImageReader, "Multi-resolution image reader must not be null");
594 
595         int groupId = MULTI_RESOLUTION_GROUP_ID_COUNTER;
596         MULTI_RESOLUTION_GROUP_ID_COUNTER++;
597         // Skip in case the group id counter overflows to -1, the invalid value.
598         if (MULTI_RESOLUTION_GROUP_ID_COUNTER == -1) {
599             MULTI_RESOLUTION_GROUP_ID_COUNTER++;
600         }
601 
602         ImageReader[] imageReaders = multiResolutionImageReader.getReaders();
603         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
604         for (int i = 0; i < imageReaders.length; i++) {
605             MultiResolutionStreamInfo streamInfo =
606                     multiResolutionImageReader.getStreamInfoForImageReader(imageReaders[i]);
607 
608             OutputConfiguration config = new OutputConfiguration(
609                     groupId, imageReaders[i].getSurface());
610             config.setPhysicalCameraId(streamInfo.getPhysicalCameraId());
611             config.setMultiResolutionOutput();
612             configs.add(config);
613 
614             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
615             // because regular and max resolution output configurations are used for DEFAULT mode
616             // and MAX_RESOLUTION mode respectively by default.
617         }
618 
619         return configs;
620     }
621 
622     /**
623      * Create a new {@link OutputConfiguration} instance, with desired Surface size and Surface
624      * source class.
625      * <p>
626      * This constructor takes an argument for desired Surface size and the Surface source class
627      * without providing the actual output Surface. This is used to setup an output configuration
628      * with a deferred Surface. The application can use this output configuration to create a
629      * session.
630      * </p>
631      * <p>
632      * However, the actual output Surface must be set via {@link #addSurface} and the deferred
633      * Surface configuration must be finalized via {@link
634      * CameraCaptureSession#finalizeOutputConfigurations} before submitting a request with this
635      * Surface target. The deferred Surface can only be obtained either from {@link
636      * android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface}, or from
637      * {@link android.graphics.SurfaceTexture} via
638      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).
639      * </p>
640      *
641      * @param surfaceSize Size for the deferred surface.
642      * @param klass a non-{@code null} {@link Class} object reference that indicates the source of
643      *            this surface. Only {@link android.view.SurfaceHolder SurfaceHolder.class} and
644      *            {@link android.graphics.SurfaceTexture SurfaceTexture.class} are supported.
645      * @throws IllegalArgumentException if the Surface source class is not supported, or Surface
646      *         size is zero.
647      */
OutputConfiguration(@onNull Size surfaceSize, @NonNull Class<T> klass)648     public <T> OutputConfiguration(@NonNull Size surfaceSize, @NonNull Class<T> klass) {
649         checkNotNull(klass, "surfaceSize must not be null");
650         checkNotNull(klass, "klass must not be null");
651         if (klass == android.view.SurfaceHolder.class) {
652             mSurfaceType = SURFACE_TYPE_SURFACE_VIEW;
653         } else if (klass == android.graphics.SurfaceTexture.class) {
654             mSurfaceType = SURFACE_TYPE_SURFACE_TEXTURE;
655         } else {
656             mSurfaceType = SURFACE_TYPE_UNKNOWN;
657             throw new IllegalArgumentException("Unknown surface source class type");
658         }
659 
660         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
661             throw new IllegalArgumentException("Surface size needs to be non-zero");
662         }
663 
664         mSurfaceGroupId = SURFACE_GROUP_ID_NONE;
665         mSurfaces = new ArrayList<Surface>();
666         mRotation = ROTATION_0;
667         mConfiguredSize = surfaceSize;
668         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
669         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
670         mConfiguredGenerationId = 0;
671         mIsDeferredConfig = true;
672         mIsShared = false;
673         mPhysicalCameraId = null;
674         mIsMultiResolution = false;
675         mSensorPixelModesUsed = new ArrayList<Integer>();
676         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
677         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
678         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
679         mReadoutTimestampEnabled = false;
680         mIsReadoutSensorTimestampBase = false;
681     }
682 
683     /**
684      * Enable multiple surfaces sharing the same OutputConfiguration
685      *
686      * <p>For advanced use cases, a camera application may require more streams than the combination
687      * guaranteed by {@link CameraDevice#createCaptureSession}. In this case, more than one
688      * compatible surface can be attached to an OutputConfiguration so that they map to one
689      * camera stream, and the outputs share memory buffers when possible. Due to buffer sharing
690      * clients should be careful when adding surface outputs that modify their input data. If such
691      * case exists, camera clients should have an additional mechanism to synchronize read and write
692      * access between individual consumers.</p>
693      *
694      * <p>Two surfaces are compatible in the below cases:</p>
695      *
696      * <li> Surfaces with the same size, format, dataSpace, and Surface source class. In this case,
697      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} is guaranteed to succeed.
698      *
699      * <li> Surfaces with the same size, format, and dataSpace, but different Surface source classes
700      * that are generally not compatible. However, on some devices, the underlying camera device is
701      * able to use the same buffer layout for both surfaces. The only way to discover if this is the
702      * case is to create a capture session with that output configuration. For example, if the
703      * camera device uses the same private buffer format between a SurfaceView/SurfaceTexture and a
704      * MediaRecorder/MediaCodec, {@link CameraDevice#createCaptureSessionByOutputConfigurations}
705      * will succeed. Otherwise, it fails with {@link
706      * CameraCaptureSession.StateCallback#onConfigureFailed}.
707      * </ol>
708      *
709      * <p>To enable surface sharing, this function must be called before {@link
710      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
711      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function after
712      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect.</p>
713      *
714      * <p>Up to {@link #getMaxSharedSurfaceCount} surfaces can be shared for an OutputConfiguration.
715      * The supported surfaces for sharing must be of type SurfaceTexture, SurfaceView,
716      * MediaRecorder, MediaCodec, or implementation defined ImageReader.</p>
717      *
718      * <p>This function must not be called from OutputConfigurations created by {@link
719      * #createInstancesForMultiResolutionOutput}.</p>
720      *
721      * @throws IllegalStateException If this OutputConfiguration is created via {@link
722      * #createInstancesForMultiResolutionOutput} to back a MultiResolutionImageReader.
723      */
enableSurfaceSharing()724     public void enableSurfaceSharing() {
725         if (mIsMultiResolution) {
726             throw new IllegalStateException("Cannot enable surface sharing on "
727                     + "multi-resolution output configurations");
728         }
729         mIsShared = true;
730     }
731 
732     /**
733      * Set the id of the physical camera for this OutputConfiguration
734      *
735      * <p>In the case one logical camera is made up of multiple physical cameras, it could be
736      * desirable for the camera application to request streams from individual physical cameras.
737      * This call achieves it by mapping the OutputConfiguration to the physical camera id.</p>
738      *
739      * <p>The valid physical camera ids can be queried by {@link
740      * CameraCharacteristics#getPhysicalCameraIds}.</p>
741      *
742      * <p>Passing in a null physicalCameraId means that the OutputConfiguration is for a logical
743      * stream.</p>
744      *
745      * <p>This function must be called before {@link
746      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
747      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function
748      * after {@link CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
749      * CameraDevice#createReprocessableCaptureSessionByConfigurations} has no effect.</p>
750      *
751      * <p>As of {@link android.os.Build.VERSION_CODES#S Android 12}, an image buffer from a
752      * physical camera stream can be used for reprocessing to logical camera streams and streams
753      * from the same physical camera if the camera device supports multi-resolution input and output
754      * streams. See {@link CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP}
755      * for details. The behaviors of reprocessing from a non-physical camera stream to a physical
756      * camera stream, and from a physical camera stream to a physical camera stream of different
757      * physical camera, are device-specific and not guaranteed to be supported.</p>
758      *
759      * <p>On prior API levels, the surface belonging to a physical camera OutputConfiguration must
760      * not be used as input or output of a reprocessing request. </p>
761      */
setPhysicalCameraId(@ullable String physicalCameraId)762     public void setPhysicalCameraId(@Nullable String physicalCameraId) {
763         mPhysicalCameraId = physicalCameraId;
764     }
765 
766     /**
767      * Add a sensor pixel mode that this OutputConfiguration will be used in.
768      *
769      * <p> In the case that this output stream configuration (format, width, height) is
770      * available through {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
771      * configurations and
772      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION},
773      * configurations, the camera sub-system will assume that this {@link OutputConfiguration} will
774      * be used only with {@link android.hardware.camera2.CaptureRequest}s which has
775      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
776      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT}.
777      * In such cases, if clients intend to use the
778      * {@link OutputConfiguration}(s) in a {@link android.hardware.camera2.CaptureRequest} with
779      * other sensor pixel modes, they must specify which
780      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE}(s) they will use this
781      * {@link OutputConfiguration} with, by calling this method.
782      *
783      * In case this output stream configuration (format, width, height) is only in
784      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION},
785      * configurations, this output target must only be used with
786      * {@link android.hardware.camera2.CaptureRequest}s which has
787      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
788      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} and that
789      * is what the camera sub-system will assume. If clients add
790      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} in this
791      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
792      *
793      * In case this output stream configuration (format, width, height) is only in
794      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP},
795      * configurations, this output target must only be used with
796      * {@link android.hardware.camera2.CaptureRequest}s which has
797      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
798      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} and that is what
799      * the camera sub-system will assume. If clients add
800      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} in this
801      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
802      *
803      * @param sensorPixelModeUsed The sensor pixel mode this OutputConfiguration will be used with
804      * </p>
805      *
806      */
addSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)807     public void addSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
808         // Verify that the values are in range.
809         if (sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT &&
810                 sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
811             throw new IllegalArgumentException("Not a valid sensor pixel mode " +
812                     sensorPixelModeUsed);
813         }
814 
815         if (mSensorPixelModesUsed.contains(sensorPixelModeUsed)) {
816             // Already added, ignore;
817             return;
818         }
819         mSensorPixelModesUsed.add(sensorPixelModeUsed);
820     }
821 
822     /**
823      * Remove a sensor pixel mode, previously added through addSensorPixelModeUsed, from this
824      * OutputConfiguration.
825      *
826      * <p> Sensor pixel modes added via calls to {@link #addSensorPixelModeUsed} can also be removed
827      * from the OutputConfiguration.</p>
828      *
829      * @param sensorPixelModeUsed The sensor pixel mode to be removed.
830      *
831      * @throws IllegalArgumentException If the sensor pixel mode wasn't previously added
832      *                                  through {@link #addSensorPixelModeUsed}.
833      */
removeSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)834     public void removeSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
835       if (!mSensorPixelModesUsed.remove(Integer.valueOf(sensorPixelModeUsed))) {
836             throw new IllegalArgumentException("sensorPixelMode " + sensorPixelModeUsed +
837                     "is not part of this output configuration");
838       }
839     }
840 
841     /**
842      * Check if this configuration is for a physical camera.
843      *
844      * <p>This returns true if the output configuration was for a physical camera making up a
845      * logical multi camera via {@link OutputConfiguration#setPhysicalCameraId}.</p>
846      * @hide
847      */
isForPhysicalCamera()848     public boolean isForPhysicalCamera() {
849         return (mPhysicalCameraId != null);
850     }
851 
852     /**
853      * Check if this configuration has deferred configuration.
854      *
855      * <p>This will return true if the output configuration was constructed with surface deferred by
856      * {@link OutputConfiguration#OutputConfiguration(Size, Class)}. It will return true even after
857      * the deferred surface is added later by {@link OutputConfiguration#addSurface}.</p>
858      *
859      * @return true if this configuration has deferred surface.
860      * @hide
861      */
isDeferredConfiguration()862     public boolean isDeferredConfiguration() {
863         return mIsDeferredConfig;
864     }
865 
866     /**
867      * Add a surface to this OutputConfiguration.
868      *
869      * <p> This function can be called before or after {@link
870      * CameraDevice#createCaptureSessionByOutputConfigurations}. If it's called after,
871      * the application must finalize the capture session with
872      * {@link CameraCaptureSession#finalizeOutputConfigurations}. It is possible to call this method
873      * after the output configurations have been finalized only in cases of enabled surface sharing
874      * see {@link #enableSurfaceSharing}. The modified output configuration must be updated with
875      * {@link CameraCaptureSession#updateOutputConfiguration}.</p>
876      *
877      * <p> If the OutputConfiguration was constructed with a deferred surface by {@link
878      * OutputConfiguration#OutputConfiguration(Size, Class)}, the added surface must be obtained
879      * from {@link android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface},
880      * or from {@link android.graphics.SurfaceTexture} via
881      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).</p>
882      *
883      * <p> If the OutputConfiguration was constructed by other constructors, the added
884      * surface must be compatible with the existing surface. See {@link #enableSurfaceSharing} for
885      * details of compatible surfaces.</p>
886      *
887      * <p> If the OutputConfiguration already contains a Surface, {@link #enableSurfaceSharing} must
888      * be called before calling this function to add a new Surface.</p>
889      *
890      * @param surface The surface to be added.
891      * @throws IllegalArgumentException if the Surface is invalid, the Surface's
892      *         dataspace/format doesn't match, or adding the Surface would exceed number of
893      *         shared surfaces supported.
894      * @throws IllegalStateException if the Surface was already added to this OutputConfiguration,
895      *         or if the OutputConfiguration is not shared and it already has a surface associated
896      *         with it.
897      */
addSurface(@onNull Surface surface)898     public void addSurface(@NonNull Surface surface) {
899         checkNotNull(surface, "Surface must not be null");
900         if (mSurfaces.contains(surface)) {
901             throw new IllegalStateException("Surface is already added!");
902         }
903         if (mSurfaces.size() == 1 && !mIsShared) {
904             throw new IllegalStateException("Cannot have 2 surfaces for a non-sharing configuration");
905         }
906         if (mSurfaces.size() + 1 > MAX_SURFACES_COUNT) {
907             throw new IllegalArgumentException("Exceeds maximum number of surfaces");
908         }
909 
910         // This will throw IAE is the surface was abandoned.
911         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
912         if (!surfaceSize.equals(mConfiguredSize)) {
913             Log.w(TAG, "Added surface size " + surfaceSize +
914                     " is different than pre-configured size " + mConfiguredSize +
915                     ", the pre-configured size will be used.");
916         }
917 
918         if (mConfiguredFormat != SurfaceUtils.getSurfaceFormat(surface)) {
919             throw new IllegalArgumentException("The format of added surface format doesn't match");
920         }
921 
922         // If the surface format is PRIVATE, do not enforce dataSpace because camera device may
923         // override it.
924         if (mConfiguredFormat != ImageFormat.PRIVATE &&
925                 mConfiguredDataspace != SurfaceUtils.getSurfaceDataspace(surface)) {
926             throw new IllegalArgumentException("The dataspace of added surface doesn't match");
927         }
928 
929         mSurfaces.add(surface);
930     }
931 
932     /**
933      * Remove a surface from this OutputConfiguration.
934      *
935      * <p> Surfaces added via calls to {@link #addSurface} can also be removed from the
936      *  OutputConfiguration. The only notable exception is the surface associated with
937      *  the OutputConfiguration see {@link #getSurface} which was passed as part of the constructor
938      *  or was added first in the deferred case
939      *  {@link OutputConfiguration#OutputConfiguration(Size, Class)}.</p>
940      *
941      * @param surface The surface to be removed.
942      *
943      * @throws IllegalArgumentException If the surface is associated with this OutputConfiguration
944      *                                  (see {@link #getSurface}) or the surface didn't get added
945      *                                  with {@link #addSurface}.
946      */
removeSurface(@onNull Surface surface)947     public void removeSurface(@NonNull Surface surface) {
948         if (getSurface() == surface) {
949             throw new IllegalArgumentException(
950                     "Cannot remove surface associated with this output configuration");
951         }
952         if (!mSurfaces.remove(surface)) {
953             throw new IllegalArgumentException("Surface is not part of this output configuration");
954         }
955     }
956 
957     /**
958      * Set stream use case for this OutputConfiguration
959      *
960      * <p>Stream use case is used to describe the purpose of the stream, whether it's for live
961      * preview, still image capture, video recording, or their combinations. This flag is useful
962      * for scenarios where the immediate consumer target isn't sufficient to indicate the stream's
963      * usage.</p>
964      *
965      * <p>The main difference between stream use case and capture intent is that the former
966      * enables the camera device to optimize camera hardware and software pipelines based on user
967      * scenarios for each stream, whereas the latter is mainly a hint to camera to decide
968      * optimal 3A strategy that's applicable to the whole session. The camera device carries out
969      * configurations such as selecting tuning parameters, choosing camera sensor mode, and
970      * constructing image processing pipeline based on the streams's use cases. Capture intents are
971      * then used to fine tune 3A behaviors such as adjusting AE/AF convergence speed, and capture
972      * intents may change during the lifetime of a session. For example, for a session with a
973      * PREVIEW_VIDEO_STILL use case stream and a STILL_CAPTURE use case stream, the capture intents
974      * may be PREVIEW with fast 3A convergence speed and flash metering with automatic control for
975      * live preview, STILL_CAPTURE with best 3A parameters for still photo capture, or VIDEO_RECORD
976      * with slower 3A convergence speed for better video playback experience.</p>
977      *
978      * <p>The supported stream use cases supported by a camera device can be queried by
979      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}.</p>
980      *
981      * <p>The mandatory stream combinations involving stream use cases can be found at {@link
982      * android.hardware.camera2.CameraDevice#createCaptureSession}, as well as queried via
983      * {@link android.hardware.camera2.params.MandatoryStreamCombination}. The application is
984      * strongly recommended to select one of the guaranteed stream combinations where all streams'
985      * use cases are set to non-DEFAULT values. If the application chooses a stream combination
986      * not in the mandatory list, the camera device may ignore some use case flags due to
987      * hardware constraints or implementation details.</p>
988      *
989      * <p>This function must be called before {@link CameraDevice#createCaptureSession} or {@link
990      * CameraDevice#createCaptureSessionByOutputConfigurations}. Calling this function after
991      * {@link CameraDevice#createCaptureSession} or
992      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect to the camera
993      * session.</p>
994      *
995      * @param streamUseCase The stream use case to be set.
996      *
997      * @throws IllegalArgumentException If the streamUseCase isn't within the range of valid
998      *                                  values.
999      */
setStreamUseCase(@treamUseCase long streamUseCase)1000     public void setStreamUseCase(@StreamUseCase long streamUseCase) {
1001         // Verify that the value is in range
1002         long maxUseCaseValue = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
1003         if (streamUseCase > maxUseCaseValue &&
1004                 streamUseCase < CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
1005             throw new IllegalArgumentException("Not a valid stream use case value " +
1006                     streamUseCase);
1007         }
1008 
1009         mStreamUseCase = streamUseCase;
1010     }
1011 
1012     /**
1013      * Get the current stream use case
1014      *
1015      * <p>If no {@link #setStreamUseCase} is called first, this function returns
1016      * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT DEFAULT}.</p>
1017      *
1018      * @return the currently set stream use case
1019      */
getStreamUseCase()1020     public long getStreamUseCase() {
1021         return mStreamUseCase;
1022     }
1023 
1024     /**
1025      * Set timestamp base for this output target
1026      *
1027      * <p>Timestamp base describes the time domain of images from this
1028      * camera output and its relationship with {@link
1029      * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.</p>
1030      *
1031      * <p>If this function is not called, the timestamp base for this output
1032      * is {@link #TIMESTAMP_BASE_DEFAULT}, with which the camera device adjusts
1033      * timestamps based on the output target.</p>
1034      *
1035      * <p>See {@link #TIMESTAMP_BASE_DEFAULT}, {@link #TIMESTAMP_BASE_SENSOR},
1036      * and {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED} for details of each timestamp base.</p>
1037      *
1038      * @param timestampBase The timestamp base to be set.
1039      *
1040      * @throws IllegalArgumentException If the timestamp base isn't within the range of valid
1041      *                                  values.
1042      */
setTimestampBase(@imestampBase int timestampBase)1043     public void setTimestampBase(@TimestampBase int timestampBase) {
1044         // Verify that the value is in range
1045         if (timestampBase < TIMESTAMP_BASE_DEFAULT ||
1046                 timestampBase > TIMESTAMP_BASE_READOUT_SENSOR) {
1047             throw new IllegalArgumentException("Not a valid timestamp base value " +
1048                     timestampBase);
1049         }
1050 
1051         if (timestampBase == TIMESTAMP_BASE_READOUT_SENSOR) {
1052             mTimestampBase = TIMESTAMP_BASE_SENSOR;
1053             mReadoutTimestampEnabled = true;
1054             mIsReadoutSensorTimestampBase = true;
1055         } else {
1056             mTimestampBase = timestampBase;
1057             mIsReadoutSensorTimestampBase = false;
1058         }
1059     }
1060 
1061     /**
1062      * Get the current timestamp base
1063      *
1064      * <p>If no {@link #setTimestampBase} is called first, this function returns
1065      * {@link #TIMESTAMP_BASE_DEFAULT}.</p>
1066      *
1067      * @return The currently set timestamp base
1068      */
getTimestampBase()1069     public @TimestampBase int getTimestampBase() {
1070         if (mIsReadoutSensorTimestampBase) {
1071             return TIMESTAMP_BASE_READOUT_SENSOR;
1072         } else {
1073             return mTimestampBase;
1074         }
1075     }
1076 
1077     /**
1078      * Set the mirroring mode for this output target
1079      *
1080      * <p>If this function is not called, the mirroring mode for this output is
1081      * {@link #MIRROR_MODE_AUTO}, with which the camera API will mirror the output images
1082      * horizontally for front facing camera.</p>
1083      *
1084      * <p>For efficiency, the mirror effect is applied as a transform flag, so it is only effective
1085      * in some outputs. It works automatically for SurfaceView and TextureView outputs. For manual
1086      * use of SurfaceTexture, it is reflected in the value of
1087      * {@link android.graphics.SurfaceTexture#getTransformMatrix}. For other end points, such as
1088      * ImageReader, MediaRecorder, or MediaCodec, the mirror mode has no effect. If mirroring is
1089      * needed for such outputs, the application needs to mirror the image buffers itself before
1090      * passing them onward.</p>
1091      */
setMirrorMode(@irrorMode int mirrorMode)1092     public void setMirrorMode(@MirrorMode int mirrorMode) {
1093         // Verify that the value is in range
1094         if (mirrorMode < MIRROR_MODE_AUTO ||
1095                 mirrorMode > MIRROR_MODE_V) {
1096             throw new IllegalArgumentException("Not a valid mirror mode " + mirrorMode);
1097         }
1098         mMirrorMode = mirrorMode;
1099     }
1100 
1101     /**
1102      * Get the current mirroring mode
1103      *
1104      * <p>If no {@link #setMirrorMode} is called first, this function returns
1105      * {@link #MIRROR_MODE_AUTO}.</p>
1106      *
1107      * @return The currently set mirroring mode
1108      */
getMirrorMode()1109     public @MirrorMode int getMirrorMode() {
1110         return mMirrorMode;
1111     }
1112 
1113     /**
1114      * Use the camera sensor's readout time for the image timestamp.
1115      *
1116      * <p>The start of the camera sensor readout after exposure. For a rolling shutter camera
1117      * sensor, the timestamp is typically equal to {@code (the start of exposure time) +
1118      * (exposure time) + (certain fixed offset)}. The fixed offset can vary per session, depending
1119      * on the underlying sensor configuration. The benefit of using readout time is that when
1120      * camera runs in a fixed frame rate, the timestamp intervals between frames are constant.</p>
1121      *
1122      * <p>Readout timestamp is supported only if {@link
1123      * CameraCharacteristics#SENSOR_READOUT_TIMESTAMP} is
1124      * {@link CameraMetadata#SENSOR_READOUT_TIMESTAMP_HARDWARE}.</p>
1125      *
1126      * <p>As long as readout timestamp is supported, if the timestamp base is
1127      * {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}, or if the timestamp base is DEFAULT for a
1128      * SurfaceView output, the image timestamps for the output are always readout time regardless
1129      * of whether this function is called.</p>
1130      *
1131      * @param on The output image timestamp is the start of exposure time if false, and
1132      *           the start of readout time if true.
1133      */
setReadoutTimestampEnabled(boolean on)1134     public void setReadoutTimestampEnabled(boolean on) {
1135         mReadoutTimestampEnabled = on;
1136     }
1137 
1138     /** Whether readout timestamp is used for this OutputConfiguration.
1139      *
1140      * @see #setReadoutTimestampEnabled
1141      */
isReadoutTimestampEnabled()1142     public boolean isReadoutTimestampEnabled() {
1143         return mReadoutTimestampEnabled;
1144     }
1145 
1146     /**
1147      * Create a new {@link OutputConfiguration} instance with another {@link OutputConfiguration}
1148      * instance.
1149      *
1150      * @param other Another {@link OutputConfiguration} instance to be copied.
1151      *
1152      * @hide
1153      */
OutputConfiguration(@onNull OutputConfiguration other)1154     public OutputConfiguration(@NonNull OutputConfiguration other) {
1155         if (other == null) {
1156             throw new IllegalArgumentException("OutputConfiguration shouldn't be null");
1157         }
1158 
1159         this.mSurfaces = other.mSurfaces;
1160         this.mRotation = other.mRotation;
1161         this.mSurfaceGroupId = other.mSurfaceGroupId;
1162         this.mSurfaceType = other.mSurfaceType;
1163         this.mConfiguredDataspace = other.mConfiguredDataspace;
1164         this.mConfiguredFormat = other.mConfiguredFormat;
1165         this.mConfiguredSize = other.mConfiguredSize;
1166         this.mConfiguredGenerationId = other.mConfiguredGenerationId;
1167         this.mIsDeferredConfig = other.mIsDeferredConfig;
1168         this.mIsShared = other.mIsShared;
1169         this.mPhysicalCameraId = other.mPhysicalCameraId;
1170         this.mIsMultiResolution = other.mIsMultiResolution;
1171         this.mSensorPixelModesUsed = other.mSensorPixelModesUsed;
1172         this.mDynamicRangeProfile = other.mDynamicRangeProfile;
1173         this.mColorSpace = other.mColorSpace;
1174         this.mStreamUseCase = other.mStreamUseCase;
1175         this.mTimestampBase = other.mTimestampBase;
1176         this.mMirrorMode = other.mMirrorMode;
1177         this.mReadoutTimestampEnabled = other.mReadoutTimestampEnabled;
1178     }
1179 
1180     /**
1181      * Create an OutputConfiguration from Parcel.
1182      */
OutputConfiguration(@onNull Parcel source)1183     private OutputConfiguration(@NonNull Parcel source) {
1184         int rotation = source.readInt();
1185         int surfaceSetId = source.readInt();
1186         int surfaceType = source.readInt();
1187         int width = source.readInt();
1188         int height = source.readInt();
1189         boolean isDeferred = source.readInt() == 1;
1190         boolean isShared = source.readInt() == 1;
1191         ArrayList<Surface> surfaces = new ArrayList<Surface>();
1192         source.readTypedList(surfaces, Surface.CREATOR);
1193         String physicalCameraId = source.readString();
1194         boolean isMultiResolutionOutput = source.readInt() == 1;
1195         int[] sensorPixelModesUsed = source.createIntArray();
1196         long streamUseCase = source.readLong();
1197 
1198         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
1199         long dynamicRangeProfile = source.readLong();
1200         DynamicRangeProfiles.checkProfileValue(dynamicRangeProfile);
1201         int colorSpace = source.readInt();
1202 
1203         int timestampBase = source.readInt();
1204         int mirrorMode = source.readInt();
1205         boolean readoutTimestampEnabled = source.readInt() == 1;
1206 
1207         mSurfaceGroupId = surfaceSetId;
1208         mRotation = rotation;
1209         mSurfaces = surfaces;
1210         mConfiguredSize = new Size(width, height);
1211         mIsDeferredConfig = isDeferred;
1212         mIsShared = isShared;
1213         mSurfaces = surfaces;
1214         if (mSurfaces.size() > 0) {
1215             mSurfaceType = SURFACE_TYPE_UNKNOWN;
1216             mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
1217             mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
1218             mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
1219         } else {
1220             mSurfaceType = surfaceType;
1221             mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
1222             mConfiguredDataspace =
1223                     StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
1224             mConfiguredGenerationId = 0;
1225         }
1226         mPhysicalCameraId = physicalCameraId;
1227         mIsMultiResolution = isMultiResolutionOutput;
1228         mSensorPixelModesUsed = convertIntArrayToIntegerList(sensorPixelModesUsed);
1229         mDynamicRangeProfile = dynamicRangeProfile;
1230         mColorSpace = colorSpace;
1231         mStreamUseCase = streamUseCase;
1232         mTimestampBase = timestampBase;
1233         mMirrorMode = mirrorMode;
1234         mReadoutTimestampEnabled = readoutTimestampEnabled;
1235     }
1236 
1237     /**
1238      * Get the maximum supported shared {@link Surface} count.
1239      *
1240      * @return the maximum number of surfaces that can be added per each OutputConfiguration.
1241      *
1242      * @see #enableSurfaceSharing
1243      */
getMaxSharedSurfaceCount()1244     public int getMaxSharedSurfaceCount() {
1245         return MAX_SURFACES_COUNT;
1246     }
1247 
1248     /**
1249      * Get the {@link Surface} associated with this {@link OutputConfiguration}.
1250      *
1251      * If more than one surface is associated with this {@link OutputConfiguration}, return the
1252      * first one as specified in the constructor or {@link OutputConfiguration#addSurface}.
1253      */
getSurface()1254     public @Nullable Surface getSurface() {
1255         if (mSurfaces.size() == 0) {
1256             return null;
1257         }
1258 
1259         return mSurfaces.get(0);
1260     }
1261 
1262     /**
1263      * Get the immutable list of surfaces associated with this {@link OutputConfiguration}.
1264      *
1265      * @return the list of surfaces associated with this {@link OutputConfiguration} as specified in
1266      * the constructor and {@link OutputConfiguration#addSurface}. The list should not be modified.
1267      */
1268     @NonNull
getSurfaces()1269     public List<Surface> getSurfaces() {
1270         return Collections.unmodifiableList(mSurfaces);
1271     }
1272 
1273     /**
1274      * Get the rotation associated with this {@link OutputConfiguration}.
1275      *
1276      * @return the rotation associated with this {@link OutputConfiguration}.
1277      *         Value will be one of ROTATION_[0, 90, 180, 270]
1278      *
1279      * @hide
1280      */
1281     @SystemApi
getRotation()1282     public int getRotation() {
1283         return mRotation;
1284     }
1285 
1286     /**
1287      * Get the surface group ID associated with this {@link OutputConfiguration}.
1288      *
1289      * @return the surface group ID associated with this {@link OutputConfiguration}.
1290      *         The default value is {@value #SURFACE_GROUP_ID_NONE}.
1291      */
getSurfaceGroupId()1292     public int getSurfaceGroupId() {
1293         return mSurfaceGroupId;
1294     }
1295 
    /**
     * Parcelable plumbing: inflates {@link OutputConfiguration} instances from a
     * {@link Parcel} via the private parcel constructor.
     */
    public static final @android.annotation.NonNull Parcelable.Creator<OutputConfiguration> CREATOR =
            new Parcelable.Creator<OutputConfiguration>() {
        @Override
        public OutputConfiguration createFromParcel(Parcel source) {
            return new OutputConfiguration(source);
        }

        @Override
        public OutputConfiguration[] newArray(int size) {
            return new OutputConfiguration[size];
        }
    };
1308 
    @Override
    public int describeContents() {
        // No special marshalled content (e.g. file descriptors) in this parcelable.
        return 0;
    }
1313 
convertIntegerToIntList(List<Integer> integerList)1314     private static int[] convertIntegerToIntList(List<Integer> integerList) {
1315         int[] integerArray = new int[integerList.size()];
1316         for (int i = 0; i < integerList.size(); i++) {
1317             integerArray[i] = integerList.get(i);
1318         }
1319         return integerArray;
1320     }
1321 
convertIntArrayToIntegerList(int[] intArray)1322     private static ArrayList<Integer> convertIntArrayToIntegerList(int[] intArray) {
1323         ArrayList<Integer> integerList = new ArrayList<Integer>();
1324         if (intArray == null) {
1325             return integerList;
1326         }
1327         for (int i = 0; i < intArray.length; i++) {
1328             integerList.add(intArray[i]);
1329         }
1330         return integerList;
1331     }
1332 
    /**
     * Serialize this OutputConfiguration into {@code dest}.
     *
     * <p>The write order here must stay in sync with the read order in the
     * Parcel constructor; reordering either side corrupts deserialization.</p>
     *
     * @param dest the Parcel to write into; must not be null
     * @param flags flags from the Parcelable contract (unused here)
     * @throws IllegalArgumentException if {@code dest} is null
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        if (dest == null) {
            throw new IllegalArgumentException("dest must not be null");
        }
        dest.writeInt(mRotation);
        dest.writeInt(mSurfaceGroupId);
        dest.writeInt(mSurfaceType);
        dest.writeInt(mConfiguredSize.getWidth());
        dest.writeInt(mConfiguredSize.getHeight());
        dest.writeInt(mIsDeferredConfig ? 1 : 0);
        dest.writeInt(mIsShared ? 1 : 0);
        dest.writeTypedList(mSurfaces);
        dest.writeString(mPhysicalCameraId);
        dest.writeInt(mIsMultiResolution ? 1 : 0);
        // writeList doesn't seem to work well with Integer list.
        dest.writeIntArray(convertIntegerToIntList(mSensorPixelModesUsed));
        dest.writeLong(mDynamicRangeProfile);
        dest.writeInt(mColorSpace);
        dest.writeLong(mStreamUseCase);
        dest.writeInt(mTimestampBase);
        dest.writeInt(mMirrorMode);
        dest.writeInt(mReadoutTimestampEnabled ? 1 : 0);
    }
1357 
1358     /**
1359      * Check if this {@link OutputConfiguration} is equal to another {@link OutputConfiguration}.
1360      *
1361      * <p>Two output configurations are only equal if and only if the underlying surfaces, surface
1362      * properties (width, height, format, dataspace) when the output configurations are created,
1363      * and all other configuration parameters are equal. </p>
1364      *
1365      * @return {@code true} if the objects were equal, {@code false} otherwise
1366      */
1367     @Override
equals(@ullable Object obj)1368     public boolean equals(@Nullable Object obj) {
1369         if (obj == null) {
1370             return false;
1371         } else if (this == obj) {
1372             return true;
1373         } else if (obj instanceof OutputConfiguration) {
1374             final OutputConfiguration other = (OutputConfiguration) obj;
1375             if (mRotation != other.mRotation ||
1376                     !mConfiguredSize.equals(other.mConfiguredSize) ||
1377                     mConfiguredFormat != other.mConfiguredFormat ||
1378                     mSurfaceGroupId != other.mSurfaceGroupId ||
1379                     mSurfaceType != other.mSurfaceType ||
1380                     mIsDeferredConfig != other.mIsDeferredConfig ||
1381                     mIsShared != other.mIsShared ||
1382                     mConfiguredFormat != other.mConfiguredFormat ||
1383                     mConfiguredDataspace != other.mConfiguredDataspace ||
1384                     mConfiguredGenerationId != other.mConfiguredGenerationId ||
1385                     !Objects.equals(mPhysicalCameraId, other.mPhysicalCameraId) ||
1386                     mIsMultiResolution != other.mIsMultiResolution ||
1387                     mStreamUseCase != other.mStreamUseCase ||
1388                     mTimestampBase != other.mTimestampBase ||
1389                     mMirrorMode != other.mMirrorMode ||
1390                     mReadoutTimestampEnabled != other.mReadoutTimestampEnabled)
1391                 return false;
1392             if (mSensorPixelModesUsed.size() != other.mSensorPixelModesUsed.size()) {
1393                 return false;
1394             }
1395             for (int j = 0; j < mSensorPixelModesUsed.size(); j++) {
1396                 if (!Objects.equals(
1397                         mSensorPixelModesUsed.get(j), other.mSensorPixelModesUsed.get(j))) {
1398                     return false;
1399                 }
1400             }
1401             int minLen = Math.min(mSurfaces.size(), other.mSurfaces.size());
1402             for (int i = 0;  i < minLen; i++) {
1403                 if (mSurfaces.get(i) != other.mSurfaces.get(i))
1404                     return false;
1405             }
1406             if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
1407                 return false;
1408             }
1409             if (mColorSpace != other.mColorSpace) {
1410                 return false;
1411             }
1412 
1413             return true;
1414         }
1415         return false;
1416     }
1417 
1418     /**
1419      * {@inheritDoc}
1420      */
1421     @Override
hashCode()1422     public int hashCode() {
1423         // Need ensure that the hashcode remains unchanged after adding a deferred surface.
1424         // Otherwise the deferred output configuration will be lost in the camera stream map
1425         // after the deferred surface is set.
1426         if (mIsDeferredConfig) {
1427             return HashCodeHelpers.hashCode(
1428                     mRotation, mConfiguredSize.hashCode(), mConfiguredFormat, mConfiguredDataspace,
1429                     mSurfaceGroupId, mSurfaceType, mIsShared ? 1 : 0,
1430                     mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
1431                     mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
1432                     mDynamicRangeProfile, mColorSpace, mStreamUseCase,
1433                     mTimestampBase, mMirrorMode, mReadoutTimestampEnabled ? 1 : 0);
1434         }
1435 
1436         return HashCodeHelpers.hashCode(
1437                 mRotation, mSurfaces.hashCode(), mConfiguredGenerationId,
1438                 mConfiguredSize.hashCode(), mConfiguredFormat,
1439                 mConfiguredDataspace, mSurfaceGroupId, mIsShared ? 1 : 0,
1440                 mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
1441                 mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
1442                 mDynamicRangeProfile, mColorSpace, mStreamUseCase, mTimestampBase,
1443                 mMirrorMode, mReadoutTimestampEnabled ? 1 : 0);
1444     }
1445 
    // Log tag for this class.
    private static final String TAG = "OutputConfiguration";

    // A surfaceGroupId counter used for MultiResolutionImageReader. Its value is
    // incremented every time {@link createInstancesForMultiResolutionOutput} is called.
    // NOTE(review): mutable static state with no volatile/atomic guard; presumably
    // callers serialize access -- confirm before relying on thread-safety.
    private static int MULTI_RESOLUTION_GROUP_ID_COUNTER = 0;

    // Surfaces bound to this configuration; non-final because surfaces can be
    // attached after construction (shared/deferred configurations).
    private ArrayList<Surface> mSurfaces;
    // Rotation applied to the output; one of the ROTATION_* constants.
    private final int mRotation;
    // Surface group ID; SURFACE_GROUP_ID_NONE when not grouped.
    private final int mSurfaceGroupId;
    // Surface source type, this is only used by the deferred surface configuration objects.
    private final int mSurfaceType;

    // The size, format, and dataspace of the surface when OutputConfiguration is created.
    private final Size mConfiguredSize;
    private final int mConfiguredFormat;
    private final int mConfiguredDataspace;
    // Surface generation ID to distinguish changes to Surface native internals
    private final int mConfiguredGenerationId;
    // Flag indicating if this config has deferred surface.
    private final boolean mIsDeferredConfig;
    // Flag indicating if this config has shared surfaces
    private boolean mIsShared;
    // The physical camera id that this output configuration is for.
    private String mPhysicalCameraId;
    // Flag indicating if this config is for a multi-resolution output with a
    // MultiResolutionImageReader
    private boolean mIsMultiResolution;
    // The sensor pixel modes that this OutputConfiguration will use
    private ArrayList<Integer> mSensorPixelModesUsed;
    // Dynamic range profile
    private long mDynamicRangeProfile;
    // Color space
    private int mColorSpace;
    // Stream use case
    private long mStreamUseCase;
    // Timestamp base
    private int mTimestampBase;
    // Mirroring mode
    private int mMirrorMode;
    // readout timestamp
    private boolean mReadoutTimestampEnabled;
    // Whether the timestamp base is set to READOUT_SENSOR
    private boolean mIsReadoutSensorTimestampBase;
1489 }
1490