1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.annotation.NonNull; 20 import android.compat.annotation.UnsupportedAppUsage; 21 import android.graphics.ImageFormat; 22 import android.graphics.Point; 23 import android.graphics.Rect; 24 import android.hardware.camera2.CameraCharacteristics; 25 import android.hardware.camera2.CameraMetadata; 26 import android.hardware.camera2.CaptureRequest; 27 import android.hardware.camera2.CaptureResult; 28 import android.hardware.camera2.marshal.MarshalQueryable; 29 import android.hardware.camera2.marshal.MarshalRegistry; 30 import android.hardware.camera2.marshal.Marshaler; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 38 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 39 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 40 import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 41 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 51 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 52 import android.hardware.camera2.params.Capability; 53 import android.hardware.camera2.params.DeviceStateSensorOrientationMap; 54 import android.hardware.camera2.params.Face; 55 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 56 import android.hardware.camera2.params.LensShadingMap; 57 import android.hardware.camera2.params.MandatoryStreamCombination; 58 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap; 59 import android.hardware.camera2.params.OisSample; 60 import android.hardware.camera2.params.RecommendedStreamConfiguration; 61 import 
android.hardware.camera2.params.RecommendedStreamConfigurationMap; 62 import android.hardware.camera2.params.ReprocessFormatsMap; 63 import android.hardware.camera2.params.StreamConfiguration; 64 import android.hardware.camera2.params.StreamConfigurationDuration; 65 import android.hardware.camera2.params.StreamConfigurationMap; 66 import android.hardware.camera2.params.TonemapCurve; 67 import android.hardware.camera2.utils.ArrayUtils; 68 import android.hardware.camera2.utils.TypeReference; 69 import android.location.Location; 70 import android.location.LocationManager; 71 import android.os.Build; 72 import android.os.Parcel; 73 import android.os.Parcelable; 74 import android.os.ServiceSpecificException; 75 import android.util.Log; 76 import android.util.Range; 77 import android.util.Size; 78 79 import dalvik.annotation.optimization.FastNative; 80 import dalvik.system.VMRuntime; 81 82 import java.io.IOException; 83 import java.nio.ByteBuffer; 84 import java.nio.ByteOrder; 85 import java.util.ArrayList; 86 import java.util.Arrays; 87 import java.util.Collections; 88 import java.util.HashMap; 89 import java.util.HashSet; 90 import java.util.Map; 91 import java.util.List; 92 import java.util.Objects; 93 import java.util.Set; 94 95 /** 96 * Implementation of camera metadata marshal/unmarshal across Binder to 97 * the camera service 98 */ 99 public class CameraMetadataNative implements Parcelable { 100 101 public static class Key<T> { 102 private boolean mHasTag; 103 private int mTag; 104 private long mVendorId = Long.MAX_VALUE; 105 private final Class<T> mType; 106 private final TypeReference<T> mTypeReference; 107 private final String mName; 108 private final String mFallbackName; 109 private final int mHash; 110 111 /** 112 * @hide 113 */ Key(String name, Class<T> type, long vendorId)114 public Key(String name, Class<T> type, long vendorId) { 115 if (name == null) { 116 throw new NullPointerException("Key needs a valid name"); 117 } else if (type == null) { 118 throw new NullPointerException("Type needs to be non-null"); 119 } 120 mName = name; 121 mFallbackName = null; 122 mType = type; 123 mVendorId = vendorId; 124 mTypeReference = TypeReference.createSpecializedTypeReference(type); 125 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 126 } 127 128 /** 129 * @hide 130 */ Key(String name, String fallbackName, Class<T> type)131 public Key(String name, String fallbackName, Class<T> type) { 132 if (name == null) { 133 throw new NullPointerException("Key needs a valid name"); 134 } else if (type == null) { 135 throw new NullPointerException("Type needs to be non-null"); 136 } 137 mName = name; 138 mFallbackName = fallbackName; 139 mType = type; 140 mTypeReference = TypeReference.createSpecializedTypeReference(type); 141 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 142 } 143 144 /** 145 * Visible for testing only. 146 * 147 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 148 * for application code or vendor-extended keys.</p> 149 */ Key(String name, Class<T> type)150 public Key(String name, Class<T> type) { 151 if (name == null) { 152 throw new NullPointerException("Key needs a valid name"); 153 } else if (type == null) { 154 throw new NullPointerException("Type needs to be non-null"); 155 } 156 mName = name; 157 mFallbackName = null; 158 mType = type; 159 mTypeReference = TypeReference.createSpecializedTypeReference(type); 160 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 161 } 162 163 /** 164 * Visible for testing only. 
165 * 166 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 167 * for application code or vendor-extended keys.</p> 168 */ 169 @SuppressWarnings("unchecked") Key(String name, TypeReference<T> typeReference)170 public Key(String name, TypeReference<T> typeReference) { 171 if (name == null) { 172 throw new NullPointerException("Key needs a valid name"); 173 } else if (typeReference == null) { 174 throw new NullPointerException("TypeReference needs to be non-null"); 175 } 176 mName = name; 177 mFallbackName = null; 178 mType = (Class<T>)typeReference.getRawType(); 179 mTypeReference = typeReference; 180 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 181 } 182 183 /** 184 * Return a camelCase, period separated name formatted like: 185 * {@code "root.section[.subsections].name"}. 186 * 187 * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."}; 188 * keys that are device/platform-specific are prefixed with {@code "com."}.</p> 189 * 190 * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would 191 * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device 192 * specific key might look like {@code "com.google.nexus.data.private"}.</p> 193 * 194 * @return String representation of the key name 195 */ getName()196 public final String getName() { 197 return mName; 198 } 199 200 /** 201 * {@inheritDoc} 202 */ 203 @Override hashCode()204 public final int hashCode() { 205 return mHash; 206 } 207 208 /** 209 * Compare this key against other native keys, request keys, result keys, and 210 * characteristics keys. 211 * 212 * <p>Two keys are considered equal if their name and type reference are equal.</p> 213 * 214 * <p>Note that the equality against non-native keys is one-way. A native key may be equal 215 * to a result key; but that same result key will not be equal to a native key.</p> 216 */ 217 @SuppressWarnings("rawtypes") 218 @Override equals(Object o)219 public final boolean equals(Object o) { 220 if (this == o) { 221 return true; 222 } 223 224 if (o == null || this.hashCode() != o.hashCode()) { 225 return false; 226 } 227 228 Key<?> lhs; 229 230 if (o instanceof CaptureResult.Key) { 231 lhs = ((CaptureResult.Key)o).getNativeKey(); 232 } else if (o instanceof CaptureRequest.Key) { 233 lhs = ((CaptureRequest.Key)o).getNativeKey(); 234 } else if (o instanceof CameraCharacteristics.Key) { 235 lhs = ((CameraCharacteristics.Key)o).getNativeKey(); 236 } else if ((o instanceof Key)) { 237 lhs = (Key<?>)o; 238 } else { 239 return false; 240 } 241 242 return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference); 243 } 244 245 /** 246 * <p> 247 * Get the tag corresponding to this key. This enables insertion into the 248 * native metadata. 249 * </p> 250 * 251 * <p>This value is looked up the first time, and cached subsequently.</p> 252 * 253 * <p>This function may be called without cacheTag() if this is not a vendor key. 254 * If this is a vendor key, cacheTag() must be called first before getTag() can 255 * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor 256 * tag lookup could fail.</p> 257 * 258 * @return The tag numeric value corresponding to the string 259 */ 260 @UnsupportedAppUsage getTag()261 public final int getTag() { 262 if (!mHasTag) { 263 mTag = CameraMetadataNative.getTag(mName, mVendorId); 264 mHasTag = true; 265 } 266 return mTag; 267 } 268 269 /** 270 * Whether this key's tag is cached. 
271 * 272 * @hide 273 */ 274 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) hasTag()275 public final boolean hasTag() { 276 return mHasTag; 277 } 278 279 /** 280 * Cache this key's tag. 281 * 282 * @hide 283 */ 284 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) cacheTag(int tag)285 public final void cacheTag(int tag) { 286 mHasTag = true; 287 mTag = tag; 288 } 289 290 /** 291 * Get the raw class backing the type {@code T} for this key. 292 * 293 * <p>The distinction is only important if {@code T} is a generic, e.g. 294 * {@code Range<Integer>} since the nested type will be erased.</p> 295 */ getType()296 public final Class<T> getType() { 297 // TODO: remove this; other places should use #getTypeReference() instead 298 return mType; 299 } 300 301 /** 302 * Get the vendor tag provider id. 303 * 304 * @hide 305 */ getVendorId()306 public final long getVendorId() { 307 return mVendorId; 308 } 309 310 /** 311 * Get the type reference backing the type {@code T} for this key. 312 * 313 * <p>The distinction is only important if {@code T} is a generic, e.g. 314 * {@code Range<Integer>} since the nested type will be retained.</p> 315 */ getTypeReference()316 public final TypeReference<T> getTypeReference() { 317 return mTypeReference; 318 } 319 } 320 321 private static final String TAG = "CameraMetadataJV"; 322 private static final boolean DEBUG = false; 323 324 // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h 325 public static final int NATIVE_JPEG_FORMAT = 0x21; 326 327 private static final String CELLID_PROCESS = "CELLID"; 328 private static final String GPS_PROCESS = "GPS"; 329 private static final int FACE_LANDMARK_SIZE = 6; 330 331 private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0; 332 private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1; 333 private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2; 334 translateLocationProviderToProcess(final String provider)335 private static String translateLocationProviderToProcess(final String provider) { 336 if (provider == null) { 337 return null; 338 } 339 switch(provider) { 340 case LocationManager.GPS_PROVIDER: 341 return GPS_PROCESS; 342 case LocationManager.NETWORK_PROVIDER: 343 return CELLID_PROCESS; 344 default: 345 return null; 346 } 347 } 348 translateProcessToLocationProvider(final String process)349 private static String translateProcessToLocationProvider(final String process) { 350 if (process == null) { 351 return null; 352 } 353 switch(process) { 354 case GPS_PROCESS: 355 return LocationManager.GPS_PROVIDER; 356 case CELLID_PROCESS: 357 return LocationManager.NETWORK_PROVIDER; 358 default: 359 return null; 360 } 361 } 362 CameraMetadataNative()363 public CameraMetadataNative() { 364 super(); 365 mMetadataPtr = nativeAllocate(); 366 if (mMetadataPtr == 0) { 367 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 368 } 369 updateNativeAllocation(); 370 } 371 372 /** 373 * Copy constructor - clone metadata 374 */ CameraMetadataNative(CameraMetadataNative other)375 public CameraMetadataNative(CameraMetadataNative other) { 376 super(); 377 mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr); 378 if (mMetadataPtr == 0) { 379 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 380 } 381 updateNativeAllocation(); 382 } 383 384 /** 385 * Move the contents from {@code other} into a new camera metadata instance.</p> 386 * 387 * <p>After this call, {@code 
other} will become empty.</p> 388 * 389 * @param other the previous metadata instance which will get pilfered 390 * @return a new metadata instance with the values from {@code other} moved into it 391 */ move(CameraMetadataNative other)392 public static CameraMetadataNative move(CameraMetadataNative other) { 393 CameraMetadataNative newObject = new CameraMetadataNative(); 394 newObject.swap(other); 395 return newObject; 396 } 397 398 /** 399 * Set all metadata values in the destination argument by using the corresponding 400 * values from the source. Metadata tags present in the destination and absent 401 * from the source will remain unmodified. 402 * 403 * @param dst Destination metadata 404 * @param src Source metadata 405 * @hide 406 */ update(CameraMetadataNative dst, CameraMetadataNative src)407 public static void update(CameraMetadataNative dst, CameraMetadataNative src) { 408 nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr); 409 } 410 411 public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR = 412 new Parcelable.Creator<CameraMetadataNative>() { 413 @Override 414 public CameraMetadataNative createFromParcel(Parcel in) { 415 CameraMetadataNative metadata = new CameraMetadataNative(); 416 metadata.readFromParcel(in); 417 return metadata; 418 } 419 420 @Override 421 public CameraMetadataNative[] newArray(int size) { 422 return new CameraMetadataNative[size]; 423 } 424 }; 425 426 @Override describeContents()427 public int describeContents() { 428 return 0; 429 } 430 431 @Override writeToParcel(Parcel dest, int flags)432 public void writeToParcel(Parcel dest, int flags) { 433 nativeWriteToParcel(dest, mMetadataPtr); 434 } 435 436 /** 437 * @hide 438 */ get(CameraCharacteristics.Key<T> key)439 public <T> T get(CameraCharacteristics.Key<T> key) { 440 return get(key.getNativeKey()); 441 } 442 443 /** 444 * @hide 445 */ get(CaptureResult.Key<T> key)446 public <T> T get(CaptureResult.Key<T> key) { 447 return get(key.getNativeKey()); 448 } 449 450 /** 451 * @hide 452 */ get(CaptureRequest.Key<T> key)453 public <T> T get(CaptureRequest.Key<T> key) { 454 return get(key.getNativeKey()); 455 } 456 457 /** 458 * Look-up a metadata field value by its key. 459 * 460 * @param key a non-{@code null} key instance 461 * @return the field corresponding to the {@code key}, or {@code null} if no value was set 462 */ get(Key<T> key)463 public <T> T get(Key<T> key) { 464 Objects.requireNonNull(key, "key must not be null"); 465 466 // Check if key has been overridden to use a wrapper class on the java side. 467 GetCommand g = sGetCommandMap.get(key); 468 if (g != null) { 469 return g.getValue(this, key); 470 } 471 return getBase(key); 472 } 473 readFromParcel(Parcel in)474 public void readFromParcel(Parcel in) { 475 nativeReadFromParcel(in, mMetadataPtr); 476 updateNativeAllocation(); 477 } 478 479 /** 480 * Set the global client-side vendor tag descriptor to allow use of vendor 481 * tags in camera applications. 482 * 483 * @throws ServiceSpecificException 484 * @hide 485 */ setupGlobalVendorTagDescriptor()486 public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException { 487 int err = nativeSetupGlobalVendorTagDescriptor(); 488 if (err != 0) { 489 throw new ServiceSpecificException(err, "Failure to set up global vendor tags"); 490 } 491 } 492 493 /** 494 * Set the global client-side vendor tag descriptor to allow use of vendor 495 * tags in camera applications. 
496 * 497 * @return int An error code corresponding to one of the 498 * {@link ICameraService} error constants, or 0 on success. 499 */ nativeSetupGlobalVendorTagDescriptor()500 private static native int nativeSetupGlobalVendorTagDescriptor(); 501 502 /** 503 * Set a camera metadata field to a value. The field definitions can be 504 * found in {@link CameraCharacteristics}, {@link CaptureResult}, and 505 * {@link CaptureRequest}. 506 * 507 * @param key The metadata field to write. 508 * @param value The value to set the field to, which must be of a matching 509 * type to the key. 510 */ set(Key<T> key, T value)511 public <T> void set(Key<T> key, T value) { 512 SetCommand s = sSetCommandMap.get(key); 513 if (s != null) { 514 s.setValue(this, value); 515 return; 516 } 517 518 setBase(key, value); 519 } 520 set(CaptureRequest.Key<T> key, T value)521 public <T> void set(CaptureRequest.Key<T> key, T value) { 522 set(key.getNativeKey(), value); 523 } 524 set(CaptureResult.Key<T> key, T value)525 public <T> void set(CaptureResult.Key<T> key, T value) { 526 set(key.getNativeKey(), value); 527 } 528 set(CameraCharacteristics.Key<T> key, T value)529 public <T> void set(CameraCharacteristics.Key<T> key, T value) { 530 set(key.getNativeKey(), value); 531 } 532 533 // Keep up-to-date with camera_metadata.h 534 /** 535 * @hide 536 */ 537 public static final int TYPE_BYTE = 0; 538 /** 539 * @hide 540 */ 541 public static final int TYPE_INT32 = 1; 542 /** 543 * @hide 544 */ 545 public static final int TYPE_FLOAT = 2; 546 /** 547 * @hide 548 */ 549 public static final int TYPE_INT64 = 3; 550 /** 551 * @hide 552 */ 553 public static final int TYPE_DOUBLE = 4; 554 /** 555 * @hide 556 */ 557 public static final int TYPE_RATIONAL = 5; 558 /** 559 * @hide 560 */ 561 public static final int NUM_TYPES = 6; 562 close()563 private void close() { 564 // Delete native pointer, but does not clear it 565 nativeClose(mMetadataPtr); 566 mMetadataPtr = 0; 567 568 if (mBufferSize > 0) { 569 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 570 } 571 mBufferSize = 0; 572 } 573 getBase(CameraCharacteristics.Key<T> key)574 private <T> T getBase(CameraCharacteristics.Key<T> key) { 575 return getBase(key.getNativeKey()); 576 } 577 getBase(CaptureResult.Key<T> key)578 private <T> T getBase(CaptureResult.Key<T> key) { 579 return getBase(key.getNativeKey()); 580 } 581 getBase(CaptureRequest.Key<T> key)582 private <T> T getBase(CaptureRequest.Key<T> key) { 583 return getBase(key.getNativeKey()); 584 } 585 getBase(Key<T> key)586 private <T> T getBase(Key<T> key) { 587 int tag; 588 if (key.hasTag()) { 589 tag = key.getTag(); 590 } else { 591 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 592 key.cacheTag(tag); 593 } 594 byte[] values = readValues(tag); 595 if (values == null) { 596 // If the key returns null, use the fallback key if exists. 597 // This is to support old key names for the newly published keys. 598 if (key.mFallbackName == null) { 599 return null; 600 } 601 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName); 602 values = readValues(tag); 603 if (values == null) { 604 return null; 605 } 606 } 607 608 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 609 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 610 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 611 return marshaler.unmarshal(buffer); 612 } 613 614 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 615 // metadata. 
616 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 617 new HashMap<Key<?>, GetCommand>(); 618 static { 619 sGetCommandMap.put( GetCommand()620 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 621 @Override 622 @SuppressWarnings("unchecked") 623 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 624 return (T) metadata.getAvailableFormats(); 625 } 626 }); 627 sGetCommandMap.put( GetCommand()628 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 629 @Override 630 @SuppressWarnings("unchecked") 631 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 632 return (T) metadata.getFaces(); 633 } 634 }); 635 sGetCommandMap.put( GetCommand()636 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 637 @Override 638 @SuppressWarnings("unchecked") 639 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 640 return (T) metadata.getFaceRectangles(); 641 } 642 }); 643 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()644 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 645 new GetCommand() { 646 @Override 647 @SuppressWarnings("unchecked") 648 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 649 return (T) metadata.getStreamConfigurationMap(); 650 } 651 }); 652 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey()653 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(), 654 new GetCommand() { 655 @Override 656 @SuppressWarnings("unchecked") 657 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 658 return (T) metadata.getStreamConfigurationMapMaximumResolution(); 659 } 660 }); 661 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()662 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 663 new GetCommand() { 664 @Override 665 @SuppressWarnings("unchecked") 666 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 667 return (T) metadata.getMandatoryStreamCombinations(); 668 } 669 }); 670 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()671 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 672 new GetCommand() { 673 @Override 674 @SuppressWarnings("unchecked") 675 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 676 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 677 } 678 }); 679 680 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey()681 CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(), 682 new GetCommand() { 683 @Override 684 @SuppressWarnings("unchecked") 685 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 686 return (T) metadata.getMandatoryMaximumResolutionStreamCombinations(); 687 } 688 }); 689 690 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()691 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 692 @Override 693 @SuppressWarnings("unchecked") 694 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 695 return (T) metadata.getMaxRegions(key); 696 } 697 }); 698 sGetCommandMap.put( GetCommand()699 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 700 @Override 701 
@SuppressWarnings("unchecked") 702 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 703 return (T) metadata.getMaxRegions(key); 704 } 705 }); 706 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()707 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 708 @Override 709 @SuppressWarnings("unchecked") 710 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 711 return (T) metadata.getMaxRegions(key); 712 } 713 }); 714 sGetCommandMap.put( GetCommand()715 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 716 @Override 717 @SuppressWarnings("unchecked") 718 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 719 return (T) metadata.getMaxNumOutputs(key); 720 } 721 }); 722 sGetCommandMap.put( GetCommand()723 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 724 @Override 725 @SuppressWarnings("unchecked") 726 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 727 return (T) metadata.getMaxNumOutputs(key); 728 } 729 }); 730 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()731 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 732 new GetCommand() { 733 @Override 734 @SuppressWarnings("unchecked") 735 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 736 return (T) metadata.getMaxNumOutputs(key); 737 } 738 }); 739 sGetCommandMap.put( GetCommand()740 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 741 @Override 742 @SuppressWarnings("unchecked") 743 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 744 return (T) metadata.getTonemapCurve(); 745 } 746 }); 747 sGetCommandMap.put( GetCommand()748 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 749 @Override 750 @SuppressWarnings("unchecked") 751 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 752 return (T) metadata.getGpsLocation(); 753 } 754 }); 755 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()756 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 757 new GetCommand() { 758 @Override 759 @SuppressWarnings("unchecked") 760 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 761 return (T) metadata.getLensShadingMap(); 762 } 763 }); 764 sGetCommandMap.put( CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey()765 CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey(), 766 new GetCommand() { 767 @Override 768 @SuppressWarnings("unchecked") 769 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 770 return (T) metadata.getDeviceStateOrientationMap(); 771 } 772 }); 773 sGetCommandMap.put( CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()774 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 775 new GetCommand() { 776 @Override 777 @SuppressWarnings("unchecked") 778 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 779 return (T) metadata.getOisSamples(); 780 } 781 }); 782 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()783 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 784 new GetCommand() { 785 @Override 786 @SuppressWarnings("unchecked") 787 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 788 return (T) 
metadata.getExtendedSceneModeCapabilities(); 789 } 790 }); 791 sGetCommandMap.put( CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey()792 CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(), 793 new GetCommand() { 794 @Override 795 @SuppressWarnings("unchecked") 796 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 797 return (T) metadata.getMultiResolutionStreamConfigurationMap(); 798 } 799 }); 800 } 801 getAvailableFormats()802 private int[] getAvailableFormats() { 803 int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS); 804 if (availableFormats != null) { 805 for (int i = 0; i < availableFormats.length; i++) { 806 // JPEG has different value between native and managed side, need override. 807 if (availableFormats[i] == NATIVE_JPEG_FORMAT) { 808 availableFormats[i] = ImageFormat.JPEG; 809 } 810 } 811 } 812 813 return availableFormats; 814 } 815 setFaces(Face[] faces)816 private boolean setFaces(Face[] faces) { 817 if (faces == null) { 818 return false; 819 } 820 821 int numFaces = faces.length; 822 823 // Detect if all faces are SIMPLE or not; count # of valid faces 824 boolean fullMode = true; 825 for (Face face : faces) { 826 if (face == null) { 827 numFaces--; 828 Log.w(TAG, "setFaces - null face detected, skipping"); 829 continue; 830 } 831 832 if (face.getId() == Face.ID_UNSUPPORTED) { 833 fullMode = false; 834 } 835 } 836 837 Rect[] faceRectangles = new Rect[numFaces]; 838 byte[] faceScores = new byte[numFaces]; 839 int[] faceIds = null; 840 int[] faceLandmarks = null; 841 842 if (fullMode) { 843 faceIds = new int[numFaces]; 844 faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE]; 845 } 846 847 int i = 0; 848 for (Face face : faces) { 849 if (face == null) { 850 continue; 851 } 852 853 faceRectangles[i] = face.getBounds(); 854 faceScores[i] = (byte)face.getScore(); 855 856 if (fullMode) { 857 faceIds[i] = face.getId(); 858 859 int j = 0; 860 861 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; 862 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; 863 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; 864 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; 865 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; 866 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; 867 } 868 869 i++; 870 } 871 872 set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); 873 set(CaptureResult.STATISTICS_FACE_IDS, faceIds); 874 set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); 875 set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); 876 877 return true; 878 } 879 getFaces()880 private Face[] getFaces() { 881 Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); 882 byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES); 883 Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES); 884 int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS); 885 int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS); 886 887 if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) { 888 return null; 889 } 890 891 if (faceDetectMode == null) { 892 Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); 893 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 894 } else if (faceDetectMode > 
CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 895 // Face detect mode is larger than FULL, assuming the mode is FULL 896 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL; 897 } else { 898 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) { 899 return new Face[0]; 900 } 901 if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE && 902 faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 903 Log.w(TAG, "Unknown face detect mode: " + faceDetectMode); 904 return new Face[0]; 905 } 906 } 907 908 // Face scores and rectangles are required by SIMPLE and FULL mode. 909 if (faceScores == null || faceRectangles == null) { 910 Log.w(TAG, "Expect face scores and rectangles to be non-null"); 911 return new Face[0]; 912 } else if (faceScores.length != faceRectangles.length) { 913 Log.w(TAG, String.format("Face score size(%d) doesn match face rectangle size(%d)!", 914 faceScores.length, faceRectangles.length)); 915 } 916 917 // To be safe, make number of faces is the minimal of all face info metadata length. 918 int numFaces = Math.min(faceScores.length, faceRectangles.length); 919 // Face id and landmarks are only required by FULL mode. 920 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 921 if (faceIds == null || faceLandmarks == null) { 922 Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode," + 923 "fallback to SIMPLE mode"); 924 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 925 } else { 926 if (faceIds.length != numFaces || 927 faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) { 928 Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't" + 929 "match face number(%d)!", 930 faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces)); 931 } 932 // To be safe, make number of faces is the minimal of all face info metadata length. 933 numFaces = Math.min(numFaces, faceIds.length); 934 numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE); 935 } 936 } 937 938 ArrayList<Face> faceList = new ArrayList<Face>(); 939 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) { 940 for (int i = 0; i < numFaces; i++) { 941 if (faceScores[i] <= Face.SCORE_MAX && 942 faceScores[i] >= Face.SCORE_MIN) { 943 faceList.add(new Face(faceRectangles[i], faceScores[i])); 944 } 945 } 946 } else { 947 // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL 948 for (int i = 0; i < numFaces; i++) { 949 if (faceScores[i] <= Face.SCORE_MAX && 950 faceScores[i] >= Face.SCORE_MIN && 951 faceIds[i] >= 0) { 952 Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], 953 faceLandmarks[i*FACE_LANDMARK_SIZE+1]); 954 Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], 955 faceLandmarks[i*FACE_LANDMARK_SIZE+3]); 956 Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], 957 faceLandmarks[i*FACE_LANDMARK_SIZE+5]); 958 Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], 959 leftEye, rightEye, mouth); 960 faceList.add(face); 961 } 962 } 963 } 964 Face[] faces = new Face[faceList.size()]; 965 faceList.toArray(faces); 966 return faces; 967 } 968 969 // Face rectangles are defined as (left, top, right, bottom) instead of 970 // (left, top, width, height) at the native level, so the normal Rect 971 // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo 972 // that conversion here for just the faces. 
getFaceRectangles()973 private Rect[] getFaceRectangles() { 974 Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES); 975 if (faceRectangles == null) return null; 976 977 Rect[] fixedFaceRectangles = new Rect[faceRectangles.length]; 978 for (int i = 0; i < faceRectangles.length; i++) { 979 fixedFaceRectangles[i] = new Rect( 980 faceRectangles[i].left, 981 faceRectangles[i].top, 982 faceRectangles[i].right - faceRectangles[i].left, 983 faceRectangles[i].bottom - faceRectangles[i].top); 984 } 985 return fixedFaceRectangles; 986 } 987 getLensShadingMap()988 private LensShadingMap getLensShadingMap() { 989 float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP); 990 Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE); 991 992 // Do not warn if lsmArray is null while s is not. This is valid. 993 if (lsmArray == null) { 994 return null; 995 } 996 997 if (s == null) { 998 Log.w(TAG, "getLensShadingMap - Lens shading map size was null."); 999 return null; 1000 } 1001 1002 LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth()); 1003 return map; 1004 } 1005 getDeviceStateOrientationMap()1006 private DeviceStateSensorOrientationMap getDeviceStateOrientationMap() { 1007 long[] mapArray = getBase(CameraCharacteristics.INFO_DEVICE_STATE_ORIENTATIONS); 1008 1009 // Do not warn if map is null while s is not. This is valid. 1010 if (mapArray == null) { 1011 return null; 1012 } 1013 1014 DeviceStateSensorOrientationMap map = new DeviceStateSensorOrientationMap(mapArray); 1015 return map; 1016 } 1017 getGpsLocation()1018 private Location getGpsLocation() { 1019 String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD); 1020 double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES); 1021 Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP); 1022 1023 if (areValuesAllNull(processingMethod, coords, timeStamp)) { 1024 return null; 1025 } 1026 1027 Location l = new Location(translateProcessToLocationProvider(processingMethod)); 1028 if (timeStamp != null) { 1029 // Location expects timestamp in [ms.] 1030 l.setTime(timeStamp * 1000); 1031 } else { 1032 Log.w(TAG, "getGpsLocation - No timestamp for GPS location."); 1033 } 1034 1035 if (coords != null) { 1036 l.setLatitude(coords[0]); 1037 l.setLongitude(coords[1]); 1038 l.setAltitude(coords[2]); 1039 } else { 1040 Log.w(TAG, "getGpsLocation - No coordinates for GPS location"); 1041 } 1042 1043 return l; 1044 } 1045 setGpsLocation(Location l)1046 private boolean setGpsLocation(Location l) { 1047 if (l == null) { 1048 return false; 1049 } 1050 1051 double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() }; 1052 String processMethod = translateLocationProviderToProcess(l.getProvider()); 1053 //JPEG_GPS_TIMESTAMP expects sec. instead of msec. 
1054 long timestamp = l.getTime() / 1000; 1055 1056 set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp); 1057 set(CaptureRequest.JPEG_GPS_COORDINATES, coords); 1058 1059 if (processMethod == null) { 1060 Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" + 1061 "provider"); 1062 } else { 1063 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod); 1064 } 1065 return true; 1066 } 1067 parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, StreamConfigurationMap fullMap, boolean isDepth, ArrayList<ArrayList<StreamConfiguration>> streamConfigList, ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList, ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList, boolean[] supportsPrivate)1068 private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, 1069 StreamConfigurationMap fullMap, boolean isDepth, 1070 ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList, 1071 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList, 1072 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList, 1073 boolean[] /*out*/supportsPrivate) { 1074 1075 streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1076 streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1077 streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1078 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1079 streamConfigList.add(new ArrayList<StreamConfiguration> ()); 1080 streamDurationList.add(new ArrayList<StreamConfigurationDuration> ()); 1081 streamStallList.add(new ArrayList<StreamConfigurationDuration> ()); 1082 } 1083 1084 for (RecommendedStreamConfiguration c : configurations) { 1085 int width = c.getWidth(); 1086 int height = c.getHeight(); 1087 int internalFormat = c.getFormat(); 1088 int publicFormat = 1089 (isDepth) ? 
StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1090 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1091 Size sz = new Size(width, height); 1092 int usecaseBitmap = c.getUsecaseBitmap(); 1093 1094 if (!c.isInput()) { 1095 StreamConfigurationDuration minDurationConfiguration = null; 1096 StreamConfigurationDuration stallDurationConfiguration = null; 1097 1098 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1099 width, height, /*input*/ false); 1100 1101 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1102 if (minFrameDuration > 0) { 1103 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1104 width, height, minFrameDuration); 1105 } 1106 1107 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1108 if (stallDuration > 0) { 1109 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1110 width, height, stallDuration); 1111 } 1112 1113 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1114 if ((usecaseBitmap & (1 << i)) != 0) { 1115 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1116 sc.add(streamConfiguration); 1117 1118 if (minFrameDuration > 0) { 1119 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1120 scd.add(minDurationConfiguration); 1121 } 1122 1123 if (stallDuration > 0) { 1124 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1125 scs.add(stallDurationConfiguration); 1126 } 1127 1128 if ((supportsPrivate != null) && !supportsPrivate[i] && 1129 (publicFormat == ImageFormat.PRIVATE)) { 1130 supportsPrivate[i] = true; 1131 } 1132 } 1133 } 1134 } else { 1135 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1136 throw new IllegalArgumentException("Recommended input stream configurations " + 1137 "should only be advertised in the ZSL use case!"); 1138 } 1139 1140 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1141 RecommendedStreamConfigurationMap.USECASE_ZSL); 1142 sc.add(new StreamConfiguration(internalFormat, 1143 width, height, /*input*/ true)); 1144 } 1145 } 1146 } 1147 1148 private class StreamConfigurationData { 1149 StreamConfiguration [] streamConfigurationArray = null; 1150 StreamConfigurationDuration [] minDurationArray = null; 1151 StreamConfigurationDuration [] stallDurationArray = null; 1152 } 1153 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1154 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1155 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1156 StreamConfigurationData /*out*/scData) { 1157 if ((scData == null) || (sc == null)) { 1158 return; 1159 } 1160 1161 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1162 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1163 1164 if ((scd != null) && !scd.isEmpty()) { 1165 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1166 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1167 } else { 1168 scData.minDurationArray = new StreamConfigurationDuration[0]; 1169 } 1170 1171 if ((scs != null) && !scs.isEmpty()) { 1172 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1173 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 
1174 } else { 1175 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1176 } 1177 } 1178 1179 /** 1180 * Retrieve the list of recommended stream configurations. 1181 * 1182 * @return A list of recommended stream configuration maps for each common use case or null 1183 * in case the recommended stream configurations are invalid or incomplete. 1184 * @hide 1185 */ getRecommendedStreamConfigurations()1186 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1187 RecommendedStreamConfiguration[] configurations = getBase( 1188 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1189 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1190 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1191 if ((configurations == null) && (depthConfigurations == null)) { 1192 return null; 1193 } 1194 1195 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1196 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1197 new ArrayList<RecommendedStreamConfigurationMap> (); 1198 1199 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1200 new ArrayList<ArrayList<StreamConfiguration>>(); 1201 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1202 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1203 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1204 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1205 boolean[] supportsPrivate = 1206 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1207 try { 1208 if (configurations != null) { 1209 parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false, 1210 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1211 } 1212 } catch (IllegalArgumentException e) { 1213 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1214 return null; 1215 } 1216 1217 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1218 new ArrayList<ArrayList<StreamConfiguration>>(); 1219 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1220 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1221 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1222 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1223 if (depthConfigurations != null) { 1224 try { 1225 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1226 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1227 /*supportsPrivate*/ null); 1228 } catch (IllegalArgumentException e) { 1229 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1230 return null; 1231 } 1232 } 1233 1234 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1235 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1236 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1237 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1238 boolean listHighResolution = isBurstSupported(); 1239 recommendedConfigurations.ensureCapacity( 1240 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1241 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1242 StreamConfigurationData scData = new StreamConfigurationData(); 1243 if (configurations != null) { 1244 initializeStreamConfigurationData(streamConfigList.get(i), 1245 streamDurationList.get(i), streamStallList.get(i), 
scData); 1246 } 1247 1248 StreamConfigurationData depthScData = new StreamConfigurationData(); 1249 if (depthConfigurations != null) { 1250 initializeStreamConfigurationData(depthStreamConfigList.get(i), 1251 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1252 } 1253 1254 if ((scData.streamConfigurationArray == null || 1255 scData.streamConfigurationArray.length == 0) && 1256 (depthScData.streamConfigurationArray == null || 1257 depthScData.streamConfigurationArray.length == 0)) { 1258 recommendedConfigurations.add(null); 1259 continue; 1260 } 1261 1262 // Dynamic depth streams involve alot of SW processing and currently cannot be 1263 // recommended. 1264 StreamConfigurationMap map = null; 1265 switch (i) { 1266 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1267 case RecommendedStreamConfigurationMap.USECASE_RAW: 1268 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1269 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1270 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1271 scData.minDurationArray, scData.stallDurationArray, 1272 /*depthconfiguration*/ null, /*depthminduration*/ null, 1273 /*depthstallduration*/ null, 1274 /*dynamicDepthConfigurations*/ null, 1275 /*dynamicDepthMinFrameDurations*/ null, 1276 /*dynamicDepthStallDurations*/ null, 1277 /*heicconfiguration*/ null, 1278 /*heicminduration*/ null, 1279 /*heicstallduration*/ null, 1280 /*highspeedvideoconfigurations*/ null, 1281 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1282 break; 1283 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1284 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1285 scData.minDurationArray, scData.stallDurationArray, 1286 /*depthconfiguration*/ null, /*depthminduration*/ null, 1287 /*depthstallduration*/ null, 1288 /*dynamicDepthConfigurations*/ null, 1289 /*dynamicDepthMinFrameDurations*/ null, 1290 /*dynamicDepthStallDurations*/ null, 1291 /*heicconfiguration*/ null, 1292 /*heicminduration*/ null, 1293 /*heicstallduration*/ null, 1294 highSpeedVideoConfigurations, 1295 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1296 break; 1297 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1298 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1299 scData.minDurationArray, scData.stallDurationArray, 1300 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1301 depthScData.stallDurationArray, 1302 /*dynamicDepthConfigurations*/ null, 1303 /*dynamicDepthMinFrameDurations*/ null, 1304 /*dynamicDepthStallDurations*/ null, 1305 /*heicconfiguration*/ null, 1306 /*heicminduration*/ null, 1307 /*heicstallduration*/ null, 1308 /*highSpeedVideoConfigurations*/ null, 1309 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1310 break; 1311 default: 1312 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1313 scData.minDurationArray, scData.stallDurationArray, 1314 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1315 depthScData.stallDurationArray, 1316 /*dynamicDepthConfigurations*/ null, 1317 /*dynamicDepthMinFrameDurations*/ null, 1318 /*dynamicDepthStallDurations*/ null, 1319 /*heicconfiguration*/ null, 1320 /*heicminduration*/ null, 1321 /*heicstallduration*/ null, 1322 /*highSpeedVideoConfigurations*/ null, 1323 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1324 } 1325 1326 recommendedConfigurations.add(new 
RecommendedStreamConfigurationMap(map, /*usecase*/i, 1327 supportsPrivate[i])); 1328 } 1329 1330 return recommendedConfigurations; 1331 } 1332 isCapabilitySupported(int capabilityRequested)1333 private boolean isCapabilitySupported(int capabilityRequested) { 1334 boolean ret = false; 1335 1336 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1337 for (int capability : capabilities) { 1338 if (capabilityRequested == capability) { 1339 ret = true; 1340 break; 1341 } 1342 } 1343 1344 return ret; 1345 } 1346 1347 /** 1348 * @hide 1349 */ isUltraHighResolutionSensor()1350 public boolean isUltraHighResolutionSensor() { 1351 return isCapabilitySupported( 1352 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR); 1353 1354 } isBurstSupported()1355 private boolean isBurstSupported() { 1356 return isCapabilitySupported( 1357 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 1358 } 1359 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1360 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1361 int mandatoryStreamsType) { 1362 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1363 ArrayList<Integer> caps = new ArrayList<Integer>(); 1364 caps.ensureCapacity(capabilities.length); 1365 for (int c : capabilities) { 1366 caps.add(new Integer(c)); 1367 } 1368 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1369 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1370 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1371 getStreamConfigurationMapMaximumResolution()); 1372 1373 List<MandatoryStreamCombination> combs = null; 1374 switch (mandatoryStreamsType) { 1375 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1376 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1377 break; 1378 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1379 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1380 break; 1381 default: 1382 combs = build.getAvailableMandatoryStreamCombinations(); 1383 } 1384 if ((combs != null) && (!combs.isEmpty())) { 1385 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1386 combArray = combs.toArray(combArray); 1387 return combArray; 1388 } 1389 return null; 1390 } 1391 getMandatoryConcurrentStreamCombinations()1392 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1393 if (!mHasMandatoryConcurrentStreams) { 1394 return null; 1395 } 1396 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1397 } 1398 getMandatoryMaximumResolutionStreamCombinations()1399 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1400 if (!isUltraHighResolutionSensor()) { 1401 return null; 1402 } 1403 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1404 } 1405 getMandatoryStreamCombinations()1406 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1407 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1408 } 1409 getStreamConfigurationMap()1410 private StreamConfigurationMap getStreamConfigurationMap() { 1411 StreamConfiguration[] configurations = getBase( 1412 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1413 StreamConfigurationDuration[] minFrameDurations = getBase( 1414 
CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1415 StreamConfigurationDuration[] stallDurations = getBase( 1416 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1417 StreamConfiguration[] depthConfigurations = getBase( 1418 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1419 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1420 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1421 StreamConfigurationDuration[] depthStallDurations = getBase( 1422 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1423 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1424 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1425 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1426 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1427 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1428 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1429 StreamConfiguration[] heicConfigurations = getBase( 1430 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1431 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1432 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1433 StreamConfigurationDuration[] heicStallDurations = getBase( 1434 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1435 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1436 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1437 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1438 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1439 boolean listHighResolution = isBurstSupported(); 1440 return new StreamConfigurationMap( 1441 configurations, minFrameDurations, stallDurations, 1442 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1443 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1444 dynamicDepthStallDurations, heicConfigurations, 1445 heicMinFrameDurations, heicStallDurations, 1446 highSpeedVideoConfigurations, inputOutputFormatsMap, 1447 listHighResolution); 1448 } 1449 getStreamConfigurationMapMaximumResolution()1450 private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() { 1451 if (!isUltraHighResolutionSensor()) { 1452 return null; 1453 } 1454 StreamConfiguration[] configurations = getBase( 1455 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1456 StreamConfigurationDuration[] minFrameDurations = getBase( 1457 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1458 StreamConfigurationDuration[] stallDurations = getBase( 1459 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1460 StreamConfiguration[] depthConfigurations = getBase( 1461 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1462 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1463 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1464 StreamConfigurationDuration[] depthStallDurations = getBase( 1465 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1466 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1467 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1468 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1469 
    private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() {
        if (!isUltraHighResolutionSensor()) {
            return null;
        }
        StreamConfiguration[] configurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] minFrameDurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] stallDurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfiguration[] depthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] depthMinFrameDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] depthStallDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfiguration[] dynamicDepthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] dynamicDepthStallDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfiguration[] heicConfigurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] heicMinFrameDurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION);
        StreamConfigurationDuration[] heicStallDurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION);
        HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION);
        ReprocessFormatsMap inputOutputFormatsMap = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION);
        // TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode
        boolean listHighResolution = isBurstSupported();
        return new StreamConfigurationMap(
                configurations, minFrameDurations, stallDurations,
                depthConfigurations, depthMinFrameDurations, depthStallDurations,
                dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
                dynamicDepthStallDurations, heicConfigurations,
                heicMinFrameDurations, heicStallDurations,
                highSpeedVideoConfigurations, inputOutputFormatsMap,
                listHighResolution, false);
    }

    private <T> Integer getMaxRegions(Key<T> key) {
        final int AE = 0;
        final int AWB = 1;
        final int AF = 2;

        // The order of the elements is: (AE, AWB, AF)
        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);

        if (maxRegions == null) {
            return null;
        }

        if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
            return maxRegions[AE];
        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
            return maxRegions[AWB];
        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
            return maxRegions[AF];
        } else {
            throw new AssertionError("Invalid key " + key);
        }
    }

    private <T> Integer getMaxNumOutputs(Key<T> key) {
        final int RAW = 0;
        final int PROC = 1;
        final int PROC_STALLING = 2;

        // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
        int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);

        if (maxNumOutputs == null) {
            return null;
        }

        if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
            return maxNumOutputs[RAW];
        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
            return maxNumOutputs[PROC];
        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
            return maxNumOutputs[PROC_STALLING];
        } else {
            throw new AssertionError("Invalid key " + key);
        }
    }
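    // Editorial note: illustrative sketch only, not part of the original class.
    // Worked example of the packed arrays unpacked by getMaxRegions() above: a
    // CONTROL_MAX_REGIONS value of {1, 0, 2} means one AE region, zero AWB regions
    // and two AF regions. The method name is hypothetical.
    private Integer exampleMaxAfRegions() {
        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);
        // Element order is (AE, AWB, AF), so the AF count is the last entry.
        return (maxRegions == null) ? null : maxRegions[2];
    }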
    private <T> TonemapCurve getTonemapCurve() {
        float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
        float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
        float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);

        if (areValuesAllNull(red, green, blue)) {
            return null;
        }

        if (red == null || green == null || blue == null) {
            Log.w(TAG, "getTonemapCurve - missing tone curve components");
            return null;
        }
        TonemapCurve tc = new TonemapCurve(red, green, blue);
        return tc;
    }

    private OisSample[] getOisSamples() {
        long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS);
        float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS);
        float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS);

        if (timestamps == null) {
            if (xShifts != null) {
                throw new AssertionError("timestamps is null but xShifts is not");
            }

            if (yShifts != null) {
                throw new AssertionError("timestamps is null but yShifts is not");
            }

            return null;
        }

        if (xShifts == null) {
            throw new AssertionError("timestamps is not null but xShifts is");
        }

        if (yShifts == null) {
            throw new AssertionError("timestamps is not null but yShifts is");
        }

        if (xShifts.length != timestamps.length) {
            throw new AssertionError(String.format(
                    "timestamps has %d entries but xShifts has %d", timestamps.length,
                    xShifts.length));
        }

        if (yShifts.length != timestamps.length) {
            throw new AssertionError(String.format(
                    "timestamps has %d entries but yShifts has %d", timestamps.length,
                    yShifts.length));
        }

        OisSample[] samples = new OisSample[timestamps.length];
        for (int i = 0; i < timestamps.length; i++) {
            samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]);
        }
        return samples;
    }

    private Capability[] getExtendedSceneModeCapabilities() {
        int[] maxSizes =
                getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES);
        float[] zoomRanges = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES);
        Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
        float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);

        if (maxSizes == null) {
            return null;
        }
        if (maxSizes.length % 3 != 0) {
            throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of "
                    + "[mode, width, height]");
        }
        int numExtendedSceneModes = maxSizes.length / 3;
        int numExtendedSceneModeZoomRanges = 0;
        if (zoomRanges != null) {
            if (zoomRanges.length % 2 != 0) {
                throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of "
                        + "[minZoom, maxZoom]");
            }
            numExtendedSceneModeZoomRanges = zoomRanges.length / 2;
            if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) {
                throw new AssertionError("Number of extended scene mode zoom ranges must be 1 "
                        + "less than number of supported modes");
            }
        }

        float modeOffMinZoomRatio = 1.0f;
        float modeOffMaxZoomRatio = maxDigitalZoom;
        if (zoomRange != null) {
            modeOffMinZoomRatio = zoomRange.getLower();
            modeOffMaxZoomRatio = zoomRange.getUpper();
        }

        Capability[] capabilities = new Capability[numExtendedSceneModes];
        for (int i = 0, j = 0; i < numExtendedSceneModes; i++) {
            int mode = maxSizes[3 * i];
            int width = maxSizes[3 * i + 1];
            int height = maxSizes[3 * i + 2];
            if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED
                    && j < numExtendedSceneModeZoomRanges) {
                capabilities[i] = new Capability(mode, width, height, zoomRanges[2 * j],
                        zoomRanges[2 * j + 1]);
                j++;
            } else {
                capabilities[i] = new Capability(mode, width, height, modeOffMinZoomRatio,
                        modeOffMaxZoomRatio);
            }
        }

        return capabilities;
    }
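    // Editorial note: illustrative sketch only, not part of the original class.
    // Worked example of the packed arrays parsed by getExtendedSceneModeCapabilities()
    // above: maxSizes holds [mode, width, height] triples, and zoomRanges holds one
    // [minZoom, maxZoom] pair for every mode except DISABLED. All values are made up.
    private static Capability[] exampleExtendedSceneModeCapabilities() {
        int[] maxSizes = {
                CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED, 0, 0,
                CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE, 1920, 1080,
        };
        float[] zoomRanges = {1.0f, 4.0f}; // only for the non-DISABLED mode
        return new Capability[] {
                // DISABLED falls back to the camera-wide zoom ratio range (assumed 1x-8x here).
                new Capability(maxSizes[0], maxSizes[1], maxSizes[2], 1.0f, 8.0f),
                new Capability(maxSizes[3], maxSizes[4], maxSizes[5],
                        zoomRanges[0], zoomRanges[1]),
        };
    }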
    private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureResult.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureRequest.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(Key<T> key, T value) {
        int tag;
        if (key.hasTag()) {
            tag = key.getTag();
        } else {
            tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
            key.cacheTag(tag);
        }
        if (value == null) {
            // Erase the entry
            writeValues(tag, /*src*/null);
            return;
        } // else update the entry to a new value

        int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        int size = marshaler.calculateMarshalSize(value);

        // TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
        byte[] values = new byte[size];

        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        marshaler.marshal(value, buffer);

        writeValues(tag, values);
    }
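    // Editorial note: illustrative sketch only, not part of the original class.
    // Mirrors setBase() above in the opposite direction: the raw bytes stored for a
    // tag are wrapped in a native-order ByteBuffer and decoded by the same Marshaler
    // that produced them. This is only a hedged sketch of the get-side flow, assuming
    // the standard Marshaler.unmarshal(ByteBuffer) entry point; the name is hypothetical.
    private <T> T exampleUnmarshal(Key<T> key) {
        int tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
        byte[] values = readValues(tag);
        if (values == null) {
            return null; // no entry for this tag
        }
        int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        return marshaler.unmarshal(buffer);
    }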
    // Use Command pattern here to avoid lots of expensive if/equals checks in set for overridden
    // metadata.
    private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
            new HashMap<Key<?>, SetCommand>();
    static {
        sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setAvailableFormats((int[]) value);
                    }
                });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setFaceRectangles((Rect[]) value);
                    }
                });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setFaces((Face[]) value);
                    }
                });
        sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setTonemapCurve((TonemapCurve) value);
            }
        });
        sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setGpsLocation((Location) value);
            }
        });
        sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setScalerCropRegion((Rect) value);
                    }
                });
        sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setAWBRegions(value);
                    }
                });
        sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setAFRegions(value);
                    }
                });
        sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(),
                new SetCommand() {
                    @Override
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setAERegions(value);
                    }
                });
    }

    private boolean setAvailableFormats(int[] value) {
        int[] availableFormat = value;
        if (value == null) {
            // Let setBase() handle the null value case.
            return false;
        }

        int[] newValues = new int[availableFormat.length];
        for (int i = 0; i < availableFormat.length; i++) {
            newValues[i] = availableFormat[i];
            if (availableFormat[i] == ImageFormat.JPEG) {
                newValues[i] = NATIVE_JPEG_FORMAT;
            }
        }

        setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
        return true;
    }
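    // Editorial note: illustrative sketch only, not part of the original class.
    // Shows how a command map such as sSetCommandMap is typically consulted: look up
    // the per-key override and fall back to setBase() when none is registered. The
    // real dispatch lives in the public set() entry points earlier in this file;
    // this helper is only a hypothetical illustration of the pattern.
    private <T> void exampleSetWithCommandMap(Key<T> key, T value) {
        SetCommand command = sSetCommandMap.get(key);
        if (command != null) {
            command.setValue(this, value); // overridden behaviour, e.g. setFaceRectangles()
        } else {
            setBase(key, value);
        }
    }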
    /**
     * Convert face rectangles from the managed-side definition to the native-side definition,
     * since the two differ.
     * <p>
     * Managed-side face rectangles are defined as: left, top, width, height.
     * Native-side face rectangles are defined as: left, top, right, bottom.
     * The input face rectangles need to be converted to the native-side definition when set is
     * called. For example, a managed rectangle of (left=100, top=50, width=20, height=30)
     * becomes the native rectangle (100, 50, 120, 80).
     * </p>
     *
     * @param faceRects Input face rectangles.
     * @return true if the face rectangles were set successfully. Otherwise, let the caller
     *         (setBase) handle it appropriately.
     */
    private boolean setFaceRectangles(Rect[] faceRects) {
        if (faceRects == null) {
            return false;
        }

        Rect[] newFaceRects = new Rect[faceRects.length];
        for (int i = 0; i < newFaceRects.length; i++) {
            newFaceRects[i] = new Rect(
                    faceRects[i].left,
                    faceRects[i].top,
                    faceRects[i].right + faceRects[i].left,
                    faceRects[i].bottom + faceRects[i].top);
        }

        setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects);
        return true;
    }

    private <T> boolean setTonemapCurve(TonemapCurve tc) {
        if (tc == null) {
            return false;
        }

        float[][] curve = new float[3][];
        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
            int pointCount = tc.getPointCount(i);
            curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
            tc.copyColorCurve(i, curve[i], 0);
        }
        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);

        return true;
    }
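    // Editorial note: illustrative sketch only, not part of the original class.
    // Round-trip counterpart to getTonemapCurve()/setTonemapCurve() above: each
    // channel is a flat array of (Pin, Pout) pairs, so a two-point linear curve is
    // {0, 0, 1, 1}. The values and helper name are illustrative only.
    private static TonemapCurve exampleLinearTonemapCurve() {
        float[] linear = {0.0f, 0.0f, 1.0f, 1.0f};
        return new TonemapCurve(linear, linear, linear);
    }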
    private <T> boolean setScalerCropRegion(Rect cropRegion) {
        if (cropRegion == null) {
            return false;
        }
        setBase(CaptureRequest.SCALER_CROP_REGION_SET, true);
        setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion);
        return true;
    }

    private <T> boolean setAFRegions(T afRegions) {
        if (afRegions == null) {
            return false;
        }
        setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true);
        // The cast to CaptureRequest.Key is needed since Java does not support template
        // specialization and we need to route this method to
        // setBase(CaptureRequest.Key<T> key, T value)
        setBase((CaptureRequest.Key) CaptureRequest.CONTROL_AF_REGIONS, afRegions);
        return true;
    }

    private <T> boolean setAERegions(T aeRegions) {
        if (aeRegions == null) {
            return false;
        }
        setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true);
        setBase((CaptureRequest.Key) CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
        return true;
    }

    private <T> boolean setAWBRegions(T awbRegions) {
        if (awbRegions == null) {
            return false;
        }
        setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true);
        setBase((CaptureRequest.Key) CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
        return true;
    }

    private void updateNativeAllocation() {
        long currentBufferSize = nativeGetBufferSize(mMetadataPtr);

        if (currentBufferSize != mBufferSize) {
            if (mBufferSize > 0) {
                VMRuntime.getRuntime().registerNativeFree(mBufferSize);
            }

            mBufferSize = currentBufferSize;

            if (mBufferSize > 0) {
                VMRuntime.getRuntime().registerNativeAllocation(mBufferSize);
            }
        }
    }

    private int mCameraId = -1;
    private boolean mHasMandatoryConcurrentStreams = false;
    private Size mDisplaySize = new Size(0, 0);
    private long mBufferSize = 0;
    private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null;

    /**
     * Set the current camera Id.
     *
     * @param cameraId Current camera id.
     *
     * @hide
     */
    public void setCameraId(int cameraId) {
        mCameraId = cameraId;
    }

    /**
     * Set whether the metadata advertises mandatory concurrent streams.
     *
     * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent
     *                                      streams.
     *
     * @hide
     */
    public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) {
        mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams;
    }

    /**
     * Set the current display size.
     *
     * @param displaySize The current display size.
     *
     * @hide
     */
    public void setDisplaySize(Size displaySize) {
        mDisplaySize = displaySize;
    }

    /**
     * Set the multi-resolution stream configuration map.
     *
     * @param multiResolutionMap The multi-resolution stream configuration map.
     *
     * @hide
     */
    public void setMultiResolutionStreamConfigurationMap(
            @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) {
        mMultiResolutionStreamConfigurationMap =
                new MultiResolutionStreamConfigurationMap(multiResolutionMap);
    }
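    // Editorial note: illustrative sketch only, not part of the original class.
    // The setters above are presumably invoked by the framework after the metadata
    // crosses Binder and before synthesized keys are queried; this hypothetical
    // helper only shows the order of calls, with made-up values.
    private static void examplePrimeCharacteristics(CameraMetadataNative characteristics,
            Map<String, StreamConfiguration[]> multiResolutionMap) {
        characteristics.setCameraId(0);
        characteristics.setDisplaySize(new Size(1080, 1920));
        characteristics.setHasMandatoryConcurrentStreams(false);
        characteristics.setMultiResolutionStreamConfigurationMap(multiResolutionMap);
    }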
    /**
     * Get the multi-resolution stream configuration map.
     *
     * @return The multi-resolution stream configuration map.
     *
     * @hide
     */
    public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() {
        return mMultiResolutionStreamConfigurationMap;
    }

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>*

    @FastNative
    private static native long nativeAllocate();
    @FastNative
    private static native long nativeAllocateCopy(long ptr)
            throws NullPointerException;

    @FastNative
    private static native void nativeUpdate(long dst, long src);
    private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr);
    private static synchronized native void nativeReadFromParcel(Parcel source, long ptr);
    private static synchronized native void nativeSwap(long ptr, long otherPtr)
            throws NullPointerException;
    @FastNative
    private static native void nativeSetVendorId(long ptr, long vendorId);
    private static synchronized native void nativeClose(long ptr);
    private static synchronized native boolean nativeIsEmpty(long ptr);
    private static synchronized native int nativeGetEntryCount(long ptr);
    private static synchronized native long nativeGetBufferSize(long ptr);

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static synchronized native byte[] nativeReadValues(int tag, long ptr);
    private static synchronized native void nativeWriteValues(int tag, byte[] src, long ptr);
    private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD

    private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass);
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName)
            throws IllegalArgumentException;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTagFromKey(String keyName, long vendorId)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTypeFromTag(int tag, long vendorId)
            throws IllegalArgumentException;
    /**
     * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
     *
     * <p>Useful to convert a CameraMetadata into e.g. a CaptureRequest.</p>
     *
     * @param other Metadata to swap with
     * @throws NullPointerException if other was null
     * @hide
     */
    public void swap(CameraMetadataNative other) {
        nativeSwap(mMetadataPtr, other.mMetadataPtr);
        mCameraId = other.mCameraId;
        mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams;
        mDisplaySize = other.mDisplaySize;
        mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap;
        updateNativeAllocation();
        other.updateNativeAllocation();
    }

    /**
     * Set the native metadata vendor id.
     *
     * @hide
     */
    public void setVendorId(long vendorId) {
        nativeSetVendorId(mMetadataPtr, vendorId);
    }

    /**
     * @hide
     */
    public int getEntryCount() {
        return nativeGetEntryCount(mMetadataPtr);
    }

    /**
     * Whether this metadata contains any entries.
     *
     * @return {@code true} if there are no entries, {@code false} otherwise.
     *
     * @hide
     */
    public boolean isEmpty() {
        return nativeIsEmpty(mMetadataPtr);
    }

    /**
     * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long.
     *
     * @hide
     */
    public long getMetadataPtr() {
        return mMetadataPtr;
    }

    /**
     * Return a list containing keys of the given key class for all defined vendor tags.
     *
     * @hide
     */
    public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) {
        if (keyClass == null) {
            throw new NullPointerException();
        }
        return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass);
    }

    /**
     * Convert a key string into the equivalent native tag.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key) {
        return nativeGetTagFromKey(key, Long.MAX_VALUE);
    }

    /**
     * Convert a key string into the equivalent native tag, for a given vendor tag provider.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key, long vendorId) {
        return nativeGetTagFromKey(key, vendorId);
    }

    /**
     * Get the underlying native type for a tag.
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param vendorId A vendor tag provider id
     * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
     *
     * @hide
     */
    public static int getNativeType(int tag, long vendorId) {
        return nativeGetTypeFromTag(tag, vendorId);
    }

    /**
     * <p>Updates the existing entry for tag with the new bytes pointed to by src, erasing
     * the entry if src was null.</p>
     *
     * <p>An empty array can be passed in to update the entry to 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param src An array of bytes, or null to erase the entry
     *
     * @hide
     */
    public void writeValues(int tag, byte[] src) {
        nativeWriteValues(tag, src, mMetadataPtr);
    }
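    // Editorial note: illustrative sketch only, not part of the original class.
    // Shows the low-level tag round trip documented above: resolve a key string to a
    // native tag, write raw native-order bytes, then read them back. The key string
    // refers to a standard metadata entry; the payload is illustrative only.
    private void exampleRawTagRoundTrip() {
        int tag = getTag("android.control.aeMode");
        byte[] payload = new byte[] {1}; // a single BYTE-typed value
        writeValues(tag, payload);
        byte[] readBack = readValues(tag);
        // readBack now holds the same single byte; passing null to writeValues()
        // instead would erase the entry and make readValues() return null.
    }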
    /**
     * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped ByteBuffer to
     * deserialize the data properly.</p>
     *
     * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     *
     * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
     * @hide
     */
    public byte[] readValues(int tag) {
        // TODO: Optimization. Native code returns a ByteBuffer instead.
        return nativeReadValues(tag, mMetadataPtr);
    }

    /**
     * Dumps the native metadata contents to logcat.
     *
     * <p>Visibility for testing/debugging only. The results will not
     * include any synthesized keys, as they are invisible to the native layer.</p>
     *
     * @hide
     */
    public void dumpToLog() {
        try {
            nativeDump(mMetadataPtr);
        } catch (IOException e) {
            Log.wtf(TAG, "Dump logging failed", e);
        }
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Get the marshaler compatible with the {@code key} and type {@code T}.
     *
     * @throws UnsupportedOperationException
     *          if the native/managed type combination for {@code key} is not supported
     */
    private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) {
        return MarshalRegistry.getMarshaler(key.getTypeReference(),
                nativeType);
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static void registerAllMarshalers() {
        if (DEBUG) {
            Log.v(TAG, "Shall register metadata marshalers");
        }

        MarshalQueryable[] queryList = new MarshalQueryable[] {
                // marshalers for standard types
                new MarshalQueryablePrimitive(),
                new MarshalQueryableEnum(),
                new MarshalQueryableArray(),

                // pseudo standard types, that expand/narrow the native type into a managed type
                new MarshalQueryableBoolean(),
                new MarshalQueryableNativeByteToInteger(),

                // marshalers for custom types
                new MarshalQueryableRect(),
                new MarshalQueryableSize(),
                new MarshalQueryableSizeF(),
                new MarshalQueryableString(),
                new MarshalQueryableReprocessFormatsMap(),
                new MarshalQueryableRange(),
                new MarshalQueryablePair(),
                new MarshalQueryableMeteringRectangle(),
                new MarshalQueryableColorSpaceTransform(),
                new MarshalQueryableStreamConfiguration(),
                new MarshalQueryableStreamConfigurationDuration(),
                new MarshalQueryableRggbChannelVector(),
                new MarshalQueryableBlackLevelPattern(),
                new MarshalQueryableHighSpeedVideoConfiguration(),
                new MarshalQueryableRecommendedStreamConfiguration(),

                // generic parcelable marshaler (MUST BE LAST since it has lowest priority)
                new MarshalQueryableParcelable(),
        };

        for (MarshalQueryable query : queryList) {
            MarshalRegistry.registerMarshalQueryable(query);
        }
        if (DEBUG) {
            Log.v(TAG, "Registered metadata marshalers");
        }
    }
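    // Editorial note: illustrative sketch only, not part of the original class.
    // Registration order above doubles as lookup priority: a type handled by a more
    // specific MarshalQueryable (e.g. Rect) never falls through to the generic
    // Parcelable marshaler, which is why the latter must be registered last. A
    // hypothetical lookup after registration, assuming the TypeReference factory
    // used by Key, might look like this:
    private static Marshaler<Rect> exampleRectMarshalerLookup(int nativeType) {
        return MarshalRegistry.getMarshaler(
                TypeReference.createSpecializedTypeReference(Rect.class), nativeType);
    }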
    /**
     * Check if input arguments are all {@code null}.
     *
     * @param objs Input arguments for null check
     * @return {@code true} if input arguments are all {@code null}, otherwise {@code false}
     */
    private static boolean areValuesAllNull(Object... objs) {
        for (Object o : objs) {
            if (o != null) return false;
        }
        return true;
    }

    /**
     * Return the set of physical camera ids that this logical {@link CameraDevice} is made
     * up of.
     *
     * If the camera device isn't a logical camera, return an empty set.
     *
     * @hide
     */
    public Set<String> getPhysicalCameraIds() {
        int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        if (availableCapabilities == null) {
            throw new AssertionError("android.request.availableCapabilities must be non-null "
                    + "in the characteristics");
        }

        if (!ArrayUtils.contains(availableCapabilities,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) {
            return Collections.emptySet();
        }
        byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);

        String physicalCamIdString = null;
        try {
            physicalCamIdString = new String(physicalCamIds, "UTF-8");
        } catch (java.io.UnsupportedEncodingException e) {
            throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string");
        }
        String[] physicalCameraIdArray = physicalCamIdString.split("\0");

        return Collections.unmodifiableSet(
                new HashSet<String>(Arrays.asList(physicalCameraIdArray)));
    }

    static {
        registerAllMarshalers();
    }
}