1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <array>
23 #include <memory>
24 #include <vector>
25 #include <cmath>
26
27 #include <android-base/properties.h>
28 #include <utils/Log.h>
29 #include <utils/Errors.h>
30 #include <utils/StrongPointer.h>
31 #include <utils/RefBase.h>
32 #include <utils/Vector.h>
33 #include <utils/String8.h>
34 #include <system/camera_metadata.h>
35 #include <camera/CameraMetadata.h>
36 #include <img_utils/DngUtils.h>
37 #include <img_utils/TagDefinitions.h>
38 #include <img_utils/TiffIfd.h>
39 #include <img_utils/TiffWriter.h>
40 #include <img_utils/Output.h>
41 #include <img_utils/Input.h>
42 #include <img_utils/StripSource.h>
43
44 #include "core_jni_helpers.h"
45
46 #include "android_runtime/AndroidRuntime.h"
47 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
48
49 #include <jni.h>
50 #include <nativehelper/JNIHelp.h>
51
52 using namespace android;
53 using namespace img_utils;
54 using android::base::GetProperty;
55
// Throws IllegalArgumentException and returns false from the enclosing function when the
// status expression does not evaluate to OK.
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


// Throws IllegalArgumentException and returns nullptr (used in functions returning a
// pointer or sp<>) when the status expression does not evaluate to OK.
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// Throws IllegalArgumentException and returns -1 when the status expression does not
// evaluate to OK.
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

// Throws IllegalArgumentException and returns nullptr when a metadata entry is empty
// (count == 0), i.e. a required field is missing.
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }

// Throws IllegalArgumentException and returns false when a metadata entry is empty.
#define BAIL_IF_EMPTY_RET_BOOL(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", \
                (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }

// Throws IllegalArgumentException and returns BAD_VALUE (status_t) when a metadata entry
// is empty.
#define BAIL_IF_EMPTY_RET_STATUS(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", \
                (writer)->getTagName(tagId), (tagId)); \
        return BAD_VALUE; \
    }

// Throws IllegalArgumentException and returns nullptr when an arbitrary boolean
// expression is true.
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if (expr) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }
108
109
// Name of the Java field holding the pointer to the native context
// (presumably declared on android.hardware.camera2.DngCreator — declaration not visible here).
#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"

// Cached JNI field/method IDs, resolved once at class-registration time and reused.
static struct {
    jfieldID mNativeContext;  // DngCreator field storing the NativeContext pointer
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;  // used with CallVoidMethod(stream, ..., byte[], int, int)
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;  // used with CallIntMethod(stream, ..., byte[], int, int)
    jmethodID mSkipMethod;  // used with CallLongMethod(stream, ..., long)
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;  // used with CallObjectMethod(buffer, ..., byte[], int, int)
} gInputByteBufferClassInfo;

// Pixel-format constants and fixed TIFF IFD indices used throughout this file.
enum {
    BITS_PER_SAMPLE = 16,        // RAW16 sample depth
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,     // RGB888 thumbnail pixel size
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,              // main image IFD
    TIFF_IFD_SUB1 = 1,           // sub-IFD (e.g. RAW data when a thumbnail is present)
    TIFF_IFD_GPSINFO = 2,        // GPS info IFD
};
141
142
143 /**
144 * POD container class for GPS tag data.
145 */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,   // 3 rational values = 3 (numerator, denominator) pairs — TODO confirm
        GPS_REF_LENGTH = 2,     // single reference character plus NUL terminator — TODO confirm
        GPS_DATE_LENGTH = 11,   // "YYYY:MM:DD" plus NUL terminator — TODO confirm
    };

    uint32_t mLatitude[GPS_VALUE_LENGTH];    // latitude as EXIF rationals
    uint32_t mLongitude[GPS_VALUE_LENGTH];   // longitude as EXIF rationals
    uint32_t mTimestamp[GPS_VALUE_LENGTH];   // UTC timestamp as EXIF rationals
    uint8_t mLatitudeRef[GPS_REF_LENGTH];    // "N" or "S"
    uint8_t mLongitudeRef[GPS_REF_LENGTH];   // "E" or "W"
    uint8_t mDate[GPS_DATE_LENGTH];          // GPS date string
};
161
162 // ----------------------------------------------------------------------------
163
164 /**
165 * Container class for the persistent native context.
166 */
167
class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        // Length of an EXIF-style "YYYY:MM:DD HH:MM:SS" datetime string including NUL.
        DATETIME_COUNT = 20,
    };

    // Copies both metadata packets; the context owns its own copies.
    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TIFF writer owned by this context (valid for the context's lifetime).
    TiffWriter* getWriter();

    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    // Pointer into the internal thumbnail buffer; only meaningful when hasThumbnail().
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies a width x height RGB888 buffer into the context. Returns false on
    // allocation failure.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;               // RGB888 thumbnail pixels
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    bool mThumbnailSet;      // the m*Set flags gate the corresponding has*() accessors
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};
220
NativeContext(const CameraMetadata & characteristics,const CameraMetadata & result)221 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
222 mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
223 mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
224 mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
225 mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
226
~NativeContext()227 NativeContext::~NativeContext() {}
228
getWriter()229 TiffWriter* NativeContext::getWriter() {
230 return &mWriter;
231 }
232
getCharacteristics() const233 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
234 return mCharacteristics;
235 }
236
getResult() const237 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
238 return mResult;
239 }
240
getThumbnailWidth() const241 uint32_t NativeContext::getThumbnailWidth() const {
242 return mThumbnailWidth;
243 }
244
getThumbnailHeight() const245 uint32_t NativeContext::getThumbnailHeight() const {
246 return mThumbnailHeight;
247 }
248
getThumbnail() const249 const uint8_t* NativeContext::getThumbnail() const {
250 return mCurrentThumbnail.array();
251 }
252
hasThumbnail() const253 bool NativeContext::hasThumbnail() const {
254 return mThumbnailSet;
255 }
256
setThumbnail(const uint8_t * buffer,uint32_t width,uint32_t height)257 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
258 mThumbnailWidth = width;
259 mThumbnailHeight = height;
260
261 size_t size = BYTES_PER_RGB_PIXEL * width * height;
262 if (mCurrentThumbnail.resize(size) < 0) {
263 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
264 return false;
265 }
266
267 uint8_t* thumb = mCurrentThumbnail.editArray();
268 memcpy(thumb, buffer, size);
269 mThumbnailSet = true;
270 return true;
271 }
272
setOrientation(uint16_t orientation)273 void NativeContext::setOrientation(uint16_t orientation) {
274 mOrientation = orientation;
275 }
276
getOrientation() const277 uint16_t NativeContext::getOrientation() const {
278 return mOrientation;
279 }
280
setDescription(const String8 & desc)281 void NativeContext::setDescription(const String8& desc) {
282 mDescription = desc;
283 mDescriptionSet = true;
284 }
285
getDescription() const286 String8 NativeContext::getDescription() const {
287 return mDescription;
288 }
289
hasDescription() const290 bool NativeContext::hasDescription() const {
291 return mDescriptionSet;
292 }
293
setGpsData(const GpsData & data)294 void NativeContext::setGpsData(const GpsData& data) {
295 mGpsData = data;
296 mGpsSet = true;
297 }
298
getGpsData() const299 GpsData NativeContext::getGpsData() const {
300 return mGpsData;
301 }
302
hasGpsData() const303 bool NativeContext::hasGpsData() const {
304 return mGpsSet;
305 }
306
setCaptureTime(const String8 & formattedCaptureTime)307 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
308 mFormattedCaptureTime = formattedCaptureTime;
309 mCaptureTimeSet = true;
310 }
311
getCaptureTime() const312 String8 NativeContext::getCaptureTime() const {
313 return mFormattedCaptureTime;
314 }
315
hasCaptureTime() const316 bool NativeContext::hasCaptureTime() const {
317 return mCaptureTimeSet;
318 }
319
320 // End of NativeContext
321 // ----------------------------------------------------------------------------
322
323 /**
324 * Wrapper class for a Java OutputStream.
325 *
326 * This class is not intended to be used across JNI calls.
327 */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    // Wraps a Java OutputStream. Allocates a scratch jbyteArray; throws
    // OutOfMemoryError into the given env on allocation failure.
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    // No-op; present to satisfy the Output interface.
    status_t open();

    // Writes count bytes from buf + offset to the Java stream in
    // BYTE_ARRAY_LENGTH-sized chunks.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    // No-op; closing the Java stream is the caller's responsibility.
    status_t close();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // scratch-buffer/chunk size
    };
    jobject mOutputStream;   // not owned; must outlive this wrapper
    JNIEnv* mEnv;            // not owned; wrapper must not cross JNI calls
    jbyteArray mByteArray;   // local ref, released in the destructor
};
347
JniOutputStream(JNIEnv * env,jobject outStream)348 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
349 mEnv(env) {
350 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
351 if (mByteArray == nullptr) {
352 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
353 }
354 }
355
~JniOutputStream()356 JniOutputStream::~JniOutputStream() {
357 mEnv->DeleteLocalRef(mByteArray);
358 }
359
open()360 status_t JniOutputStream::open() {
361 // Do nothing
362 return OK;
363 }
364
write(const uint8_t * buf,size_t offset,size_t count)365 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
366 while(count > 0) {
367 size_t len = BYTE_ARRAY_LENGTH;
368 len = (count > len) ? len : count;
369 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
370
371 if (mEnv->ExceptionCheck()) {
372 return BAD_VALUE;
373 }
374
375 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
376 0, len);
377
378 if (mEnv->ExceptionCheck()) {
379 return BAD_VALUE;
380 }
381
382 count -= len;
383 offset += len;
384 }
385 return OK;
386 }
387
close()388 status_t JniOutputStream::close() {
389 // Do nothing
390 return OK;
391 }
392
393 // End of JniOutputStream
394 // ----------------------------------------------------------------------------
395
396 /**
397 * Wrapper class for a Java InputStream.
398 *
399 * This class is not intended to be used across JNI calls.
400 */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    // Wraps a Java InputStream. Allocates a scratch jbyteArray; throws
    // OutOfMemoryError into the given env on allocation failure.
    JniInputStream(JNIEnv* env, jobject inStream);

    // No-op; present to satisfy the Input interface.
    status_t open();

    // No-op; closing the Java stream is the caller's responsibility.
    status_t close();

    // Reads up to count bytes into buf + offset. Returns the number of bytes read,
    // NOT_ENOUGH_DATA on EOF, or BAD_VALUE if a Java exception is pending.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to count bytes. Returns bytes skipped, NOT_ENOUGH_DATA, or BAD_VALUE.
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // scratch-buffer/chunk size
    };
    jobject mInStream;       // not owned; must outlive this wrapper
    JNIEnv* mEnv;            // not owned; wrapper must not cross JNI calls
    jbyteArray mByteArray;   // local ref, released in the destructor

};
423
JniInputStream(JNIEnv * env,jobject inStream)424 JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
425 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
426 if (mByteArray == nullptr) {
427 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
428 }
429 }
430
~JniInputStream()431 JniInputStream::~JniInputStream() {
432 mEnv->DeleteLocalRef(mByteArray);
433 }
434
read(uint8_t * buf,size_t offset,size_t count)435 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
436
437 jint realCount = BYTE_ARRAY_LENGTH;
438 if (count < BYTE_ARRAY_LENGTH) {
439 realCount = count;
440 }
441 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
442 realCount);
443
444 if (actual < 0) {
445 return NOT_ENOUGH_DATA;
446 }
447
448 if (mEnv->ExceptionCheck()) {
449 return BAD_VALUE;
450 }
451
452 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
453 if (mEnv->ExceptionCheck()) {
454 return BAD_VALUE;
455 }
456 return actual;
457 }
458
skip(size_t count)459 ssize_t JniInputStream::skip(size_t count) {
460 jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
461 static_cast<jlong>(count));
462
463 if (mEnv->ExceptionCheck()) {
464 return BAD_VALUE;
465 }
466 if (actual < 0) {
467 return NOT_ENOUGH_DATA;
468 }
469 return actual;
470 }
471
open()472 status_t JniInputStream::open() {
473 // Do nothing
474 return OK;
475 }
476
close()477 status_t JniInputStream::close() {
478 // Do nothing
479 return OK;
480 }
481
482 // End of JniInputStream
483 // ----------------------------------------------------------------------------
484
485 /**
486 * Wrapper class for a non-direct Java ByteBuffer.
487 *
488 * This class is not intended to be used across JNI calls.
489 */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    // Wraps a non-direct Java ByteBuffer. Allocates a scratch jbyteArray; throws
    // OutOfMemoryError into the given env on allocation failure.
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    // No-op; present to satisfy the Input interface.
    status_t open();

    // No-op; the buffer's lifecycle is the caller's responsibility.
    status_t close();

    // Reads up to count bytes into buf + offset via ByteBuffer.get(byte[], int, int).
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // scratch-buffer/chunk size
    };
    jobject mInBuf;          // not owned; must outlive this wrapper
    JNIEnv* mEnv;            // not owned; wrapper must not cross JNI calls
    jbyteArray mByteArray;   // local ref, released in the destructor
};
509
JniInputByteBuffer(JNIEnv * env,jobject inBuf)510 JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
511 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
512 if (mByteArray == nullptr) {
513 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
514 }
515 }
516
~JniInputByteBuffer()517 JniInputByteBuffer::~JniInputByteBuffer() {
518 mEnv->DeleteLocalRef(mByteArray);
519 }
520
read(uint8_t * buf,size_t offset,size_t count)521 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
522 jint realCount = BYTE_ARRAY_LENGTH;
523 if (count < BYTE_ARRAY_LENGTH) {
524 realCount = count;
525 }
526
527 jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
528 mByteArray, 0, realCount);
529 mEnv->DeleteLocalRef(chainingBuf);
530
531 if (mEnv->ExceptionCheck()) {
532 ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
533 return BAD_VALUE;
534 }
535
536 mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
537 if (mEnv->ExceptionCheck()) {
538 ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
539 return BAD_VALUE;
540 }
541 return realCount;
542 }
543
open()544 status_t JniInputByteBuffer::open() {
545 // Do nothing
546 return OK;
547 }
548
close()549 status_t JniInputByteBuffer::close() {
550 // Do nothing
551 return OK;
552 }
553
554 // End of JniInputByteBuffer
555 // ----------------------------------------------------------------------------
556
557 /**
558 * StripSource subclass for Input types.
559 *
560 * This class is not intended to be used across JNI calls.
561 */
562
class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    // Streams one image strip for the given IFD out of an Input. offset is the number
    // of bytes to skip in the input before pixel data begins.
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Copies the strip (count bytes) from the input to the output stream row by row.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;            // destination TIFF IFD index
    Input* mInput;            // not owned; must outlive this source
    uint32_t mWidth;          // image width in pixels
    uint32_t mHeight;         // image height in pixels
    uint32_t mPixStride;      // bytes between consecutive pixels in a row
    uint32_t mRowStride;      // bytes between consecutive rows
    uint64_t mOffset;         // bytes to skip before the pixel data
    JNIEnv* mEnv;             // not owned; used only to throw Java exceptions
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};
586
InputStripSource(JNIEnv * env,Input & input,uint32_t ifd,uint32_t width,uint32_t height,uint32_t pixStride,uint32_t rowStride,uint64_t offset,uint32_t bytesPerSample,uint32_t samplesPerPixel)587 InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
588 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
589 uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
590 mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
591 mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
592 mSamplesPerPixel(samplesPerPixel) {}
593
~InputStripSource()594 InputStripSource::~InputStripSource() {}
595
writeToStream(Output & stream,uint32_t count)596 status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
597 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
598 jlong offset = mOffset;
599
600 if (fullSize != count) {
601 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
602 fullSize);
603 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
604 return BAD_VALUE;
605 }
606
607 // Skip offset
608 while (offset > 0) {
609 ssize_t skipped = mInput->skip(offset);
610 if (skipped <= 0) {
611 if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
612 jniThrowExceptionFmt(mEnv, "java/io/IOException",
613 "Early EOF encountered in skip, not enough pixel data for image of size %u",
614 fullSize);
615 skipped = NOT_ENOUGH_DATA;
616 } else {
617 if (!mEnv->ExceptionCheck()) {
618 jniThrowException(mEnv, "java/io/IOException",
619 "Error encountered while skip bytes in input stream.");
620 }
621 }
622
623 return skipped;
624 }
625 offset -= skipped;
626 }
627
628 Vector<uint8_t> row;
629 if (row.resize(mRowStride) < 0) {
630 jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
631 return BAD_VALUE;
632 }
633
634 uint8_t* rowBytes = row.editArray();
635
636 for (uint32_t i = 0; i < mHeight; ++i) {
637 size_t rowFillAmt = 0;
638 size_t rowSize = mRowStride;
639
640 while (rowFillAmt < mRowStride) {
641 ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
642 if (bytesRead <= 0) {
643 if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
644 ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
645 __FUNCTION__, i, bytesRead);
646 jniThrowExceptionFmt(mEnv, "java/io/IOException",
647 "Early EOF encountered, not enough pixel data for image of size %"
648 PRIu32, fullSize);
649 bytesRead = NOT_ENOUGH_DATA;
650 } else {
651 if (!mEnv->ExceptionCheck()) {
652 jniThrowException(mEnv, "java/io/IOException",
653 "Error encountered while reading");
654 }
655 }
656 return bytesRead;
657 }
658 rowFillAmt += bytesRead;
659 rowSize -= bytesRead;
660 }
661
662 if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
663 ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
664
665 if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
666 mEnv->ExceptionCheck()) {
667 if (!mEnv->ExceptionCheck()) {
668 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
669 }
670 return BAD_VALUE;
671 }
672 } else {
673 ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
674 jniThrowException(mEnv, "java/lang/IllegalStateException",
675 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
676 return BAD_VALUE;
677
678 // TODO: Add support for non-contiguous pixels if needed.
679 }
680 }
681 return OK;
682 }
683
getIfd() const684 uint32_t InputStripSource::getIfd() const {
685 return mIfd;
686 }
687
688 // End of InputStripSource
689 // ----------------------------------------------------------------------------
690
691 /**
692 * StripSource subclass for direct buffer types.
693 *
694 * This class is not intended to be used across JNI calls.
695 */
696
697 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
698 public:
699 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
700 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
701 uint32_t bytesPerSample, uint32_t samplesPerPixel);
702
703 virtual ~DirectStripSource();
704
705 virtual status_t writeToStream(Output& stream, uint32_t count);
706
707 virtual uint32_t getIfd() const;
708 protected:
709 uint32_t mIfd;
710 const uint8_t* mPixelBytes;
711 uint32_t mWidth;
712 uint32_t mHeight;
713 uint32_t mPixStride;
714 uint32_t mRowStride;
715 uint16_t mOffset;
716 JNIEnv* mEnv;
717 uint32_t mBytesPerSample;
718 uint32_t mSamplesPerPixel;
719 };
720
DirectStripSource(JNIEnv * env,const uint8_t * pixelBytes,uint32_t ifd,uint32_t width,uint32_t height,uint32_t pixStride,uint32_t rowStride,uint64_t offset,uint32_t bytesPerSample,uint32_t samplesPerPixel)721 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
722 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
723 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
724 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
725 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
726 mSamplesPerPixel(samplesPerPixel) {}
727
~DirectStripSource()728 DirectStripSource::~DirectStripSource() {}
729
writeToStream(Output & stream,uint32_t count)730 status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
731 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
732
733 if (fullSize != count) {
734 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
735 fullSize);
736 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
737 return BAD_VALUE;
738 }
739
740
741 if (mPixStride == mBytesPerSample * mSamplesPerPixel
742 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
743 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
744
745 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
746 if (!mEnv->ExceptionCheck()) {
747 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
748 }
749 return BAD_VALUE;
750 }
751 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
752 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
753
754 for (size_t i = 0; i < mHeight; ++i) {
755 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
756 mEnv->ExceptionCheck()) {
757 if (!mEnv->ExceptionCheck()) {
758 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
759 }
760 return BAD_VALUE;
761 }
762 }
763 } else {
764 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
765
766 jniThrowException(mEnv, "java/lang/IllegalStateException",
767 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
768 return BAD_VALUE;
769
770 // TODO: Add support for non-contiguous pixels if needed.
771 }
772 return OK;
773
774 }
775
getIfd() const776 uint32_t DirectStripSource::getIfd() const {
777 return mIfd;
778 }
779
780 // End of DirectStripSource
781 // ----------------------------------------------------------------------------
782
783 // Get the appropriate tag corresponding to default / maximum resolution mode.
getAppropriateModeTag(int32_t tag,bool maximumResolution)784 static int32_t getAppropriateModeTag(int32_t tag, bool maximumResolution) {
785 if (!maximumResolution) {
786 return tag;
787 }
788 switch (tag) {
789 case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE:
790 return ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION;
791 case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
792 return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
793 case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
794 return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
795 default:
796 ALOGE("%s: Tag %d doesn't have sensor info related maximum resolution counterpart",
797 __FUNCTION__, tag);
798 return -1;
799 }
800 }
801
isMaximumResolutionModeImage(const CameraMetadata & characteristics,uint32_t imageWidth,uint32_t imageHeight,const sp<TiffWriter> writer,JNIEnv * env)802 static bool isMaximumResolutionModeImage(const CameraMetadata& characteristics, uint32_t imageWidth,
803 uint32_t imageHeight, const sp<TiffWriter> writer,
804 JNIEnv* env) {
805 // If this isn't an ultra-high resolution sensor, return false;
806 camera_metadata_ro_entry capabilitiesEntry =
807 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
808 size_t capsCount = capabilitiesEntry.count;
809 const uint8_t* caps = capabilitiesEntry.data.u8;
810 if (std::find(caps, caps + capsCount,
811 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) ==
812 caps + capsCount) {
813 // not an ultra-high resolution sensor, cannot have a maximum resolution
814 // mode image.
815 return false;
816 }
817
818 // If the image width and height are either the maximum resolution
819 // pre-correction active array size or the maximum resolution pixel array
820 // size, this image is a maximum resolution RAW_SENSOR image.
821
822 // Check dimensions
823 camera_metadata_ro_entry entry = characteristics.find(
824 ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION);
825
826 BAIL_IF_EMPTY_RET_BOOL(entry, env, TAG_IMAGEWIDTH, writer);
827
828 uint32_t preWidth = static_cast<uint32_t>(entry.data.i32[2]);
829 uint32_t preHeight = static_cast<uint32_t>(entry.data.i32[3]);
830
831 camera_metadata_ro_entry pixelArrayEntry =
832 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION);
833
834 BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
835
836 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
837 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
838
839 return (imageWidth == preWidth && imageHeight == preHeight) ||
840 (imageWidth == pixWidth && imageHeight == pixHeight);
841 }
842
843 /**
844 * Calculate the default crop relative to the "active area" of the image sensor (this active area
845 * will always be the pre-correction active area rectangle), and set this.
846 */
calculateAndSetCrop(JNIEnv * env,const CameraMetadata & characteristics,sp<TiffWriter> writer,bool maximumResolutionMode)847 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
848 sp<TiffWriter> writer, bool maximumResolutionMode) {
849 camera_metadata_ro_entry entry = characteristics.find(
850 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
851 maximumResolutionMode));
852 BAIL_IF_EMPTY_RET_STATUS(entry, env, TAG_IMAGEWIDTH, writer);
853 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
854 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
855
856 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
857
858 if (width < margin * 2 || height < margin * 2) {
859 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
860 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
861 jniThrowException(env, "java/lang/IllegalStateException",
862 "Pre-correction active area is too small.");
863 return BAD_VALUE;
864 }
865
866 uint32_t defaultCropOrigin[] = {margin, margin};
867 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
868 height - defaultCropOrigin[1] - margin};
869
870 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
871 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
872 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
873 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
874
875 return OK;
876 }
877
validateDngHeader(JNIEnv * env,sp<TiffWriter> writer,const CameraMetadata & characteristics,jint width,jint height)878 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
879 const CameraMetadata& characteristics, jint width, jint height) {
880 if (width <= 0) {
881 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
882 "Image width %d is invalid", width);
883 return false;
884 }
885
886 if (height <= 0) {
887 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
888 "Image height %d is invalid", height);
889 return false;
890 }
891 bool isMaximumResolutionMode =
892 isMaximumResolutionModeImage(characteristics, static_cast<uint32_t>(width),
893 static_cast<uint32_t>(height), writer, env);
894
895 camera_metadata_ro_entry preCorrectionEntry = characteristics.find(
896 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
897 isMaximumResolutionMode));
898 BAIL_IF_EMPTY_RET_BOOL(preCorrectionEntry, env, TAG_IMAGEWIDTH, writer);
899
900 camera_metadata_ro_entry pixelArrayEntry = characteristics.find(
901 getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, isMaximumResolutionMode));
902 BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
903
904 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
905 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
906 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
907 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
908
909 bool matchesPixelArray = (pWidth == width && pHeight == height);
910 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
911
912 if (!(matchesPixelArray || matchesPreCorrectionArray)) {
913 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
914 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
915 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
916 width, height, pWidth, pHeight, cWidth, cHeight);
917 return false;
918 }
919
920 return true;
921 }
922
moveEntries(sp<TiffWriter> writer,uint32_t ifdFrom,uint32_t ifdTo,const Vector<uint16_t> & entries)923 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
924 const Vector<uint16_t>& entries) {
925 for (size_t i = 0; i < entries.size(); ++i) {
926 uint16_t tagId = entries[i];
927 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
928 if (entry.get() == nullptr) {
929 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
930 ifdFrom);
931 return BAD_VALUE;
932 }
933 if (writer->addEntry(entry, ifdTo) != OK) {
934 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
935 ifdFrom);
936 return BAD_VALUE;
937 }
938 writer->removeEntry(tagId, ifdFrom);
939 }
940 return OK;
941 }
942
943 /**
944 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
945 * Returns OK on success, or a negative error code if the CFA enum was invalid.
946 */
convertCFA(uint8_t cfaEnum,uint8_t * cfaOut)947 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
948 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
949 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
950 cfaEnum);
951 switch(cfa) {
952 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
953 cfaOut[0] = 0;
954 cfaOut[1] = 1;
955 cfaOut[2] = 1;
956 cfaOut[3] = 2;
957 break;
958 }
959 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
960 cfaOut[0] = 1;
961 cfaOut[1] = 0;
962 cfaOut[2] = 2;
963 cfaOut[3] = 1;
964 break;
965 }
966 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
967 cfaOut[0] = 1;
968 cfaOut[1] = 2;
969 cfaOut[2] = 0;
970 cfaOut[3] = 1;
971 break;
972 }
973 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
974 cfaOut[0] = 2;
975 cfaOut[1] = 1;
976 cfaOut[2] = 1;
977 cfaOut[3] = 0;
978 break;
979 }
980 // MONO and NIR are degenerate case of RGGB pattern: only Red channel
981 // will be used.
982 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
983 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
984 cfaOut[0] = 0;
985 break;
986 }
987 default: {
988 return BAD_VALUE;
989 }
990 }
991 return OK;
992 }
993
994 /**
995 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
996 * RGGB for an unknown enum.
997 */
convertCFAEnumToOpcodeLayout(uint8_t cfaEnum)998 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
999 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
1000 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
1001 cfaEnum);
1002 switch(cfa) {
1003 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
1004 return OpcodeListBuilder::CFA_RGGB;
1005 }
1006 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
1007 return OpcodeListBuilder::CFA_GRBG;
1008 }
1009 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
1010 return OpcodeListBuilder::CFA_GBRG;
1011 }
1012 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
1013 return OpcodeListBuilder::CFA_BGGR;
1014 }
1015 default: {
1016 return OpcodeListBuilder::CFA_RGGB;
1017 }
1018 }
1019 }
1020
1021 /**
1022 * For each color plane, find the corresponding noise profile coefficients given in the
1023 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
1024 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
1025 *
1026 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
1027 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
1028 * coefficients.
1029 * numChannels - the number of noise profile coefficient pairs and color channels given in
1030 * the perChannelNoiseProfile and cfa arguments, respectively.
1031 * planeColors - the color planes in the noise profile output.
1032 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
1033 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
1034 *
1035 * returns OK, or a negative error code on failure.
1036 */
generateNoiseProfile(const double * perChannelNoiseProfile,uint8_t * cfa,size_t numChannels,const uint8_t * planeColors,size_t numPlanes,double * noiseProfile)1037 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
1038 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
1039 /*out*/double* noiseProfile) {
1040
1041 for (size_t p = 0; p < numPlanes; ++p) {
1042 size_t S = p * 2;
1043 size_t O = p * 2 + 1;
1044
1045 noiseProfile[S] = 0;
1046 noiseProfile[O] = 0;
1047 bool uninitialized = true;
1048 for (size_t c = 0; c < numChannels; ++c) {
1049 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
1050 noiseProfile[S] = perChannelNoiseProfile[c * 2];
1051 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
1052 uninitialized = false;
1053 }
1054 }
1055 if (uninitialized) {
1056 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
1057 __FUNCTION__, p);
1058 return BAD_VALUE;
1059 }
1060 }
1061 return OK;
1062 }
1063
/**
 * Apply the six-coefficient radial + tangential distortion polynomial to the
 * point (x, y) in place.
 *
 * distortion - the six coefficients {k0, k1, k2, k3, p1, p2}.
 * cx, cy - the distortion center, in the same coordinates as (x, y).
 * f - the focal length used to normalize coordinates before applying the model.
 */
static void undistort(/*inout*/double& x, /*inout*/double& y,
        const std::array<float, 6>& distortion,
        const float cx, const float cy, const float f) {
    // Normalize to the distortion center.
    const double nx = (x - cx) / f;
    const double ny = (y - cy) / f;

    const double nx2 = nx * nx;
    const double ny2 = ny * ny;
    const double rSq = nx2 + ny2;
    const double crossTerm = 2.0 * nx * ny;

    // Radial term k0 + k1*r^2 + k2*r^4 + k3*r^6, evaluated Horner-style.
    const double radial = distortion[0] +
            ((distortion[3] * rSq + distortion[2]) * rSq + distortion[1]) * rSq;

    // Radial plus tangential (p1, p2) correction.
    const double outX = nx * radial + distortion[4] * crossTerm +
            distortion[5] * (rSq + 2.0 * nx2);
    const double outY = ny * radial + distortion[4] * (rSq + 2.0 * ny2) +
            distortion[5] * crossTerm;

    // Denormalize back to the input coordinate space.
    x = outX * f + cx;
    y = outY * f + cy;
}
1090
unDistortWithinPreCorrArray(double x,double y,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1091 static inline bool unDistortWithinPreCorrArray(
1092 double x, double y,
1093 const std::array<float, 6>& distortion,
1094 const float cx, const float cy, const float f,
1095 const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
1096 undistort(x, y, distortion, cx, cy, f);
1097 // xMin and yMin are inclusive, and xMax and yMax are exclusive.
1098 int xMax = xMin + preCorrW;
1099 int yMax = yMin + preCorrH;
1100 if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
1101 return false;
1102 }
1103 return true;
1104 }
1105
boxWithinPrecorrectionArray(int left,int top,int right,int bottom,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1106 static inline bool boxWithinPrecorrectionArray(
1107 int left, int top, int right, int bottom,
1108 const std::array<float, 6>& distortion,
1109 const float cx, const float cy, const float f,
1110 const int preCorrW, const int preCorrH, const int xMin, const int yMin){
1111 // Top row
1112 if (!unDistortWithinPreCorrArray(left, top,
1113 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1114 return false;
1115 }
1116
1117 if (!unDistortWithinPreCorrArray(cx, top,
1118 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1119 return false;
1120 }
1121
1122 if (!unDistortWithinPreCorrArray(right, top,
1123 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1124 return false;
1125 }
1126
1127 // Middle row
1128 if (!unDistortWithinPreCorrArray(left, cy,
1129 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1130 return false;
1131 }
1132
1133 if (!unDistortWithinPreCorrArray(right, cy,
1134 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1135 return false;
1136 }
1137
1138 // Bottom row
1139 if (!unDistortWithinPreCorrArray(left, bottom,
1140 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1141 return false;
1142 }
1143
1144 if (!unDistortWithinPreCorrArray(cx, bottom,
1145 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1146 return false;
1147 }
1148
1149 if (!unDistortWithinPreCorrArray(right, bottom,
1150 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1151 return false;
1152 }
1153 return true;
1154 }
1155
scaledBoxWithinPrecorrectionArray(double scale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1156 static inline bool scaledBoxWithinPrecorrectionArray(
1157 double scale/*must be <= 1.0*/,
1158 const std::array<float, 6>& distortion,
1159 const float cx, const float cy, const float f,
1160 const int preCorrW, const int preCorrH,
1161 const int xMin, const int yMin){
1162
1163 double left = cx * (1.0 - scale);
1164 double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
1165 double top = cy * (1.0 - scale);
1166 double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
1167
1168 return boxWithinPrecorrectionArray(left, top, right, bottom,
1169 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
1170 }
1171
findPostCorrectionScale(double stepSize,double minScale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin,double * outScale)1172 static status_t findPostCorrectionScale(
1173 double stepSize, double minScale,
1174 const std::array<float, 6>& distortion,
1175 const float cx, const float cy, const float f,
1176 const int preCorrW, const int preCorrH, const int xMin, const int yMin,
1177 /*out*/ double* outScale) {
1178 if (outScale == nullptr) {
1179 ALOGE("%s: outScale must not be null", __FUNCTION__);
1180 return BAD_VALUE;
1181 }
1182
1183 for (double scale = 1.0; scale > minScale; scale -= stepSize) {
1184 if (scaledBoxWithinPrecorrectionArray(
1185 scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1186 *outScale = scale;
1187 return OK;
1188 }
1189 }
1190 ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
1191 __FUNCTION__, stepSize, minScale);
1192 return BAD_VALUE;
1193 }
1194
1195 // Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
1196 // are sampled within the precorrection array
normalizeLensDistortion(std::array<float,6> & distortion,float cx,float cy,float f,int preCorrW,int preCorrH,int xMin=0,int yMin=0)1197 static void normalizeLensDistortion(
1198 /*inout*/std::array<float, 6>& distortion,
1199 float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
1200 ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
1201 ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
1202 __FUNCTION__, distortion[0], distortion[1], distortion[2],
1203 distortion[3], distortion[4], distortion[5],
1204 cx, cy, f, preCorrW, preCorrH,
1205 xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
1206
1207 // Only update distortion coeffients if we can find a good bounding box
1208 double scale = 1.0;
1209 if (OK == findPostCorrectionScale(0.002, 0.5,
1210 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
1211 /*out*/&scale)) {
1212 ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
1213 // The formula:
1214 // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
1215 // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
1216 // Factor the extra m power terms into k0~k6
1217 std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
1218 for (size_t i = 0; i < 6; i++) {
1219 distortion[i] *= pow(scale, scalePowers[i]);
1220 }
1221 }
1222 return;
1223 }
1224
1225 // ----------------------------------------------------------------------------
1226 extern "C" {
1227
// Fetch the NativeContext pointer stored in the Java DngCreator's
// mNativeContext long field. Returns null if no context has been set yet
// (i.e. DngCreator_init has not run).
static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
            gDngCreatorClassInfo.mNativeContext));
}
1233
// Store `context` in the Java object's mNativeContext field, taking a strong
// reference on the new context and dropping the reference held on the previous
// one (either may be null).
static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
    ALOGV("%s:", __FUNCTION__);
    NativeContext* current = DngCreator_getNativeContext(env, thiz);

    // Increment the new context's refcount BEFORE decrementing the old one:
    // if both point at the same object, the reverse order could transiently
    // drop its refcount to zero and destroy it.
    if (context != nullptr) {
        context->incStrong((void*) DngCreator_setNativeContext);
    }

    if (current) {
        current->decStrong((void*) DngCreator_setNativeContext);
    }

    env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
            reinterpret_cast<jlong>(context.get()));
}
1249
DngCreator_nativeClassInit(JNIEnv * env,jclass clazz)1250 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
1251 ALOGV("%s:", __FUNCTION__);
1252
1253 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1254 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1255
1256 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1257 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1258 outputStreamClazz, "write", "([BII)V");
1259
1260 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1261 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1262 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1263
1264 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1265 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1266 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1267 }
1268
DngCreator_init(JNIEnv * env,jobject thiz,jobject characteristicsPtr,jobject resultsPtr,jstring formattedCaptureTime)1269 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1270 jobject resultsPtr, jstring formattedCaptureTime) {
1271 ALOGV("%s:", __FUNCTION__);
1272 CameraMetadata characteristics;
1273 CameraMetadata results;
1274 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1275 jniThrowException(env, "java/lang/AssertionError",
1276 "No native metadata defined for camera characteristics.");
1277 return;
1278 }
1279 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1280 jniThrowException(env, "java/lang/AssertionError",
1281 "No native metadata defined for capture results.");
1282 return;
1283 }
1284
1285 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1286
1287 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1288
1289 size_t len = strlen(captureTime) + 1;
1290 if (len != NativeContext::DATETIME_COUNT) {
1291 jniThrowException(env, "java/lang/IllegalArgumentException",
1292 "Formatted capture time string length is not required 20 characters");
1293 return;
1294 }
1295
1296 nativeContext->setCaptureTime(String8(captureTime));
1297
1298 DngCreator_setNativeContext(env, thiz, nativeContext);
1299 }
1300
DngCreator_setup(JNIEnv * env,jobject thiz,uint32_t imageWidth,uint32_t imageHeight)1301 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1302 uint32_t imageHeight) {
1303
1304 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1305
1306 if (nativeContext == nullptr) {
1307 jniThrowException(env, "java/lang/AssertionError",
1308 "No native context, must call init before other operations.");
1309 return nullptr;
1310 }
1311
1312 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1313 CameraMetadata results = *(nativeContext->getResult());
1314
1315 sp<TiffWriter> writer = new TiffWriter();
1316
1317 uint32_t preXMin = 0;
1318 uint32_t preYMin = 0;
1319 uint32_t preWidth = 0;
1320 uint32_t preHeight = 0;
1321 uint8_t colorFilter = 0;
1322 bool isBayer = true;
1323 bool isMaximumResolutionMode =
1324 isMaximumResolutionModeImage(characteristics, imageWidth, imageHeight, writer, env);
1325 {
1326 // Check dimensions
1327 camera_metadata_entry entry = characteristics.find(
1328 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1329 isMaximumResolutionMode));
1330 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1331 preXMin = static_cast<uint32_t>(entry.data.i32[0]);
1332 preYMin = static_cast<uint32_t>(entry.data.i32[1]);
1333 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1334 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1335
1336 camera_metadata_entry pixelArrayEntry =
1337 characteristics.find(getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1338 isMaximumResolutionMode));
1339
1340 BAIL_IF_EMPTY_RET_NULL_SP(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
1341 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1342 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1343
1344 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1345 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1346 jniThrowException(env, "java/lang/AssertionError",
1347 "Height and width of image buffer did not match height and width of"
1348 " either the preCorrectionActiveArraySize or the pixelArraySize.");
1349 return nullptr;
1350 }
1351
1352 camera_metadata_entry colorFilterEntry =
1353 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1354 colorFilter = colorFilterEntry.data.u8[0];
1355 camera_metadata_entry capabilitiesEntry =
1356 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1357 size_t capsCount = capabilitiesEntry.count;
1358 uint8_t* caps = capabilitiesEntry.data.u8;
1359 if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
1360 != caps+capsCount) {
1361 isBayer = false;
1362 } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
1363 colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
1364 jniThrowException(env, "java/lang/AssertionError",
1365 "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
1366 return nullptr;
1367 }
1368 }
1369
1370 writer->addIfd(TIFF_IFD_0);
1371
1372 status_t err = OK;
1373
1374 const uint32_t samplesPerPixel = 1;
1375 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1376
1377 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
1378 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1379 camera_metadata_entry cfaEntry =
1380 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1381 BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
1382 uint8_t cfaEnum = cfaEntry.data.u8[0];
1383
1384 // TODO: Greensplit.
1385 // TODO: Add remaining non-essential tags
1386
1387 // Setup main image tags
1388
1389 {
1390 // Set orientation
1391 uint16_t orientation = TAG_ORIENTATION_NORMAL;
1392 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1393 env, TAG_ORIENTATION, writer);
1394 }
1395
1396 {
1397 // Set subfiletype
1398 uint32_t subfileType = 0; // Main image
1399 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1400 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1401 }
1402
1403 {
1404 // Set bits per sample
1405 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1406 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1407 TAG_BITSPERSAMPLE, writer);
1408 }
1409
1410 {
1411 // Set compression
1412 uint16_t compression = 1; // None
1413 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1414 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1415 }
1416
1417 {
1418 // Set dimensions
1419 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1420 env, TAG_IMAGEWIDTH, writer);
1421 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1422 env, TAG_IMAGELENGTH, writer);
1423 }
1424
1425 {
1426 // Set photometric interpretation
1427 uint16_t interpretation = isBayer ? 32803 /* CFA */ :
1428 34892; /* Linear Raw */;
1429 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1430 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1431 }
1432
1433 {
1434 uint16_t repeatDim[2] = {2, 2};
1435 if (!isBayer) {
1436 repeatDim[0] = repeatDim[1] = 1;
1437 }
1438 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1439 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1440
1441 // Set blacklevel tags, using dynamic black level if available
1442 camera_metadata_entry entry =
1443 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1444 uint32_t blackLevelRational[8] = {0};
1445 if (entry.count != 0) {
1446 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1447 for (size_t i = 0; i < entry.count; i++) {
1448 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1449 blackLevelRational[i * 2 + 1] = 100;
1450 }
1451 } else {
1452 // Fall back to static black level which is guaranteed
1453 entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1454 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1455 for (size_t i = 0; i < entry.count; i++) {
1456 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1457 blackLevelRational[i * 2 + 1] = 1;
1458 }
1459 }
1460 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
1461 blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1462 }
1463
1464 {
1465 // Set samples per pixel
1466 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1467 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1468 env, TAG_SAMPLESPERPIXEL, writer);
1469 }
1470
1471 {
1472 // Set planar configuration
1473 uint16_t config = 1; // Chunky
1474 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1475 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1476 }
1477
1478 // All CFA pattern tags are not necessary for monochrome cameras.
1479 if (isBayer) {
1480 // Set CFA pattern dimensions
1481 uint16_t repeatDim[2] = {2, 2};
1482 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1483 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1484
1485 // Set CFA pattern
1486 const int cfaLength = 4;
1487 uint8_t cfa[cfaLength];
1488 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1489 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1490 "Invalid metadata for tag %d", TAG_CFAPATTERN);
1491 }
1492
1493 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1494 env, TAG_CFAPATTERN, writer);
1495
1496 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1497
1498 // Set CFA plane color
1499 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1500 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1501
1502 // Set CFA layout
1503 uint16_t cfaLayout = 1;
1504 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1505 env, TAG_CFALAYOUT, writer);
1506 }
1507
1508 {
1509 // image description
1510 uint8_t imageDescription = '\0'; // empty
1511 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1512 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1513 }
1514
1515 {
1516 // make
1517 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1518 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1519 uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1520
1521 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1522 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1523 writer);
1524 }
1525
1526 {
1527 // model
1528 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1529 std::string model = GetProperty("ro.product.model", "");
1530 uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1531
1532 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1533 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1534 writer);
1535 }
1536
1537 {
1538 // x resolution
1539 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1540 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1541 env, TAG_XRESOLUTION, writer);
1542
1543 // y resolution
1544 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1545 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1546 env, TAG_YRESOLUTION, writer);
1547
1548 uint16_t unit = 2; // inches
1549 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1550 env, TAG_RESOLUTIONUNIT, writer);
1551 }
1552
1553 {
1554 // software
1555 std::string software = GetProperty("ro.build.fingerprint", "");
1556 uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1557 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1558 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1559 writer);
1560 }
1561
1562 if (nativeContext->hasCaptureTime()) {
1563 // datetime
1564 String8 captureTime = nativeContext->getCaptureTime();
1565
1566 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1567 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1568 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1569 "Invalid metadata for tag %x", TAG_DATETIME);
1570 return nullptr;
1571 }
1572
1573 // datetime original
1574 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1575 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1576 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1577 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1578 return nullptr;
1579 }
1580 }
1581
1582 {
1583 // TIFF/EP standard id
1584 uint8_t standardId[] = { 1, 0, 0, 0 };
1585 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1586 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1587 }
1588
1589 {
1590 // copyright
1591 uint8_t copyright = '\0'; // empty
1592 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, ©right,
1593 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1594 }
1595
1596 {
1597 // exposure time
1598 camera_metadata_entry entry =
1599 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1600 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1601
1602 int64_t exposureTime = *(entry.data.i64);
1603
1604 if (exposureTime < 0) {
1605 // Should be unreachable
1606 jniThrowException(env, "java/lang/IllegalArgumentException",
1607 "Negative exposure time in metadata");
1608 return nullptr;
1609 }
1610
1611 // Ensure exposure time doesn't overflow (for exposures > 4s)
1612 uint32_t denominator = 1000000000;
1613 while (exposureTime > UINT32_MAX) {
1614 exposureTime >>= 1;
1615 denominator >>= 1;
1616 if (denominator == 0) {
1617 // Should be unreachable
1618 jniThrowException(env, "java/lang/IllegalArgumentException",
1619 "Exposure time too long");
1620 return nullptr;
1621 }
1622 }
1623
1624 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1625 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1626 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1627
1628 }
1629
1630 {
1631 // ISO speed ratings
1632 camera_metadata_entry entry =
1633 results.find(ANDROID_SENSOR_SENSITIVITY);
1634 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1635
1636 int32_t tempIso = *(entry.data.i32);
1637 if (tempIso < 0) {
1638 jniThrowException(env, "java/lang/IllegalArgumentException",
1639 "Negative ISO value");
1640 return nullptr;
1641 }
1642
1643 if (tempIso > UINT16_MAX) {
1644 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1645 tempIso = UINT16_MAX;
1646 }
1647
1648 uint16_t iso = static_cast<uint16_t>(tempIso);
1649 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1650 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1651 }
1652
1653 {
1654 // Baseline exposure
1655 camera_metadata_entry entry =
1656 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1657 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1658
1659 // post RAW gain should be boostValue / 100
1660 double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1661 // Baseline exposure should be in EV units so log2(gain) =
1662 // log10(gain)/log10(2)
1663 double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1664 int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1665 100 };
1666 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1667 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1668 }
1669
1670 {
1671 // focal length
1672 camera_metadata_entry entry =
1673 results.find(ANDROID_LENS_FOCAL_LENGTH);
1674 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1675
1676 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1677 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1678 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1679 }
1680
1681 {
1682 // f number
1683 camera_metadata_entry entry =
1684 results.find(ANDROID_LENS_APERTURE);
1685 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1686
1687 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1688 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1689 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1690 }
1691
1692 {
1693 // Set DNG version information
1694 uint8_t version[4] = {1, 4, 0, 0};
1695 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1696 env, TAG_DNGVERSION, writer);
1697
1698 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1699 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1700 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1701 }
1702
1703 {
1704 // Set whitelevel
1705 camera_metadata_entry entry =
1706 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1707 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1708 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1709 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1710 env, TAG_WHITELEVEL, writer);
1711 }
1712
1713 {
1714 // Set default scale
1715 uint32_t defaultScale[4] = {1, 1, 1, 1};
1716 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1717 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1718 }
1719
1720 bool singleIlluminant = false;
1721 if (isBayer) {
1722 // Set calibration illuminants
1723 camera_metadata_entry entry1 =
1724 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1725 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1726 camera_metadata_entry entry2 =
1727 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1728 if (entry2.count == 0) {
1729 singleIlluminant = true;
1730 }
1731 uint16_t ref1 = entry1.data.u8[0];
1732
1733 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1734 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1735
1736 if (!singleIlluminant) {
1737 uint16_t ref2 = entry2.data.u8[0];
1738 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1739 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1740 }
1741 }
1742
1743 if (isBayer) {
1744 // Set color transforms
1745 camera_metadata_entry entry1 =
1746 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1747 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1748
1749 int32_t colorTransform1[entry1.count * 2];
1750
1751 size_t ctr = 0;
1752 for(size_t i = 0; i < entry1.count; ++i) {
1753 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1754 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1755 }
1756
1757 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1758 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1759
1760 if (!singleIlluminant) {
1761 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1762 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1763 int32_t colorTransform2[entry2.count * 2];
1764
1765 ctr = 0;
1766 for(size_t i = 0; i < entry2.count; ++i) {
1767 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1768 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1769 }
1770
1771 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1772 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1773 }
1774 }
1775
1776 if (isBayer) {
1777 // Set calibration transforms
1778 camera_metadata_entry entry1 =
1779 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1780 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1781
1782 int32_t calibrationTransform1[entry1.count * 2];
1783
1784 size_t ctr = 0;
1785 for(size_t i = 0; i < entry1.count; ++i) {
1786 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1787 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1788 }
1789
1790 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1791 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1792
1793 if (!singleIlluminant) {
1794 camera_metadata_entry entry2 =
1795 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1796 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1797 int32_t calibrationTransform2[entry2.count * 2];
1798
1799 ctr = 0;
1800 for(size_t i = 0; i < entry2.count; ++i) {
1801 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1802 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1803 }
1804
1805 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1806 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1807 }
1808 }
1809
1810 if (isBayer) {
1811 // Set forward transforms
1812 camera_metadata_entry entry1 =
1813 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1814 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1815
1816 int32_t forwardTransform1[entry1.count * 2];
1817
1818 size_t ctr = 0;
1819 for(size_t i = 0; i < entry1.count; ++i) {
1820 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1821 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1822 }
1823
1824 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1825 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1826
1827 if (!singleIlluminant) {
1828 camera_metadata_entry entry2 =
1829 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1830 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1831 int32_t forwardTransform2[entry2.count * 2];
1832
1833 ctr = 0;
1834 for(size_t i = 0; i < entry2.count; ++i) {
1835 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1836 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1837 }
1838
1839 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1840 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1841 }
1842 }
1843
1844 if (isBayer) {
1845 // Set camera neutral
1846 camera_metadata_entry entry =
1847 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1848 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1849 uint32_t cameraNeutral[entry.count * 2];
1850
1851 size_t ctr = 0;
1852 for(size_t i = 0; i < entry.count; ++i) {
1853 cameraNeutral[ctr++] =
1854 static_cast<uint32_t>(entry.data.r[i].numerator);
1855 cameraNeutral[ctr++] =
1856 static_cast<uint32_t>(entry.data.r[i].denominator);
1857 }
1858
1859 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1860 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1861 }
1862
1863
1864 {
1865 // Set dimensions
1866 if (calculateAndSetCrop(env, characteristics, writer, isMaximumResolutionMode) != OK) {
1867 return nullptr;
1868 }
1869 camera_metadata_entry entry = characteristics.find(
1870 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1871 isMaximumResolutionMode));
1872 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1873 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1874 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1875 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1876 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1877
1878 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1879 // relative to the pixel array.
1880 if (imageWidth == width && imageHeight == height) {
1881 xmin = 0;
1882 ymin = 0;
1883 }
1884
1885 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1886 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1887 env, TAG_ACTIVEAREA, writer);
1888 }
1889
1890 {
1891 // Setup unique camera model tag
1892 std::string model = GetProperty("ro.product.model", "");
1893 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1894 std::string brand = GetProperty("ro.product.brand", "");
1895
1896 String8 cameraModel(model.c_str());
1897 cameraModel += "-";
1898 cameraModel += manufacturer.c_str();
1899 cameraModel += "-";
1900 cameraModel += brand.c_str();
1901
1902 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1903 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1904 TAG_UNIQUECAMERAMODEL, writer);
1905 }
1906
1907 {
1908 // Setup sensor noise model
1909 camera_metadata_entry entry =
1910 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1911
1912 const status_t numPlaneColors = isBayer ? 3 : 1;
1913 const status_t numCfaChannels = isBayer ? 4 : 1;
1914
1915 uint8_t cfaOut[numCfaChannels];
1916 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1917 jniThrowException(env, "java/lang/IllegalArgumentException",
1918 "Invalid CFA from camera characteristics");
1919 return nullptr;
1920 }
1921
1922 double noiseProfile[numPlaneColors * 2];
1923
1924 if (entry.count > 0) {
1925 if (entry.count != numCfaChannels * 2) {
1926 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1927 "in characteristics, no noise profile tag written...",
1928 __FUNCTION__, entry.count);
1929 } else {
1930 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1931 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1932
1933 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1934 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1935 writer);
1936 } else {
1937 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1938 " tag written...", __FUNCTION__);
1939 }
1940 }
1941 } else {
1942 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1943 __FUNCTION__);
1944 }
1945 }
1946
1947 {
1948 // Set up opcode List 2
1949 OpcodeListBuilder builder;
1950 status_t err = OK;
1951
1952 // Set up lens shading map
1953 camera_metadata_entry entry1 =
1954 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1955
1956 uint32_t lsmWidth = 0;
1957 uint32_t lsmHeight = 0;
1958
1959 if (entry1.count != 0) {
1960 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1961 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1962 }
1963
1964 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1965
1966 camera_metadata_entry entry = characteristics.find(
1967 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1968 isMaximumResolutionMode));
1969 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1970 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1971 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1972 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1973 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1974 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1975 // GainMap rectangle is relative to the active area origin.
1976 err = builder.addGainMapsForMetadata(lsmWidth,
1977 lsmHeight,
1978 0,
1979 0,
1980 height,
1981 width,
1982 opcodeCfaLayout,
1983 entry2.data.f);
1984 if (err != OK) {
1985 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1986 jniThrowRuntimeException(env, "failed to add lens shading map.");
1987 return nullptr;
1988 }
1989 }
1990
1991 // Hot pixel map is specific to bayer camera per DNG spec.
1992 if (isBayer) {
1993 // Set up bad pixel correction list
1994 // We first check the capture result. If the hot pixel map is not
1995 // available, as a fallback, try the static characteristics.
1996 camera_metadata_entry entry3 = results.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1997 if (entry3.count == 0) {
1998 entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1999 }
2000
2001 if ((entry3.count % 2) != 0) {
2002 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
2003 __FUNCTION__);
2004 jniThrowRuntimeException(env, "failed to add hotpixel map.");
2005 return nullptr;
2006 }
2007
2008 // Adjust the bad pixel coordinates to be relative to the origin of the active area
2009 // DNG tag
2010 std::vector<uint32_t> v;
2011 for (size_t i = 0; i < entry3.count; i += 2) {
2012 int32_t x = entry3.data.i32[i];
2013 int32_t y = entry3.data.i32[i + 1];
2014 x -= static_cast<int32_t>(xmin);
2015 y -= static_cast<int32_t>(ymin);
2016 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
2017 static_cast<uint32_t>(y) >= height) {
2018 continue;
2019 }
2020 v.push_back(x);
2021 v.push_back(y);
2022 }
2023 const uint32_t* badPixels = &v[0];
2024 uint32_t badPixelCount = v.size();
2025
2026 if (badPixelCount > 0) {
2027 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
2028
2029 if (err != OK) {
2030 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
2031 jniThrowRuntimeException(env, "failed to add hotpixel map.");
2032 return nullptr;
2033 }
2034 }
2035 }
2036
2037 if (builder.getCount() > 0) {
2038 size_t listSize = builder.getSize();
2039 uint8_t opcodeListBuf[listSize];
2040 err = builder.buildOpList(opcodeListBuf);
2041 if (err == OK) {
2042 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
2043 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
2044 } else {
2045 ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
2046 "correction.", __FUNCTION__);
2047 jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
2048 "map and bad pixel correction");
2049 return nullptr;
2050 }
2051 }
2052 }
2053
2054 {
2055 // Set up opcode List 3
2056 OpcodeListBuilder builder;
2057 status_t err = OK;
2058
2059 // Set up rectilinear distortion correction
2060 std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
2061 bool gotDistortion = false;
2062
2063 // The capture result would have the correct intrinsic calibration
2064 // regardless of the sensor pixel mode.
2065 camera_metadata_entry entry4 =
2066 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
2067
2068 if (entry4.count == 5) {
2069 float cx = entry4.data.f[/*c_x*/2];
2070 float cy = entry4.data.f[/*c_y*/3];
2071 // Assuming f_x = f_y, or at least close enough.
2072 // Also assuming s = 0, or at least close enough.
2073 float f = entry4.data.f[/*f_x*/0];
2074
2075 camera_metadata_entry entry3 =
2076 results.find(ANDROID_LENS_DISTORTION);
2077 if (entry3.count == 5) {
2078 gotDistortion = true;
2079
                // Scale the distortion coefficients to create a zoomed-in warped image so that
                // all pixels are drawn within the input image.
2082 for (size_t i = 0; i < entry3.count; i++) {
2083 distortion[i+1] = entry3.data.f[i];
2084 }
2085
2086 if (preWidth == imageWidth && preHeight == imageHeight) {
2087 normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
2088 } else {
2089 // image size == pixel array size (contains optical black pixels)
2090 // cx/cy is defined in preCorrArray so adding the offset
2091 // Also changes default xmin/ymin so that pixels are only
2092 // sampled within preCorrection array
2093 normalizeLensDistortion(
2094 distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
2095 preXMin, preYMin);
2096 }
2097
2098 float m_x = std::fmaxf(preWidth - cx, cx);
2099 float m_y = std::fmaxf(preHeight - cy, cy);
2100 float m_sq = m_x*m_x + m_y*m_y;
2101 float m = sqrtf(m_sq); // distance to farthest corner from optical center
2102 float f_sq = f * f;
2103 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
2104 // to DNG spec.
2105 //
2106 // Camera2 / OpenCV assume distortion is applied in a space where focal length
2107 // is factored out, while DNG assumes a normalized space where the distance
2108 // from optical center to the farthest corner is 1.
2109 // Scale from camera2 to DNG spec accordingly.
2110 // distortion[0] is always 1 with the new LENS_DISTORTION field.
2111 const double convCoeff[5] = {
2112 m_sq / f_sq,
2113 pow(m_sq, 2) / pow(f_sq, 2),
2114 pow(m_sq, 3) / pow(f_sq, 3),
2115 m / f,
2116 m / f
2117 };
2118 for (size_t i = 0; i < entry3.count; i++) {
2119 distortion[i+1] *= convCoeff[i];
2120 }
2121 } else {
2122 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
2123 if (entry3.count == 6) {
2124 gotDistortion = true;
2125 // Conversion factors from Camera2 K factors to DNG spec. K factors:
2126 //
2127 // Note: these are necessary because our unit system assumes a
2128 // normalized max radius of sqrt(2), whereas the DNG spec's
2129 // WarpRectilinear opcode assumes a normalized max radius of 1.
2130 // Thus, each K coefficient must include the domain scaling
2131 // factor (the DNG domain is scaled by sqrt(2) to emulate the
2132 // domain used by the Camera2 specification).
2133 const double convCoeff[6] = {
2134 sqrt(2),
2135 2 * sqrt(2),
2136 4 * sqrt(2),
2137 8 * sqrt(2),
2138 2,
2139 2
2140 };
2141 for (size_t i = 0; i < entry3.count; i++) {
2142 distortion[i] = entry3.data.f[i] * convCoeff[i];
2143 }
2144 }
2145 }
2146 if (gotDistortion) {
2147 err = builder.addWarpRectilinearForMetadata(
2148 distortion.data(), preWidth, preHeight, cx, cy);
2149 if (err != OK) {
2150 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
2151 jniThrowRuntimeException(env, "failed to add distortion correction.");
2152 return nullptr;
2153 }
2154 }
2155 }
2156
2157 if (builder.getCount() > 0) {
2158 size_t listSize = builder.getSize();
2159 uint8_t opcodeListBuf[listSize];
2160 err = builder.buildOpList(opcodeListBuf);
2161 if (err == OK) {
2162 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
2163 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
2164 } else {
2165 ALOGE("%s: Could not build list of opcodes for distortion correction.",
2166 __FUNCTION__);
2167 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
2168 " correction");
2169 return nullptr;
2170 }
2171 }
2172 }
2173
2174 {
2175 // Set up orientation tags.
2176 // Note: There's only one orientation field for the whole file, in IFD0
2177 // The main image and any thumbnails therefore have the same orientation.
2178 uint16_t orientation = nativeContext->getOrientation();
2179 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
2180 env, TAG_ORIENTATION, writer);
2181
2182 }
2183
2184 if (nativeContext->hasDescription()){
2185 // Set Description
2186 String8 description = nativeContext->getDescription();
2187 size_t len = description.bytes() + 1;
2188 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
2189 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
2190 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
2191 "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
2192 }
2193 }
2194
2195 if (nativeContext->hasGpsData()) {
2196 // Set GPS tags
2197 GpsData gpsData = nativeContext->getGpsData();
2198 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
2199 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
2200 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
2201 TIFF_IFD_0);
2202 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
2203 return nullptr;
2204 }
2205 }
2206
2207 {
2208 uint8_t version[] = {2, 3, 0, 0};
2209 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
2210 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
2211 }
2212
2213 {
2214 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
2215 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
2216 TAG_GPSLATITUDEREF, writer);
2217 }
2218
2219 {
2220 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
2221 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
2222 TAG_GPSLONGITUDEREF, writer);
2223 }
2224
2225 {
2226 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
2227 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
2228 }
2229
2230 {
2231 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
2232 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
2233 }
2234
2235 {
2236 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
2237 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
2238 }
2239
2240 {
2241 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
2242 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
2243 TAG_GPSDATESTAMP, writer);
2244 }
2245 }
2246
2247
2248 if (nativeContext->hasThumbnail()) {
2249 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
2250 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
2251 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
2252 TIFF_IFD_0);
2253 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
2254 return nullptr;
2255 }
2256 }
2257
2258 Vector<uint16_t> tagsToMove;
2259 tagsToMove.add(TAG_NEWSUBFILETYPE);
2260 tagsToMove.add(TAG_ACTIVEAREA);
2261 tagsToMove.add(TAG_BITSPERSAMPLE);
2262 tagsToMove.add(TAG_COMPRESSION);
2263 tagsToMove.add(TAG_IMAGEWIDTH);
2264 tagsToMove.add(TAG_IMAGELENGTH);
2265 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
2266 tagsToMove.add(TAG_BLACKLEVEL);
2267 tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
2268 tagsToMove.add(TAG_SAMPLESPERPIXEL);
2269 tagsToMove.add(TAG_PLANARCONFIGURATION);
2270 if (isBayer) {
2271 tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
2272 tagsToMove.add(TAG_CFAPATTERN);
2273 tagsToMove.add(TAG_CFAPLANECOLOR);
2274 tagsToMove.add(TAG_CFALAYOUT);
2275 }
2276 tagsToMove.add(TAG_XRESOLUTION);
2277 tagsToMove.add(TAG_YRESOLUTION);
2278 tagsToMove.add(TAG_RESOLUTIONUNIT);
2279 tagsToMove.add(TAG_WHITELEVEL);
2280 tagsToMove.add(TAG_DEFAULTSCALE);
2281 tagsToMove.add(TAG_DEFAULTCROPORIGIN);
2282 tagsToMove.add(TAG_DEFAULTCROPSIZE);
2283
2284 if (nullptr != writer->getEntry(TAG_OPCODELIST2, TIFF_IFD_0).get()) {
2285 tagsToMove.add(TAG_OPCODELIST2);
2286 }
2287
2288 if (nullptr != writer->getEntry(TAG_OPCODELIST3, TIFF_IFD_0).get()) {
2289 tagsToMove.add(TAG_OPCODELIST3);
2290 }
2291
2292 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
2293 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
2294 return nullptr;
2295 }
2296
2297 // Setup thumbnail tags
2298
2299 {
2300 // Set photometric interpretation
2301 uint16_t interpretation = 2; // RGB
2302 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
2303 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
2304 }
2305
2306 {
2307 // Set planar configuration
2308 uint16_t config = 1; // Chunky
2309 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
2310 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
2311 }
2312
2313 {
2314 // Set samples per pixel
2315 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
2316 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
2317 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
2318 }
2319
2320 {
2321 // Set bits per sample
2322 uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2323 for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2324 BAIL_IF_INVALID_RET_NULL_SP(
2325 writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0),
2326 env, TAG_BITSPERSAMPLE, writer);
2327 }
2328
2329 {
2330 // Set subfiletype
2331 uint32_t subfileType = 1; // Thumbnail image
2332 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2333 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
2334 }
2335
2336 {
2337 // Set compression
2338 uint16_t compression = 1; // None
2339 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2340 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
2341 }
2342
2343 {
2344 // Set dimensions
2345 uint32_t uWidth = nativeContext->getThumbnailWidth();
2346 uint32_t uHeight = nativeContext->getThumbnailHeight();
2347 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
2348 env, TAG_IMAGEWIDTH, writer);
2349 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
2350 env, TAG_IMAGELENGTH, writer);
2351 }
2352
2353 {
2354 // x resolution
2355 uint32_t xres[] = { 72, 1 }; // default 72 ppi
2356 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
2357 env, TAG_XRESOLUTION, writer);
2358
2359 // y resolution
2360 uint32_t yres[] = { 72, 1 }; // default 72 ppi
2361 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
2362 env, TAG_YRESOLUTION, writer);
2363
2364 uint16_t unit = 2; // inches
2365 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
2366 env, TAG_RESOLUTIONUNIT, writer);
2367 }
2368 }
2369
2370 if (writer->addStrip(TIFF_IFD_0) != OK) {
2371 ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
2372 jniThrowException(env, "java/lang/IllegalStateException",
2373 "Failed to setup thumbnail strip tags.");
2374 return nullptr;
2375 }
2376
2377 if (writer->hasIfd(TIFF_IFD_SUB1)) {
2378 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2379 ALOGE("%s: Could not main image strip tags.", __FUNCTION__);
2380 jniThrowException(env, "java/lang/IllegalStateException",
2381 "Failed to setup main image strip tags.");
2382 return nullptr;
2383 }
2384 }
2385 return writer;
2386 }
2387
/**
 * Tears down the native side of a DngCreator instance by clearing the
 * NativeContext stored on the Java object. Any native state previously
 * attached via DngCreator_setNativeContext is released as a result of
 * replacing it with nullptr.
 */
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    DngCreator_setNativeContext(env, thiz, nullptr);
}
2392
DngCreator_nativeSetOrientation(JNIEnv * env,jobject thiz,jint orient)2393 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2394 ALOGV("%s:", __FUNCTION__);
2395
2396 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2397 if (context == nullptr) {
2398 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2399 jniThrowException(env, "java/lang/AssertionError",
2400 "setOrientation called with uninitialized DngCreator");
2401 return;
2402 }
2403
2404 uint16_t orientation = static_cast<uint16_t>(orient);
2405 context->setOrientation(orientation);
2406 }
2407
DngCreator_nativeSetDescription(JNIEnv * env,jobject thiz,jstring description)2408 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2409 ALOGV("%s:", __FUNCTION__);
2410
2411 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2412 if (context == nullptr) {
2413 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2414 jniThrowException(env, "java/lang/AssertionError",
2415 "setDescription called with uninitialized DngCreator");
2416 return;
2417 }
2418
2419 const char* desc = env->GetStringUTFChars(description, nullptr);
2420 context->setDescription(String8(desc));
2421 env->ReleaseStringUTFChars(description, desc);
2422 }
2423
/**
 * Validates and stores GPS metadata on the NativeContext for later emission
 * into the DNG GPSInfo IFD (see the hasGpsData() branch in DngCreator_setup).
 *
 * latTag/longTag/timeTag must each contain exactly GpsData::GPS_VALUE_LENGTH
 * ints; otherwise an IllegalArgumentException is thrown. Throws
 * java.lang.AssertionError if the native context is uninitialized.
 */
static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
        jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setGpsTags called with uninitialized DngCreator");
        return;
    }

    GpsData data;

    // Reject malformed arrays up front so the fixed-size copies below are safe.
    jsize latLen = env->GetArrayLength(latTag);
    jsize longLen = env->GetArrayLength(longTag);
    jsize timeLen = env->GetArrayLength(timeTag);
    if (latLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid latitude tag length");
        return;
    } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid longitude tag length");
        return;
    } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid time tag length");
        return;
    }

    // Bulk-copy each value array into the corresponding GpsData member.
    // NOTE(review): the reinterpret_casts assume each member is a contiguous
    // array of GPS_VALUE_LENGTH 32-bit values layout-compatible with jint —
    // confirm against the GpsData declaration.
    env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLatitude));
    env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLongitude));
    env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mTimestamp));


    // Copy the single-character reference strings (e.g. "N"/"S", "E"/"W") and
    // the date string; GetStringUTFRegion does not NUL-terminate, so the
    // terminator is written explicitly after each copy.
    env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
    data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
    data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
            reinterpret_cast<char*>(&data.mDate));
    data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';

    context->setGpsData(data);
}
2473
DngCreator_nativeSetThumbnail(JNIEnv * env,jobject thiz,jobject buffer,jint width,jint height)2474 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2475 jint height) {
2476 ALOGV("%s:", __FUNCTION__);
2477
2478 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2479 if (context == nullptr) {
2480 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2481 jniThrowException(env, "java/lang/AssertionError",
2482 "setThumbnail called with uninitialized DngCreator");
2483 return;
2484 }
2485
2486 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2487 jlong capacity = env->GetDirectBufferCapacity(buffer);
2488 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2489 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2490 "Invalid size %d for thumbnail, expected size was %d",
2491 capacity, fullSize);
2492 return;
2493 }
2494
2495 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2496 if (pixelBytes == nullptr) {
2497 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2498 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2499 return;
2500 }
2501
2502 if (!context->setThumbnail(pixelBytes, width, height)) {
2503 jniThrowException(env, "java/lang/IllegalStateException",
2504 "Failed to set thumbnail.");
2505 return;
2506 }
2507 }
2508
2509 // TODO: Refactor out common preamble for the two nativeWrite methods.
DngCreator_nativeWriteImage(JNIEnv * env,jobject thiz,jobject outStream,jint width,jint height,jobject inBuffer,jint rowStride,jint pixStride,jlong offset,jboolean isDirect)2510 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2511 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2512 jboolean isDirect) {
2513 ALOGV("%s:", __FUNCTION__);
2514 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2515 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2516 height, rowStride, pixStride, offset);
2517 uint32_t rStride = static_cast<uint32_t>(rowStride);
2518 uint32_t pStride = static_cast<uint32_t>(pixStride);
2519 uint32_t uWidth = static_cast<uint32_t>(width);
2520 uint32_t uHeight = static_cast<uint32_t>(height);
2521 uint64_t uOffset = static_cast<uint64_t>(offset);
2522
2523 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2524 if(env->ExceptionCheck()) {
2525 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2526 return;
2527 }
2528
2529 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2530 if (context == nullptr) {
2531 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2532 jniThrowException(env, "java/lang/AssertionError",
2533 "Write called with uninitialized DngCreator");
2534 return;
2535 }
2536 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2537
2538 if (writer.get() == nullptr) {
2539 return;
2540 }
2541
2542 // Validate DNG size
2543 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2544 return;
2545 }
2546
2547 sp<JniInputByteBuffer> inBuf;
2548 Vector<StripSource*> sources;
2549 sp<DirectStripSource> thumbnailSource;
2550 uint32_t targetIfd = TIFF_IFD_0;
2551
2552 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2553
2554 if (hasThumbnail) {
2555 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2556 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2557 uint32_t thumbWidth = context->getThumbnailWidth();
2558 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2559 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2560 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2561 SAMPLES_PER_RGB_PIXEL);
2562 sources.add(thumbnailSource.get());
2563 targetIfd = TIFF_IFD_SUB1;
2564 }
2565
2566 if (isDirect) {
2567 size_t fullSize = rStride * uHeight;
2568 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2569 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2570 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2571 "Invalid size %d for Image, size given in metadata is %d at current stride",
2572 capacity, fullSize);
2573 return;
2574 }
2575
2576 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2577 if (pixelBytes == nullptr) {
2578 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2579 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2580 return;
2581 }
2582
2583 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2584 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2585 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2586 sources.add(&stripSource);
2587
2588 status_t ret = OK;
2589 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2590 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2591 if (!env->ExceptionCheck()) {
2592 jniThrowExceptionFmt(env, "java/io/IOException",
2593 "Encountered error %d while writing file.", ret);
2594 }
2595 return;
2596 }
2597 } else {
2598 inBuf = new JniInputByteBuffer(env, inBuffer);
2599
2600 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2601 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2602 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2603 sources.add(&stripSource);
2604
2605 status_t ret = OK;
2606 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2607 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2608 if (!env->ExceptionCheck()) {
2609 jniThrowExceptionFmt(env, "java/io/IOException",
2610 "Encountered error %d while writing file.", ret);
2611 }
2612 return;
2613 }
2614 }
2615 }
2616
DngCreator_nativeWriteInputStream(JNIEnv * env,jobject thiz,jobject outStream,jobject inStream,jint width,jint height,jlong offset)2617 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2618 jobject inStream, jint width, jint height, jlong offset) {
2619 ALOGV("%s:", __FUNCTION__);
2620
2621 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2622 uint32_t pixStride = BYTES_PER_SAMPLE;
2623 uint32_t uWidth = static_cast<uint32_t>(width);
2624 uint32_t uHeight = static_cast<uint32_t>(height);
2625 uint64_t uOffset = static_cast<uint32_t>(offset);
2626
2627 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2628 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2629 height, rowStride, pixStride, offset);
2630
2631 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2632 if (env->ExceptionCheck()) {
2633 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2634 return;
2635 }
2636
2637 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2638 if (context == nullptr) {
2639 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2640 jniThrowException(env, "java/lang/AssertionError",
2641 "Write called with uninitialized DngCreator");
2642 return;
2643 }
2644 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2645
2646 if (writer.get() == nullptr) {
2647 return;
2648 }
2649
2650 // Validate DNG size
2651 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2652 return;
2653 }
2654
2655 sp<DirectStripSource> thumbnailSource;
2656 uint32_t targetIfd = TIFF_IFD_0;
2657 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2658 Vector<StripSource*> sources;
2659
2660 if (hasThumbnail) {
2661 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2662 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2663 uint32_t width = context->getThumbnailWidth();
2664 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2665 width, context->getThumbnailHeight(), bytesPerPixel,
2666 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2667 SAMPLES_PER_RGB_PIXEL);
2668 sources.add(thumbnailSource.get());
2669 targetIfd = TIFF_IFD_SUB1;
2670 }
2671
2672 sp<JniInputStream> in = new JniInputStream(env, inStream);
2673
2674 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2675 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2676 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2677 sources.add(&stripSource);
2678
2679 status_t ret = OK;
2680 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2681 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2682 if (!env->ExceptionCheck()) {
2683 jniThrowExceptionFmt(env, "java/io/IOException",
2684 "Encountered error %d while writing file.", ret);
2685 }
2686 return;
2687 }
2688 }
2689
2690 } /*extern "C" */
2691
// JNI method table mapping the native method declarations in
// android.hardware.camera2.DngCreator to their implementations above.
// The JNI signature strings must stay in sync with the Java-side
// `native` declarations; a mismatch fails at registration time.
static const JNINativeMethod gDngCreatorMethods[] = {
    {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
    // (CameraMetadataNative characteristics, CameraMetadataNative result, String captureTime)
    {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
            "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
            (void*) DngCreator_init},
    {"nativeDestroy", "()V", (void*) DngCreator_destroy},
    {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
    {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
    // (int[] latTag, String latRef, int[] longTag, String longRef, String dateTag, int[] timeTag)
    {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
            (void*) DngCreator_nativeSetGpsTags},
    // (ByteBuffer buffer, int width, int height)
    {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
    // (OutputStream out, int width, int height, ByteBuffer pixels,
    //  int rowStride, int pixStride, long offset, boolean isDirect)
    {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
            (void*) DngCreator_nativeWriteImage},
    // (OutputStream out, InputStream in, int width, int height, long offset)
    {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
            (void*) DngCreator_nativeWriteInputStream},
};
2708
register_android_hardware_camera2_DngCreator(JNIEnv * env)2709 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2710 return RegisterMethodsOrDie(env,
2711 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2712 }
2713