/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// JNI backing for android.hardware.camera2.DngCreator: assembles a DNG (TIFF-based)
// file from RAW16 pixel data plus camera characteristics/capture-result metadata.

//#define LOG_NDEBUG 0
#define LOG_TAG "DngCreator_JNI"
#include <inttypes.h>
#include <string.h>
#include <algorithm>
#include <memory>
#include <vector>

#include <utils/Log.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <utils/RefBase.h>
#include <utils/Vector.h>
#include <utils/String8.h>
#include <cutils/properties.h>
#include <system/camera_metadata.h>
#include <camera/CameraMetadata.h>
#include <img_utils/DngUtils.h>
#include <img_utils/TagDefinitions.h>
#include <img_utils/TiffIfd.h>
#include <img_utils/TiffWriter.h>
#include <img_utils/Output.h>
#include <img_utils/Input.h>
#include <img_utils/StripSource.h>

#include "core_jni_helpers.h"

#include "android_runtime/AndroidRuntime.h"
#include "android_runtime/android_hardware_camera2_CameraMetadata.h"

#include <jni.h>
#include <JNIHelp.h>

using namespace android;
using namespace img_utils;

// Error-handling helpers: each macro throws a Java IllegalArgumentException naming the
// offending TIFF tag and bails out of the enclosing function with the indicated value.

#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if (entry.count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }

#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if (expr) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// Name of the Java-side long field that stores the native context pointer.
#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"

// Cached JNI field/method ids, resolved once in DngCreator_nativeClassInit.
static struct {
    jfieldID mNativeContext;
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;
    jmethodID mSkipMethod;
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;
} gInputByteBufferClassInfo;

// Layout constants for RAW16 input, RGB thumbnail data, and the TIFF IFD indices
// used throughout this file.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,
    TIFF_IFD_SUB1 = 1,
    TIFF_IFD_GPSINFO = 2,
};


/**
 * POD container class for GPS tag data.
 */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,
        GPS_REF_LENGTH = 2,
        GPS_DATE_LENGTH = 11,
    };

    // Latitude/longitude/timestamp are arrays of GPS_VALUE_LENGTH uint32 values
    // (consecutive pairs presumably form TIFF RATIONALs — packed by the Java side;
    // verify against DngCreator.java).
    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    uint8_t mLatitudeRef[GPS_REF_LENGTH];
    uint8_t mLongitudeRef[GPS_REF_LENGTH];
    uint8_t mDate[GPS_DATE_LENGTH];
};

// ----------------------------------------------------------------------------

/**
 * Container class for the persistent native context.
 *
 * Holds the TiffWriter plus all state accumulated between the Java-side init
 * call and the final write (thumbnail, orientation, description, GPS, time).
 */

class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        // Required length of the formatted capture time string, including the
        // terminating NUL (enforced as "20 characters" in DngCreator_init).
        DATETIME_COUNT = 20,
    };

    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TiffWriter used to build the output file. Never null.
    TiffWriter* getWriter();

    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies width*height RGB pixels out of buffer; returns false on allocation failure.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    // The *Set flags record which optional fields have been provided by Java.
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};

// Deep-copies both metadata packs so the context does not alias Java-owned buffers.
NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
        mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
        mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
        mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
        mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}

NativeContext::~NativeContext() {}

TiffWriter* NativeContext::getWriter() {
    return &mWriter;
}

std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
    return mCharacteristics;
}

std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
    return mResult;
}

uint32_t NativeContext::getThumbnailWidth() const {
    return mThumbnailWidth;
}

uint32_t NativeContext::getThumbnailHeight() const {
    return mThumbnailHeight;
}

// Valid only after setThumbnail() succeeded (check hasThumbnail()).
const uint8_t* NativeContext::getThumbnail() const {
    return mCurrentThumbnail.array();
}

bool NativeContext::hasThumbnail() const {
    return mThumbnailSet;
}

bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
    mThumbnailWidth = width;
    mThumbnailHeight = height;

    // 3 bytes per pixel (packed RGB, 8 bits per sample).
    size_t size = BYTES_PER_RGB_PIXEL * width * height;
    if (mCurrentThumbnail.resize(size) < 0) {
        ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
        return false;
    }

    uint8_t* thumb = mCurrentThumbnail.editArray();
    memcpy(thumb, buffer, size);
    mThumbnailSet = true;
    return true;
}

void NativeContext::setOrientation(uint16_t orientation) {
    mOrientation = orientation;
}

uint16_t NativeContext::getOrientation() const {
    return mOrientation;
}

void NativeContext::setDescription(const String8& desc) {
    mDescription = desc;
    mDescriptionSet = true;
}

String8 NativeContext::getDescription() const {
    return mDescription;
}

bool NativeContext::hasDescription() const {
    return mDescriptionSet;
}

void NativeContext::setGpsData(const GpsData& data) {
    mGpsData = data;
    mGpsSet = true;
}

GpsData NativeContext::getGpsData() const {
    return mGpsData;
}

bool NativeContext::hasGpsData() const {
    return mGpsSet;
}

void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
    mFormattedCaptureTime = formattedCaptureTime;
    mCaptureTimeSet = true;
}

String8 NativeContext::getCaptureTime() const {
    return mFormattedCaptureTime;
}

bool NativeContext::hasCaptureTime() const {
    return mCaptureTimeSet;
}

// End of NativeContext
// ----------------------------------------------------------------------------

/**
 * Wrapper class for a Java OutputStream.
 *
 * This class is not intended to be used across JNI calls.
308 */ 309 class JniOutputStream : public Output, public LightRefBase<JniOutputStream> { 310 public: 311 JniOutputStream(JNIEnv* env, jobject outStream); 312 313 virtual ~JniOutputStream(); 314 315 status_t open(); 316 317 status_t write(const uint8_t* buf, size_t offset, size_t count); 318 319 status_t close(); 320 private: 321 enum { 322 BYTE_ARRAY_LENGTH = 4096 323 }; 324 jobject mOutputStream; 325 JNIEnv* mEnv; 326 jbyteArray mByteArray; 327 }; 328 329 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream), 330 mEnv(env) { 331 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH); 332 if (mByteArray == nullptr) { 333 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array."); 334 } 335 } 336 337 JniOutputStream::~JniOutputStream() { 338 mEnv->DeleteLocalRef(mByteArray); 339 } 340 341 status_t JniOutputStream::open() { 342 // Do nothing 343 return OK; 344 } 345 346 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) { 347 while(count > 0) { 348 size_t len = BYTE_ARRAY_LENGTH; 349 len = (count > len) ? len : count; 350 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset)); 351 352 if (mEnv->ExceptionCheck()) { 353 return BAD_VALUE; 354 } 355 356 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray, 357 0, len); 358 359 if (mEnv->ExceptionCheck()) { 360 return BAD_VALUE; 361 } 362 363 count -= len; 364 offset += len; 365 } 366 return OK; 367 } 368 369 status_t JniOutputStream::close() { 370 // Do nothing 371 return OK; 372 } 373 374 // End of JniOutputStream 375 // ---------------------------------------------------------------------------- 376 377 /** 378 * Wrapper class for a Java InputStream. 379 * 380 * This class is not intended to be used across JNI calls. 
 */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    JniInputStream(JNIEnv* env, jobject inStream);

    status_t open();

    status_t close();

    // Reads up to count bytes into buf + offset; returns bytes read,
    // NOT_ENOUGH_DATA on EOF, or BAD_VALUE if a Java exception occurred.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to count bytes; returns bytes skipped, NOT_ENOUGH_DATA, or BAD_VALUE.
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInStream;
    JNIEnv* mEnv;
    jbyteArray mByteArray;

};

// Allocates the reusable staging array; throws OutOfMemoryError on failure.
JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputStream::~JniInputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}

ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {

    // Clamp the request to the staging array size; callers loop as needed.
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }
    jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
            realCount);

    // InputStream.read returns -1 at EOF.
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }

    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }

    mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    return actual;
}

ssize_t JniInputStream::skip(size_t count) {
    jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
            static_cast<jlong>(count));

    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }
    return actual;
}

status_t JniInputStream::open() {
    // Do nothing
    return OK;
}

status_t JniInputStream::close() {
    // Do nothing
    return OK;
}

// End of JniInputStream
// ----------------------------------------------------------------------------

/**
 * Wrapper class for a non-direct Java ByteBuffer.
 *
 * This class is not intended to be used across JNI calls.
 */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    status_t open();

    status_t close();

    // Reads exactly the clamped count from the ByteBuffer (ByteBuffer.get either
    // fills the array or throws); returns bytes read or BAD_VALUE on exception.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInBuf;
    JNIEnv* mEnv;
    jbyteArray mByteArray;
};

JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputByteBuffer::~JniInputByteBuffer() {
    mEnv->DeleteLocalRef(mByteArray);
}

ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }

    // ByteBuffer.get(byte[], int, int) returns `this`; drop the extra local ref
    // immediately so tight read loops do not overflow the local-ref table.
    jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
            mByteArray, 0, realCount);
    mEnv->DeleteLocalRef(chainingBuf);

    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }

    mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }
    return realCount;
}

status_t JniInputByteBuffer::open() {
    // Do nothing
    return OK;
}

status_t JniInputByteBuffer::close() {
    // Do nothing
    return OK;
}
// End of JniInputByteBuffer
// ----------------------------------------------------------------------------

/**
 * StripSource subclass for Input types.
 *
 * This class is not intended to be used across JNI calls.
 */

class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Streams the strip row-by-row from mInput into stream; count must equal the
    // full image size in bytes.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;
    Input* mInput;      // not owned
    uint32_t mWidth;
    uint32_t mHeight;
    uint32_t mPixStride;
    uint32_t mRowStride;
    uint64_t mOffset;   // byte offset into the input before pixel data starts
    JNIEnv* mEnv;
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};

InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}

status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                // skip() already recorded a Java exception unless it returned a
                // bare error code; only throw if none is pending.
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    // Reusable single-row buffer; rows are read fully before being written out.
    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // Loop until the full row is read; short reads are legal for InputStream.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            // Contiguous pixels: write only the payload portion of the row
            // (mRowStride may include trailing padding).
            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__); 655 jniThrowException(mEnv, "java/lang/IllegalStateException", 656 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous"); 657 return BAD_VALUE; 658 659 // TODO: Add support for non-contiguous pixels if needed. 660 } 661 } 662 return OK; 663 } 664 665 uint32_t InputStripSource::getIfd() const { 666 return mIfd; 667 } 668 669 // End of InputStripSource 670 // ---------------------------------------------------------------------------- 671 672 /** 673 * StripSource subclass for direct buffer types. 674 * 675 * This class is not intended to be used across JNI calls. 676 */ 677 678 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> { 679 public: 680 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width, 681 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset, 682 uint32_t bytesPerSample, uint32_t samplesPerPixel); 683 684 virtual ~DirectStripSource(); 685 686 virtual status_t writeToStream(Output& stream, uint32_t count); 687 688 virtual uint32_t getIfd() const; 689 protected: 690 uint32_t mIfd; 691 const uint8_t* mPixelBytes; 692 uint32_t mWidth; 693 uint32_t mHeight; 694 uint32_t mPixStride; 695 uint32_t mRowStride; 696 uint16_t mOffset; 697 JNIEnv* mEnv; 698 uint32_t mBytesPerSample; 699 uint32_t mSamplesPerPixel; 700 }; 701 702 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, 703 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride, 704 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), 705 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride), 706 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample), 707 mSamplesPerPixel(samplesPerPixel) {} 708 709 DirectStripSource::~DirectStripSource() {} 710 711 status_t 
DirectStripSource::writeToStream(Output& stream, uint32_t count) { 712 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel; 713 714 if (fullSize != count) { 715 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count, 716 fullSize); 717 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write"); 718 return BAD_VALUE; 719 } 720 721 722 if (mPixStride == mBytesPerSample * mSamplesPerPixel 723 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) { 724 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__); 725 726 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) { 727 if (!mEnv->ExceptionCheck()) { 728 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); 729 } 730 return BAD_VALUE; 731 } 732 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) { 733 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__); 734 735 for (size_t i = 0; i < mHeight; ++i) { 736 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK || 737 mEnv->ExceptionCheck()) { 738 if (!mEnv->ExceptionCheck()) { 739 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); 740 } 741 return BAD_VALUE; 742 } 743 } 744 } else { 745 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__); 746 747 jniThrowException(mEnv, "java/lang/IllegalStateException", 748 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous"); 749 return BAD_VALUE; 750 751 // TODO: Add support for non-contiguous pixels if needed. 
752 } 753 return OK; 754 755 } 756 757 uint32_t DirectStripSource::getIfd() const { 758 return mIfd; 759 } 760 761 // End of DirectStripSource 762 // ---------------------------------------------------------------------------- 763 764 /** 765 * Calculate the default crop relative to the "active area" of the image sensor (this active area 766 * will always be the pre-correction active area rectangle), and set this. 767 */ 768 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics, 769 sp<TiffWriter> writer) { 770 771 camera_metadata_ro_entry entry = 772 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 773 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]); 774 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]); 775 776 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation. 777 778 if (width < margin * 2 || height < margin * 2) { 779 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too" 780 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width); 781 jniThrowException(env, "java/lang/IllegalStateException", 782 "Pre-correction active area is too small."); 783 return BAD_VALUE; 784 } 785 786 uint32_t defaultCropOrigin[] = {margin, margin}; 787 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin, 788 height - defaultCropOrigin[1] - margin}; 789 790 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin, 791 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer); 792 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize, 793 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer); 794 795 return OK; 796 } 797 798 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer, 799 const CameraMetadata& characteristics, jint width, jint height) { 800 if (width <= 0) { 801 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 802 "Image width %d is invalid", width); 803 
return false; 804 } 805 806 if (height <= 0) { 807 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 808 "Image height %d is invalid", height); 809 return false; 810 } 811 812 camera_metadata_ro_entry preCorrectionEntry = 813 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 814 camera_metadata_ro_entry pixelArrayEntry = 815 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE); 816 817 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]); 818 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]); 819 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]); 820 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]); 821 822 bool matchesPixelArray = (pWidth == width && pHeight == height); 823 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height); 824 825 if (!(matchesPixelArray || matchesPreCorrectionArray)) { 826 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 827 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel " 828 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)", 829 width, height, pWidth, pHeight, cWidth, cHeight); 830 return false; 831 } 832 833 return true; 834 } 835 836 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo, 837 const Vector<uint16_t>& entries) { 838 for (size_t i = 0; i < entries.size(); ++i) { 839 uint16_t tagId = entries[i]; 840 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom); 841 if (entry.get() == nullptr) { 842 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId, 843 ifdFrom); 844 return BAD_VALUE; 845 } 846 if (writer->addEntry(entry, ifdTo) != OK) { 847 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId, 848 ifdFrom); 849 return BAD_VALUE; 850 } 851 writer->removeEntry(tagId, ifdFrom); 852 } 853 return OK; 854 } 855 856 /** 857 * Write CFA pattern for given CFA enum into cfaOut. 
cfaOut must have length >= 4. 858 * Returns OK on success, or a negative error code if the CFA enum was invalid. 859 */ 860 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) { 861 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = 862 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>( 863 cfaEnum); 864 switch(cfa) { 865 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { 866 cfaOut[0] = 0; 867 cfaOut[1] = 1; 868 cfaOut[2] = 1; 869 cfaOut[3] = 2; 870 break; 871 } 872 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { 873 cfaOut[0] = 1; 874 cfaOut[1] = 0; 875 cfaOut[2] = 2; 876 cfaOut[3] = 1; 877 break; 878 } 879 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { 880 cfaOut[0] = 1; 881 cfaOut[1] = 2; 882 cfaOut[2] = 0; 883 cfaOut[3] = 1; 884 break; 885 } 886 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { 887 cfaOut[0] = 2; 888 cfaOut[1] = 1; 889 cfaOut[2] = 1; 890 cfaOut[3] = 0; 891 break; 892 } 893 default: { 894 return BAD_VALUE; 895 } 896 } 897 return OK; 898 } 899 900 /** 901 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to 902 * RGGB for an unknown enum. 
903 */ 904 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) { 905 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = 906 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>( 907 cfaEnum); 908 switch(cfa) { 909 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { 910 return OpcodeListBuilder::CFA_RGGB; 911 } 912 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { 913 return OpcodeListBuilder::CFA_GRBG; 914 } 915 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { 916 return OpcodeListBuilder::CFA_GBRG; 917 } 918 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { 919 return OpcodeListBuilder::CFA_BGGR; 920 } 921 default: { 922 return OpcodeListBuilder::CFA_RGGB; 923 } 924 } 925 } 926 927 /** 928 * For each color plane, find the corresponding noise profile coefficients given in the 929 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color 930 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient. 931 * 932 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients. 933 * cfa - numChannels color channels corresponding to each of the per-channel noise profile 934 * coefficients. 935 * numChannels - the number of noise profile coefficient pairs and color channels given in 936 * the perChannelNoiseProfile and cfa arguments, respectively. 937 * planeColors - the color planes in the noise profile output. 938 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile. 939 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients. 940 * 941 * returns OK, or a negative error code on failure. 
942 */ 943 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa, 944 size_t numChannels, const uint8_t* planeColors, size_t numPlanes, 945 /*out*/double* noiseProfile) { 946 947 for (size_t p = 0; p < numPlanes; ++p) { 948 size_t S = p * 2; 949 size_t O = p * 2 + 1; 950 951 noiseProfile[S] = 0; 952 noiseProfile[O] = 0; 953 bool uninitialized = true; 954 for (size_t c = 0; c < numChannels; ++c) { 955 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) { 956 noiseProfile[S] = perChannelNoiseProfile[c * 2]; 957 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1]; 958 uninitialized = false; 959 } 960 } 961 if (uninitialized) { 962 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu", 963 __FUNCTION__, p); 964 return BAD_VALUE; 965 } 966 } 967 return OK; 968 } 969 970 // ---------------------------------------------------------------------------- 971 extern "C" { 972 973 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) { 974 ALOGV("%s:", __FUNCTION__); 975 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz, 976 gDngCreatorClassInfo.mNativeContext)); 977 } 978 979 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) { 980 ALOGV("%s:", __FUNCTION__); 981 NativeContext* current = DngCreator_getNativeContext(env, thiz); 982 983 if (context != nullptr) { 984 context->incStrong((void*) DngCreator_setNativeContext); 985 } 986 987 if (current) { 988 current->decStrong((void*) DngCreator_setNativeContext); 989 } 990 991 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext, 992 reinterpret_cast<jlong>(context.get())); 993 } 994 995 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) { 996 ALOGV("%s:", __FUNCTION__); 997 998 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env, 999 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J"); 1000 1001 jclass outputStreamClazz = FindClassOrDie(env, 
"java/io/OutputStream"); 1002 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env, 1003 outputStreamClazz, "write", "([BII)V"); 1004 1005 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream"); 1006 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I"); 1007 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J"); 1008 1009 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer"); 1010 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env, 1011 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;"); 1012 } 1013 1014 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr, 1015 jobject resultsPtr, jstring formattedCaptureTime) { 1016 ALOGV("%s:", __FUNCTION__); 1017 CameraMetadata characteristics; 1018 CameraMetadata results; 1019 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) { 1020 jniThrowException(env, "java/lang/AssertionError", 1021 "No native metadata defined for camera characteristics."); 1022 return; 1023 } 1024 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) { 1025 jniThrowException(env, "java/lang/AssertionError", 1026 "No native metadata defined for capture results."); 1027 return; 1028 } 1029 1030 sp<NativeContext> nativeContext = new NativeContext(characteristics, results); 1031 1032 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr); 1033 1034 size_t len = strlen(captureTime) + 1; 1035 if (len != NativeContext::DATETIME_COUNT) { 1036 jniThrowException(env, "java/lang/IllegalArgumentException", 1037 "Formatted capture time string length is not required 20 characters"); 1038 return; 1039 } 1040 1041 nativeContext->setCaptureTime(String8(captureTime)); 1042 1043 DngCreator_setNativeContext(env, thiz, nativeContext); 1044 } 1045 1046 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth, 1047 
uint32_t imageHeight) { 1048 1049 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz); 1050 1051 if (nativeContext == nullptr) { 1052 jniThrowException(env, "java/lang/AssertionError", 1053 "No native context, must call init before other operations."); 1054 return nullptr; 1055 } 1056 1057 CameraMetadata characteristics = *(nativeContext->getCharacteristics()); 1058 CameraMetadata results = *(nativeContext->getResult()); 1059 1060 sp<TiffWriter> writer = new TiffWriter(); 1061 1062 uint32_t preWidth = 0; 1063 uint32_t preHeight = 0; 1064 { 1065 // Check dimensions 1066 camera_metadata_entry entry = 1067 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 1068 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer); 1069 preWidth = static_cast<uint32_t>(entry.data.i32[2]); 1070 preHeight = static_cast<uint32_t>(entry.data.i32[3]); 1071 1072 camera_metadata_entry pixelArrayEntry = 1073 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE); 1074 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]); 1075 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]); 1076 1077 if (!((imageWidth == preWidth && imageHeight == preHeight) || 1078 (imageWidth == pixWidth && imageHeight == pixHeight))) { 1079 jniThrowException(env, "java/lang/AssertionError", 1080 "Height and width of imate buffer did not match height and width of" 1081 "either the preCorrectionActiveArraySize or the pixelArraySize."); 1082 return nullptr; 1083 } 1084 } 1085 1086 1087 1088 writer->addIfd(TIFF_IFD_0); 1089 1090 status_t err = OK; 1091 1092 const uint32_t samplesPerPixel = 1; 1093 const uint32_t bitsPerSample = BITS_PER_SAMPLE; 1094 1095 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB; 1096 uint8_t cfaPlaneColor[3] = {0, 1, 2}; 1097 uint8_t cfaEnum = -1; 1098 1099 // TODO: Greensplit. 
1100 // TODO: Add remaining non-essential tags 1101 1102 // Setup main image tags 1103 1104 { 1105 // Set orientation 1106 uint16_t orientation = TAG_ORIENTATION_NORMAL; 1107 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0), 1108 env, TAG_ORIENTATION, writer); 1109 } 1110 1111 { 1112 // Set subfiletype 1113 uint32_t subfileType = 0; // Main image 1114 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType, 1115 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer); 1116 } 1117 1118 { 1119 // Set bits per sample 1120 uint16_t bits = static_cast<uint16_t>(bitsPerSample); 1121 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env, 1122 TAG_BITSPERSAMPLE, writer); 1123 } 1124 1125 { 1126 // Set compression 1127 uint16_t compression = 1; // None 1128 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression, 1129 TIFF_IFD_0), env, TAG_COMPRESSION, writer); 1130 } 1131 1132 { 1133 // Set dimensions 1134 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0), 1135 env, TAG_IMAGEWIDTH, writer); 1136 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0), 1137 env, TAG_IMAGELENGTH, writer); 1138 } 1139 1140 { 1141 // Set photometric interpretation 1142 uint16_t interpretation = 32803; // CFA 1143 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1, 1144 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer); 1145 } 1146 1147 { 1148 // Set blacklevel tags, using dynamic black level if available 1149 camera_metadata_entry entry = 1150 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL); 1151 uint32_t blackLevelRational[8] = {0}; 1152 if (entry.count != 0) { 1153 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer); 1154 for (size_t i = 0; i < entry.count; i++) { 1155 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] 
* 100); 1156 blackLevelRational[i * 2 + 1] = 100; 1157 } 1158 } else { 1159 // Fall back to static black level which is guaranteed 1160 entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN); 1161 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer); 1162 for (size_t i = 0; i < entry.count; i++) { 1163 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]); 1164 blackLevelRational[i * 2 + 1] = 1; 1165 } 1166 1167 } 1168 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational, 1169 TIFF_IFD_0), env, TAG_BLACKLEVEL, writer); 1170 1171 uint16_t repeatDim[2] = {2, 2}; 1172 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim, 1173 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer); 1174 } 1175 1176 { 1177 // Set samples per pixel 1178 uint16_t samples = static_cast<uint16_t>(samplesPerPixel); 1179 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0), 1180 env, TAG_SAMPLESPERPIXEL, writer); 1181 } 1182 1183 { 1184 // Set planar configuration 1185 uint16_t config = 1; // Chunky 1186 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config, 1187 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer); 1188 } 1189 1190 { 1191 // Set CFA pattern dimensions 1192 uint16_t repeatDim[2] = {2, 2}; 1193 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim, 1194 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer); 1195 } 1196 1197 { 1198 // Set CFA pattern 1199 camera_metadata_entry entry = 1200 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); 1201 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer); 1202 1203 const int cfaLength = 4; 1204 cfaEnum = entry.data.u8[0]; 1205 uint8_t cfa[cfaLength]; 1206 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) { 1207 jniThrowExceptionFmt(env, "java/lang/IllegalStateException", 1208 "Invalid metadata for tag %d", TAG_CFAPATTERN); 
1209 } 1210 1211 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0), 1212 env, TAG_CFAPATTERN, writer); 1213 1214 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum); 1215 } 1216 1217 { 1218 // Set CFA plane color 1219 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor, 1220 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer); 1221 } 1222 1223 { 1224 // Set CFA layout 1225 uint16_t cfaLayout = 1; 1226 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0), 1227 env, TAG_CFALAYOUT, writer); 1228 } 1229 1230 { 1231 // image description 1232 uint8_t imageDescription = '\0'; // empty 1233 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription, 1234 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer); 1235 } 1236 1237 { 1238 // make 1239 char manufacturer[PROPERTY_VALUE_MAX]; 1240 1241 // Use "" to represent unknown make as suggested in TIFF/EP spec. 1242 property_get("ro.product.manufacturer", manufacturer, ""); 1243 uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1; 1244 1245 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count, 1246 reinterpret_cast<uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE, writer); 1247 } 1248 1249 { 1250 // model 1251 char model[PROPERTY_VALUE_MAX]; 1252 1253 // Use "" to represent unknown model as suggested in TIFF/EP spec. 
1254 property_get("ro.product.model", model, ""); 1255 uint32_t count = static_cast<uint32_t>(strlen(model)) + 1; 1256 1257 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count, 1258 reinterpret_cast<uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL, writer); 1259 } 1260 1261 { 1262 // x resolution 1263 uint32_t xres[] = { 72, 1 }; // default 72 ppi 1264 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0), 1265 env, TAG_XRESOLUTION, writer); 1266 1267 // y resolution 1268 uint32_t yres[] = { 72, 1 }; // default 72 ppi 1269 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0), 1270 env, TAG_YRESOLUTION, writer); 1271 1272 uint16_t unit = 2; // inches 1273 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0), 1274 env, TAG_RESOLUTIONUNIT, writer); 1275 } 1276 1277 { 1278 // software 1279 char software[PROPERTY_VALUE_MAX]; 1280 property_get("ro.build.fingerprint", software, ""); 1281 uint32_t count = static_cast<uint32_t>(strlen(software)) + 1; 1282 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count, 1283 reinterpret_cast<uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE, writer); 1284 } 1285 1286 if (nativeContext->hasCaptureTime()) { 1287 // datetime 1288 String8 captureTime = nativeContext->getCaptureTime(); 1289 1290 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT, 1291 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) { 1292 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", 1293 "Invalid metadata for tag %x", TAG_DATETIME); 1294 return nullptr; 1295 } 1296 1297 // datetime original 1298 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT, 1299 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) { 1300 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", 1301 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL); 1302 return nullptr; 1303 } 
1304 } 1305 1306 { 1307 // TIFF/EP standard id 1308 uint8_t standardId[] = { 1, 0, 0, 0 }; 1309 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId, 1310 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer); 1311 } 1312 1313 { 1314 // copyright 1315 uint8_t copyright = '\0'; // empty 1316 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, ©right, 1317 TIFF_IFD_0), env, TAG_COPYRIGHT, writer); 1318 } 1319 1320 { 1321 // exposure time 1322 camera_metadata_entry entry = 1323 results.find(ANDROID_SENSOR_EXPOSURE_TIME); 1324 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer); 1325 1326 int64_t exposureTime = *(entry.data.i64); 1327 1328 if (exposureTime < 0) { 1329 // Should be unreachable 1330 jniThrowException(env, "java/lang/IllegalArgumentException", 1331 "Negative exposure time in metadata"); 1332 return nullptr; 1333 } 1334 1335 // Ensure exposure time doesn't overflow (for exposures > 4s) 1336 uint32_t denominator = 1000000000; 1337 while (exposureTime > UINT32_MAX) { 1338 exposureTime >>= 1; 1339 denominator >>= 1; 1340 if (denominator == 0) { 1341 // Should be unreachable 1342 jniThrowException(env, "java/lang/IllegalArgumentException", 1343 "Exposure time too long"); 1344 return nullptr; 1345 } 1346 } 1347 1348 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator }; 1349 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure, 1350 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer); 1351 1352 } 1353 1354 { 1355 // ISO speed ratings 1356 camera_metadata_entry entry = 1357 results.find(ANDROID_SENSOR_SENSITIVITY); 1358 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer); 1359 1360 int32_t tempIso = *(entry.data.i32); 1361 if (tempIso < 0) { 1362 jniThrowException(env, "java/lang/IllegalArgumentException", 1363 "Negative ISO value"); 1364 return nullptr; 1365 } 1366 1367 if (tempIso > UINT16_MAX) { 1368 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", 
__FUNCTION__); 1369 tempIso = UINT16_MAX; 1370 } 1371 1372 uint16_t iso = static_cast<uint16_t>(tempIso); 1373 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso, 1374 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer); 1375 } 1376 1377 { 1378 // focal length 1379 camera_metadata_entry entry = 1380 results.find(ANDROID_LENS_FOCAL_LENGTH); 1381 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer); 1382 1383 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 }; 1384 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength, 1385 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer); 1386 } 1387 1388 { 1389 // f number 1390 camera_metadata_entry entry = 1391 results.find(ANDROID_LENS_APERTURE); 1392 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer); 1393 1394 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 }; 1395 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum, 1396 TIFF_IFD_0), env, TAG_FNUMBER, writer); 1397 } 1398 1399 { 1400 // Set DNG version information 1401 uint8_t version[4] = {1, 4, 0, 0}; 1402 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0), 1403 env, TAG_DNGVERSION, writer); 1404 1405 uint8_t backwardVersion[4] = {1, 1, 0, 0}; 1406 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion, 1407 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer); 1408 } 1409 1410 { 1411 // Set whitelevel 1412 camera_metadata_entry entry = 1413 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL); 1414 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer); 1415 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]); 1416 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0), 1417 env, TAG_WHITELEVEL, writer); 1418 } 1419 1420 { 1421 // Set default scale 1422 uint32_t defaultScale[4] = {1, 1, 1, 1}; 1423 
BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale, 1424 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer); 1425 } 1426 1427 bool singleIlluminant = false; 1428 { 1429 // Set calibration illuminants 1430 camera_metadata_entry entry1 = 1431 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1); 1432 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer); 1433 camera_metadata_entry entry2 = 1434 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2); 1435 if (entry2.count == 0) { 1436 singleIlluminant = true; 1437 } 1438 uint16_t ref1 = entry1.data.u8[0]; 1439 1440 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1, 1441 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer); 1442 1443 if (!singleIlluminant) { 1444 uint16_t ref2 = entry2.data.u8[0]; 1445 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2, 1446 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer); 1447 } 1448 } 1449 1450 { 1451 // Set color transforms 1452 camera_metadata_entry entry1 = 1453 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1); 1454 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer); 1455 1456 int32_t colorTransform1[entry1.count * 2]; 1457 1458 size_t ctr = 0; 1459 for(size_t i = 0; i < entry1.count; ++i) { 1460 colorTransform1[ctr++] = entry1.data.r[i].numerator; 1461 colorTransform1[ctr++] = entry1.data.r[i].denominator; 1462 } 1463 1464 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count, 1465 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer); 1466 1467 if (!singleIlluminant) { 1468 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2); 1469 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer); 1470 int32_t colorTransform2[entry2.count * 2]; 1471 1472 ctr = 0; 1473 for(size_t i = 0; i < entry2.count; ++i) { 1474 colorTransform2[ctr++] = entry2.data.r[i].numerator; 1475 
colorTransform2[ctr++] = entry2.data.r[i].denominator; 1476 } 1477 1478 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count, 1479 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer); 1480 } 1481 } 1482 1483 { 1484 // Set calibration transforms 1485 camera_metadata_entry entry1 = 1486 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1); 1487 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer); 1488 1489 int32_t calibrationTransform1[entry1.count * 2]; 1490 1491 size_t ctr = 0; 1492 for(size_t i = 0; i < entry1.count; ++i) { 1493 calibrationTransform1[ctr++] = entry1.data.r[i].numerator; 1494 calibrationTransform1[ctr++] = entry1.data.r[i].denominator; 1495 } 1496 1497 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count, 1498 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer); 1499 1500 if (!singleIlluminant) { 1501 camera_metadata_entry entry2 = 1502 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2); 1503 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer); 1504 int32_t calibrationTransform2[entry2.count * 2]; 1505 1506 ctr = 0; 1507 for(size_t i = 0; i < entry2.count; ++i) { 1508 calibrationTransform2[ctr++] = entry2.data.r[i].numerator; 1509 calibrationTransform2[ctr++] = entry2.data.r[i].denominator; 1510 } 1511 1512 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count, 1513 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer); 1514 } 1515 } 1516 1517 { 1518 // Set forward transforms 1519 camera_metadata_entry entry1 = 1520 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1); 1521 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer); 1522 1523 int32_t forwardTransform1[entry1.count * 2]; 1524 1525 size_t ctr = 0; 1526 for(size_t i = 0; i < entry1.count; ++i) { 1527 forwardTransform1[ctr++] = entry1.data.r[i].numerator; 1528 forwardTransform1[ctr++] = 
entry1.data.r[i].denominator; 1529 } 1530 1531 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count, 1532 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer); 1533 1534 if (!singleIlluminant) { 1535 camera_metadata_entry entry2 = 1536 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2); 1537 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer); 1538 int32_t forwardTransform2[entry2.count * 2]; 1539 1540 ctr = 0; 1541 for(size_t i = 0; i < entry2.count; ++i) { 1542 forwardTransform2[ctr++] = entry2.data.r[i].numerator; 1543 forwardTransform2[ctr++] = entry2.data.r[i].denominator; 1544 } 1545 1546 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count, 1547 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer); 1548 } 1549 } 1550 1551 { 1552 // Set camera neutral 1553 camera_metadata_entry entry = 1554 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT); 1555 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer); 1556 uint32_t cameraNeutral[entry.count * 2]; 1557 1558 size_t ctr = 0; 1559 for(size_t i = 0; i < entry.count; ++i) { 1560 cameraNeutral[ctr++] = 1561 static_cast<uint32_t>(entry.data.r[i].numerator); 1562 cameraNeutral[ctr++] = 1563 static_cast<uint32_t>(entry.data.r[i].denominator); 1564 } 1565 1566 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral, 1567 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer); 1568 } 1569 1570 1571 { 1572 // Set dimensions 1573 if (calculateAndSetCrop(env, characteristics, writer) != OK) { 1574 return nullptr; 1575 } 1576 camera_metadata_entry entry = 1577 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 1578 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer); 1579 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]); 1580 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]); 1581 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]); 1582 uint32_t 
height = static_cast<uint32_t>(entry.data.i32[3]); 1583 1584 // If we only have a buffer containing the pre-correction rectangle, ignore the offset 1585 // relative to the pixel array. 1586 if (imageWidth == width && imageHeight == height) { 1587 xmin = 0; 1588 ymin = 0; 1589 } 1590 1591 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width}; 1592 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0), 1593 env, TAG_ACTIVEAREA, writer); 1594 } 1595 1596 { 1597 // Setup unique camera model tag 1598 char model[PROPERTY_VALUE_MAX]; 1599 property_get("ro.product.model", model, ""); 1600 1601 char manufacturer[PROPERTY_VALUE_MAX]; 1602 property_get("ro.product.manufacturer", manufacturer, ""); 1603 1604 char brand[PROPERTY_VALUE_MAX]; 1605 property_get("ro.product.brand", brand, ""); 1606 1607 String8 cameraModel(model); 1608 cameraModel += "-"; 1609 cameraModel += manufacturer; 1610 cameraModel += "-"; 1611 cameraModel += brand; 1612 1613 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1, 1614 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env, 1615 TAG_UNIQUECAMERAMODEL, writer); 1616 } 1617 1618 { 1619 // Setup sensor noise model 1620 camera_metadata_entry entry = 1621 results.find(ANDROID_SENSOR_NOISE_PROFILE); 1622 1623 const status_t numPlaneColors = 3; 1624 const status_t numCfaChannels = 4; 1625 1626 uint8_t cfaOut[numCfaChannels]; 1627 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) { 1628 jniThrowException(env, "java/lang/IllegalArgumentException", 1629 "Invalid CFA from camera characteristics"); 1630 return nullptr; 1631 } 1632 1633 double noiseProfile[numPlaneColors * 2]; 1634 1635 if (entry.count > 0) { 1636 if (entry.count != numCfaChannels * 2) { 1637 ALOGW("%s: Invalid entry count %zu for noise profile returned " 1638 "in characteristics, no noise profile tag written...", 1639 __FUNCTION__, entry.count); 1640 } else { 1641 if ((err = 
generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels, 1642 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) { 1643 1644 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE, 1645 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE, 1646 writer); 1647 } else { 1648 ALOGW("%s: Error converting coefficients for noise profile, no noise profile" 1649 " tag written...", __FUNCTION__); 1650 } 1651 } 1652 } else { 1653 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.", 1654 __FUNCTION__); 1655 } 1656 } 1657 1658 { 1659 // Set up opcode List 2 1660 OpcodeListBuilder builder; 1661 status_t err = OK; 1662 1663 // Set up lens shading map 1664 camera_metadata_entry entry1 = 1665 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE); 1666 1667 uint32_t lsmWidth = 0; 1668 uint32_t lsmHeight = 0; 1669 1670 if (entry1.count != 0) { 1671 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]); 1672 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]); 1673 } 1674 1675 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP); 1676 1677 camera_metadata_entry entry = 1678 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 1679 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer); 1680 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]); 1681 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]); 1682 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]); 1683 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]); 1684 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) { 1685 // GainMap rectangle is relative to the active area origin. 
1686 err = builder.addGainMapsForMetadata(lsmWidth, 1687 lsmHeight, 1688 0, 1689 0, 1690 height, 1691 width, 1692 opcodeCfaLayout, 1693 entry2.data.f); 1694 if (err != OK) { 1695 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__); 1696 jniThrowRuntimeException(env, "failed to add lens shading map."); 1697 return nullptr; 1698 } 1699 } 1700 1701 1702 // Set up bad pixel correction list 1703 camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP); 1704 1705 if ((entry3.count % 2) != 0) { 1706 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!", 1707 __FUNCTION__); 1708 jniThrowRuntimeException(env, "failed to add hotpixel map."); 1709 return nullptr; 1710 } 1711 1712 // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag 1713 std::vector<uint32_t> v; 1714 for (size_t i = 0; i < entry3.count; i+=2) { 1715 int32_t x = entry3.data.i32[i]; 1716 int32_t y = entry3.data.i32[i + 1]; 1717 x -= static_cast<int32_t>(xmin); 1718 y -= static_cast<int32_t>(ymin); 1719 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width || 1720 static_cast<uint32_t>(y) >= width) { 1721 continue; 1722 } 1723 v.push_back(x); 1724 v.push_back(y); 1725 } 1726 const uint32_t* badPixels = &v[0]; 1727 uint32_t badPixelCount = v.size(); 1728 1729 if (badPixelCount > 0) { 1730 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout); 1731 1732 if (err != OK) { 1733 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__); 1734 jniThrowRuntimeException(env, "failed to add hotpixel map."); 1735 return nullptr; 1736 } 1737 } 1738 1739 1740 size_t listSize = builder.getSize(); 1741 uint8_t opcodeListBuf[listSize]; 1742 err = builder.buildOpList(opcodeListBuf); 1743 if (err == OK) { 1744 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf, 1745 TIFF_IFD_0), env, TAG_OPCODELIST2, writer); 1746 } else { 1747 ALOGE("%s: Could not build list of opcodes 
for distortion correction and lens shading" 1748 "map.", __FUNCTION__); 1749 jniThrowRuntimeException(env, "failed to construct opcode list for distortion" 1750 " correction and lens shading map"); 1751 return nullptr; 1752 } 1753 } 1754 1755 { 1756 // Set up opcode List 3 1757 OpcodeListBuilder builder; 1758 status_t err = OK; 1759 1760 // Set up rectilinear distortion correction 1761 camera_metadata_entry entry3 = 1762 results.find(ANDROID_LENS_RADIAL_DISTORTION); 1763 camera_metadata_entry entry4 = 1764 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION); 1765 1766 if (entry3.count == 6 && entry4.count == 5) { 1767 float cx = entry4.data.f[/*c_x*/2]; 1768 float cy = entry4.data.f[/*c_y*/3]; 1769 err = builder.addWarpRectilinearForMetadata(entry3.data.f, preWidth, preHeight, cx, 1770 cy); 1771 if (err != OK) { 1772 ALOGE("%s: Could not add distortion correction.", __FUNCTION__); 1773 jniThrowRuntimeException(env, "failed to add distortion correction."); 1774 return nullptr; 1775 } 1776 } 1777 1778 size_t listSize = builder.getSize(); 1779 uint8_t opcodeListBuf[listSize]; 1780 err = builder.buildOpList(opcodeListBuf); 1781 if (err == OK) { 1782 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf, 1783 TIFF_IFD_0), env, TAG_OPCODELIST3, writer); 1784 } else { 1785 ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading" 1786 "map.", __FUNCTION__); 1787 jniThrowRuntimeException(env, "failed to construct opcode list for distortion" 1788 " correction and lens shading map"); 1789 return nullptr; 1790 } 1791 } 1792 1793 { 1794 // Set up orientation tags. 1795 // Note: There's only one orientation field for the whole file, in IFD0 1796 // The main image and any thumbnails therefore have the same orientation. 
1797 uint16_t orientation = nativeContext->getOrientation(); 1798 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0), 1799 env, TAG_ORIENTATION, writer); 1800 1801 } 1802 1803 if (nativeContext->hasDescription()){ 1804 // Set Description 1805 String8 description = nativeContext->getDescription(); 1806 size_t len = description.bytes() + 1; 1807 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len, 1808 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) { 1809 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", 1810 "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION); 1811 } 1812 } 1813 1814 if (nativeContext->hasGpsData()) { 1815 // Set GPS tags 1816 GpsData gpsData = nativeContext->getGpsData(); 1817 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) { 1818 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) { 1819 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO, 1820 TIFF_IFD_0); 1821 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO"); 1822 return nullptr; 1823 } 1824 } 1825 1826 { 1827 uint8_t version[] = {2, 3, 0, 0}; 1828 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version, 1829 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer); 1830 } 1831 1832 { 1833 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF, 1834 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env, 1835 TAG_GPSLATITUDEREF, writer); 1836 } 1837 1838 { 1839 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF, 1840 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env, 1841 TAG_GPSLONGITUDEREF, writer); 1842 } 1843 1844 { 1845 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude, 1846 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer); 1847 } 1848 1849 { 1850 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, 
gpsData.mLongitude, 1851 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer); 1852 } 1853 1854 { 1855 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp, 1856 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer); 1857 } 1858 1859 { 1860 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP, 1861 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env, 1862 TAG_GPSDATESTAMP, writer); 1863 } 1864 } 1865 1866 1867 if (nativeContext->hasThumbnail()) { 1868 if (!writer->hasIfd(TIFF_IFD_SUB1)) { 1869 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) { 1870 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1, 1871 TIFF_IFD_0); 1872 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD"); 1873 return nullptr; 1874 } 1875 } 1876 1877 Vector<uint16_t> tagsToMove; 1878 tagsToMove.add(TAG_NEWSUBFILETYPE); 1879 tagsToMove.add(TAG_ACTIVEAREA); 1880 tagsToMove.add(TAG_BITSPERSAMPLE); 1881 tagsToMove.add(TAG_COMPRESSION); 1882 tagsToMove.add(TAG_IMAGEWIDTH); 1883 tagsToMove.add(TAG_IMAGELENGTH); 1884 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION); 1885 tagsToMove.add(TAG_BLACKLEVEL); 1886 tagsToMove.add(TAG_BLACKLEVELREPEATDIM); 1887 tagsToMove.add(TAG_SAMPLESPERPIXEL); 1888 tagsToMove.add(TAG_PLANARCONFIGURATION); 1889 tagsToMove.add(TAG_CFAREPEATPATTERNDIM); 1890 tagsToMove.add(TAG_CFAPATTERN); 1891 tagsToMove.add(TAG_CFAPLANECOLOR); 1892 tagsToMove.add(TAG_CFALAYOUT); 1893 tagsToMove.add(TAG_XRESOLUTION); 1894 tagsToMove.add(TAG_YRESOLUTION); 1895 tagsToMove.add(TAG_RESOLUTIONUNIT); 1896 tagsToMove.add(TAG_WHITELEVEL); 1897 tagsToMove.add(TAG_DEFAULTSCALE); 1898 tagsToMove.add(TAG_DEFAULTCROPORIGIN); 1899 tagsToMove.add(TAG_DEFAULTCROPSIZE); 1900 tagsToMove.add(TAG_OPCODELIST2); 1901 tagsToMove.add(TAG_OPCODELIST3); 1902 1903 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) { 1904 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move 
entries"); 1905 return nullptr; 1906 } 1907 1908 // Setup thumbnail tags 1909 1910 { 1911 // Set photometric interpretation 1912 uint16_t interpretation = 2; // RGB 1913 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1, 1914 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer); 1915 } 1916 1917 { 1918 // Set planar configuration 1919 uint16_t config = 1; // Chunky 1920 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config, 1921 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer); 1922 } 1923 1924 { 1925 // Set samples per pixel 1926 uint16_t samples = SAMPLES_PER_RGB_PIXEL; 1927 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, 1928 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer); 1929 } 1930 1931 { 1932 // Set bits per sample 1933 uint16_t bits[SAMPLES_PER_RGB_PIXEL]; 1934 for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE; 1935 BAIL_IF_INVALID_RET_NULL_SP( 1936 writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0), 1937 env, TAG_BITSPERSAMPLE, writer); 1938 } 1939 1940 { 1941 // Set subfiletype 1942 uint32_t subfileType = 1; // Thumbnail image 1943 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType, 1944 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer); 1945 } 1946 1947 { 1948 // Set compression 1949 uint16_t compression = 1; // None 1950 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression, 1951 TIFF_IFD_0), env, TAG_COMPRESSION, writer); 1952 } 1953 1954 { 1955 // Set dimensions 1956 uint32_t uWidth = nativeContext->getThumbnailWidth(); 1957 uint32_t uHeight = nativeContext->getThumbnailHeight(); 1958 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0), 1959 env, TAG_IMAGEWIDTH, writer); 1960 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0), 1961 env, TAG_IMAGELENGTH, writer); 1962 } 1963 
1964 { 1965 // x resolution 1966 uint32_t xres[] = { 72, 1 }; // default 72 ppi 1967 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0), 1968 env, TAG_XRESOLUTION, writer); 1969 1970 // y resolution 1971 uint32_t yres[] = { 72, 1 }; // default 72 ppi 1972 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0), 1973 env, TAG_YRESOLUTION, writer); 1974 1975 uint16_t unit = 2; // inches 1976 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0), 1977 env, TAG_RESOLUTIONUNIT, writer); 1978 } 1979 } 1980 1981 if (writer->addStrip(TIFF_IFD_0) != OK) { 1982 ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__); 1983 jniThrowException(env, "java/lang/IllegalStateException", 1984 "Failed to setup thumbnail strip tags."); 1985 return nullptr; 1986 } 1987 1988 if (writer->hasIfd(TIFF_IFD_SUB1)) { 1989 if (writer->addStrip(TIFF_IFD_SUB1) != OK) { 1990 ALOGE("%s: Could not main image strip tags.", __FUNCTION__); 1991 jniThrowException(env, "java/lang/IllegalStateException", 1992 "Failed to setup main image strip tags."); 1993 return nullptr; 1994 } 1995 } 1996 return writer; 1997 } 1998 1999 static void DngCreator_destroy(JNIEnv* env, jobject thiz) { 2000 ALOGV("%s:", __FUNCTION__); 2001 DngCreator_setNativeContext(env, thiz, nullptr); 2002 } 2003 2004 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) { 2005 ALOGV("%s:", __FUNCTION__); 2006 2007 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2008 if (context == nullptr) { 2009 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2010 jniThrowException(env, "java/lang/AssertionError", 2011 "setOrientation called with uninitialized DngCreator"); 2012 return; 2013 } 2014 2015 uint16_t orientation = static_cast<uint16_t>(orient); 2016 context->setOrientation(orientation); 2017 } 2018 2019 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring 
description) { 2020 ALOGV("%s:", __FUNCTION__); 2021 2022 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2023 if (context == nullptr) { 2024 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2025 jniThrowException(env, "java/lang/AssertionError", 2026 "setDescription called with uninitialized DngCreator"); 2027 return; 2028 } 2029 2030 const char* desc = env->GetStringUTFChars(description, nullptr); 2031 context->setDescription(String8(desc)); 2032 env->ReleaseStringUTFChars(description, desc); 2033 } 2034 2035 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag, 2036 jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) { 2037 ALOGV("%s:", __FUNCTION__); 2038 2039 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2040 if (context == nullptr) { 2041 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2042 jniThrowException(env, "java/lang/AssertionError", 2043 "setGpsTags called with uninitialized DngCreator"); 2044 return; 2045 } 2046 2047 GpsData data; 2048 2049 jsize latLen = env->GetArrayLength(latTag); 2050 jsize longLen = env->GetArrayLength(longTag); 2051 jsize timeLen = env->GetArrayLength(timeTag); 2052 if (latLen != GpsData::GPS_VALUE_LENGTH) { 2053 jniThrowException(env, "java/lang/IllegalArgumentException", 2054 "invalid latitude tag length"); 2055 return; 2056 } else if (longLen != GpsData::GPS_VALUE_LENGTH) { 2057 jniThrowException(env, "java/lang/IllegalArgumentException", 2058 "invalid longitude tag length"); 2059 return; 2060 } else if (timeLen != GpsData::GPS_VALUE_LENGTH) { 2061 jniThrowException(env, "java/lang/IllegalArgumentException", 2062 "invalid time tag length"); 2063 return; 2064 } 2065 2066 env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH), 2067 reinterpret_cast<jint*>(&data.mLatitude)); 2068 env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH), 2069 
reinterpret_cast<jint*>(&data.mLongitude)); 2070 env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH), 2071 reinterpret_cast<jint*>(&data.mTimestamp)); 2072 2073 2074 env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef)); 2075 data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0'; 2076 env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef)); 2077 data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0'; 2078 env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1, 2079 reinterpret_cast<char*>(&data.mDate)); 2080 data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0'; 2081 2082 context->setGpsData(data); 2083 } 2084 2085 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width, 2086 jint height) { 2087 ALOGV("%s:", __FUNCTION__); 2088 2089 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2090 if (context == nullptr) { 2091 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2092 jniThrowException(env, "java/lang/AssertionError", 2093 "setThumbnail called with uninitialized DngCreator"); 2094 return; 2095 } 2096 2097 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL; 2098 jlong capacity = env->GetDirectBufferCapacity(buffer); 2099 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) { 2100 jniThrowExceptionFmt(env, "java/lang/AssertionError", 2101 "Invalid size %d for thumbnail, expected size was %d", 2102 capacity, fullSize); 2103 return; 2104 } 2105 2106 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer)); 2107 if (pixelBytes == nullptr) { 2108 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__); 2109 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer"); 2110 return; 2111 } 2112 2113 if (!context->setThumbnail(pixelBytes, width, height)) { 2114 jniThrowException(env, "java/lang/IllegalStateException", 2115 "Failed to set 
thumbnail."); 2116 return; 2117 } 2118 } 2119 2120 // TODO: Refactor out common preamble for the two nativeWrite methods. 2121 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width, 2122 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset, 2123 jboolean isDirect) { 2124 ALOGV("%s:", __FUNCTION__); 2125 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, " 2126 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width, 2127 height, rowStride, pixStride, offset); 2128 uint32_t rStride = static_cast<uint32_t>(rowStride); 2129 uint32_t pStride = static_cast<uint32_t>(pixStride); 2130 uint32_t uWidth = static_cast<uint32_t>(width); 2131 uint32_t uHeight = static_cast<uint32_t>(height); 2132 uint64_t uOffset = static_cast<uint64_t>(offset); 2133 2134 sp<JniOutputStream> out = new JniOutputStream(env, outStream); 2135 if(env->ExceptionCheck()) { 2136 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__); 2137 return; 2138 } 2139 2140 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2141 if (context == nullptr) { 2142 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2143 jniThrowException(env, "java/lang/AssertionError", 2144 "Write called with uninitialized DngCreator"); 2145 return; 2146 } 2147 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight); 2148 2149 if (writer.get() == nullptr) { 2150 return; 2151 } 2152 2153 // Validate DNG size 2154 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) { 2155 return; 2156 } 2157 2158 sp<JniInputByteBuffer> inBuf; 2159 Vector<StripSource*> sources; 2160 sp<DirectStripSource> thumbnailSource; 2161 uint32_t targetIfd = TIFF_IFD_0; 2162 2163 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1); 2164 2165 if (hasThumbnail) { 2166 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); 2167 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * 
BYTES_PER_RGB_SAMPLE; 2168 uint32_t thumbWidth = context->getThumbnailWidth(); 2169 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0, 2170 thumbWidth, context->getThumbnailHeight(), bytesPerPixel, 2171 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE, 2172 SAMPLES_PER_RGB_PIXEL); 2173 sources.add(thumbnailSource.get()); 2174 targetIfd = TIFF_IFD_SUB1; 2175 } 2176 2177 if (isDirect) { 2178 size_t fullSize = rStride * uHeight; 2179 jlong capacity = env->GetDirectBufferCapacity(inBuffer); 2180 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) { 2181 jniThrowExceptionFmt(env, "java/lang/IllegalStateException", 2182 "Invalid size %d for Image, size given in metadata is %d at current stride", 2183 capacity, fullSize); 2184 return; 2185 } 2186 2187 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer)); 2188 if (pixelBytes == nullptr) { 2189 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__); 2190 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer"); 2191 return; 2192 } 2193 2194 ALOGV("%s: Using direct-type strip source.", __FUNCTION__); 2195 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride, 2196 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); 2197 sources.add(&stripSource); 2198 2199 status_t ret = OK; 2200 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2201 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2202 if (!env->ExceptionCheck()) { 2203 jniThrowExceptionFmt(env, "java/io/IOException", 2204 "Encountered error %d while writing file.", ret); 2205 } 2206 return; 2207 } 2208 } else { 2209 inBuf = new JniInputByteBuffer(env, inBuffer); 2210 2211 ALOGV("%s: Using input-type strip source.", __FUNCTION__); 2212 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride, 2213 rStride, uOffset, BYTES_PER_SAMPLE, 
SAMPLES_PER_RAW_PIXEL); 2214 sources.add(&stripSource); 2215 2216 status_t ret = OK; 2217 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2218 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2219 if (!env->ExceptionCheck()) { 2220 jniThrowExceptionFmt(env, "java/io/IOException", 2221 "Encountered error %d while writing file.", ret); 2222 } 2223 return; 2224 } 2225 } 2226 } 2227 2228 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream, 2229 jobject inStream, jint width, jint height, jlong offset) { 2230 ALOGV("%s:", __FUNCTION__); 2231 2232 uint32_t rowStride = width * BYTES_PER_SAMPLE; 2233 uint32_t pixStride = BYTES_PER_SAMPLE; 2234 uint32_t uWidth = static_cast<uint32_t>(width); 2235 uint32_t uHeight = static_cast<uint32_t>(height); 2236 uint64_t uOffset = static_cast<uint32_t>(offset); 2237 2238 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, " 2239 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width, 2240 height, rowStride, pixStride, offset); 2241 2242 sp<JniOutputStream> out = new JniOutputStream(env, outStream); 2243 if (env->ExceptionCheck()) { 2244 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__); 2245 return; 2246 } 2247 2248 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2249 if (context == nullptr) { 2250 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2251 jniThrowException(env, "java/lang/AssertionError", 2252 "Write called with uninitialized DngCreator"); 2253 return; 2254 } 2255 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight); 2256 2257 if (writer.get() == nullptr) { 2258 return; 2259 } 2260 2261 // Validate DNG size 2262 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) { 2263 return; 2264 } 2265 2266 sp<DirectStripSource> thumbnailSource; 2267 uint32_t targetIfd = TIFF_IFD_0; 2268 bool hasThumbnail = 
writer->hasIfd(TIFF_IFD_SUB1); 2269 Vector<StripSource*> sources; 2270 2271 if (hasThumbnail) { 2272 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); 2273 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; 2274 uint32_t width = context->getThumbnailWidth(); 2275 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0, 2276 width, context->getThumbnailHeight(), bytesPerPixel, 2277 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE, 2278 SAMPLES_PER_RGB_PIXEL); 2279 sources.add(thumbnailSource.get()); 2280 targetIfd = TIFF_IFD_SUB1; 2281 } 2282 2283 sp<JniInputStream> in = new JniInputStream(env, inStream); 2284 2285 ALOGV("%s: Using input-type strip source.", __FUNCTION__); 2286 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride, 2287 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); 2288 sources.add(&stripSource); 2289 2290 status_t ret = OK; 2291 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2292 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2293 if (!env->ExceptionCheck()) { 2294 jniThrowExceptionFmt(env, "java/io/IOException", 2295 "Encountered error %d while writing file.", ret); 2296 } 2297 return; 2298 } 2299 } 2300 2301 } /*extern "C" */ 2302 2303 static const JNINativeMethod gDngCreatorMethods[] = { 2304 {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit}, 2305 {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;" 2306 "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V", 2307 (void*) DngCreator_init}, 2308 {"nativeDestroy", "()V", (void*) DngCreator_destroy}, 2309 {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation}, 2310 {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription}, 2311 {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V", 2312 (void*) 
DngCreator_nativeSetGpsTags}, 2313 {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail}, 2314 {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V", 2315 (void*) DngCreator_nativeWriteImage}, 2316 {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V", 2317 (void*) DngCreator_nativeWriteInputStream}, 2318 }; 2319 2320 int register_android_hardware_camera2_DngCreator(JNIEnv *env) { 2321 return RegisterMethodsOrDie(env, 2322 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods)); 2323 } 2324