diff --git a/app/src/main/cpp/MicroPhoto.cpp b/app/src/main/cpp/MicroPhoto.cpp
index 3d5a67b6..45ee32d8 100644
--- a/app/src/main/cpp/MicroPhoto.cpp
+++ b/app/src/main/cpp/MicroPhoto.cpp
@@ -195,7 +195,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
     google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
 #endif
-
+#if 0
     {
         struct sigaction sig_action = {};
         sig_action.sa_sigaction = posix_signal_handler;
@@ -223,6 +223,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
     // }
     env->DeleteLocalRef(clazz);
+#endif
 
     return result;
 }
diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index f8a2578d..c8cf8381 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -46,6 +46,18 @@ extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
 // are normalized to eight bits.
 static const int kMaxChannelValue = 262143;
 
+class ByteArraysPointer
+{
+public:
+    ByteArraysPointer()
+    {
+    }
+    ~ByteArraysPointer()
+    {
+        byteArrays.clear();
+    }
+    std::vector<std::vector<uint8_t> > byteArrays;
+};
 
 cv::Mat convert16bit2_8bit_(cv::Mat ans){
     if(ans.type()==CV_16UC3){
@@ -181,6 +193,24 @@ bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb)
     return false;
 }
 
+bool CPhoneDevice::CPhoneCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
+{
+    if (m_dev != NULL)
+    {
+        return m_dev->onOneCapture(characteristics, result, ldr, rgb);
+    }
+    return false;
+}
+
+bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
+{
+    if (m_dev != NULL)
+    {
+        return m_dev->onBurstCapture(characteristics, results, ldr, frames);
+    }
+    return false;
+}
+
 bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
 {
     if (m_dev != NULL)
     {
         return m_dev->onBurstCapture(characteristics, results, ldr, frames);
     }
@@ -211,6 +241,24 @@ CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t
 {
 }
 
+bool CPhoneDevice::CJpegCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
+{
+    if (m_dev != NULL)
+    {
+        return m_dev->onOneCapture(characteristics, result, ldr, rgb);
+    }
+    return false;
+}
+
+bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
+{
+    if (m_dev != NULL)
+    {
+        return m_dev->onBurstCapture(characteristics, results, ldr, frames);
+    }
+    return true;
+}
+
 bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
 {
     if (m_dev != NULL)
@@ -1512,9 +1560,9 @@ void DrawOutlineText(cv::Ptr<cv::freetype::FreeType2> ft2, cv::Mat& mat, const std::st
     }
 }
 
-bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
-                                  std::vector<std::shared_ptr<ACameraMetadata> >& results,
-                                  uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
+bool CPhoneDevice::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics,
+                                std::shared_ptr<ACameraMetadata> result,
+                                uint32_t ldr, cv::Mat rgb)
 {
     time_t takingTime = time(NULL);
     if (mPhotoInfo.remedy != 0)
@@ -1532,7 +1580,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
     std::string path;
     path.swap(mPath);
 
-    std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
+    // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
 
     acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
     ACameraMetadata_const_entry e = { 0 };
@@ -1556,137 +1604,458 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
     CPhoneCamera* pCamera = mCamera;
     mCamera = NULL;
 
-    std::thread th([=]()mutable
-    {
-        cv::Mat rgb;
-        std::vector<std::vector<uint8_t> > rawFiles;
+    media_status_t mstatus;
 
-        media_status_t mstatus;
-        std::string cameraInfo;
+    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
+    m_threadClose.swap(closeThread);
+    if (closeThread.joinable())
+    {
+        closeThread.detach();
+    }
+
+    CPhoneDevice* pThis = this;
+    std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, takingTime]()mutable
+    {
+        std::string cameraInfo;
+        if (photoInfo.outputDbgInfo != 0)
+        {
+            NdkCamera::CAPTURE_RESULT captureResult = { 0 };
+            NdkCamera::EnumCameraResult(result.get(), captureResult);
+
+            char extimeunit[4] = { 0 };
+            unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
+            strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
+            char str[128] = { 0 };
+            snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
+                     captureResult.autoExposure, captureResult.autoFocus,
+                     extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
+                     // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
+                     (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
+                     captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
+                     (uint32_t)captureResult.duration, captureResult.frameDuration);
+            cameraInfo = str;
+        }
 
-        if (photoInfo.usingRawFormat != 0)
+#ifdef OUTPUT_CAMERA_DBG_INFO
+#if 0
+        bool shouldRetry = false;
+        if (ldr != ~0)
         {
-            //
-            for (int idx = 0; idx < frames.size(); idx++)
+            if (ldr < MIN_LIGHT_Y)
             {
-                std::shared_ptr<AImage> spImage = frames[idx];
-                std::shared_ptr<ACameraMetadata> result = results[idx];
-
-                auto it = rawFiles.insert(rawFiles.end(), std::vector<uint8_t>());
-
-                int32_t width;
-                int32_t height;
-                AImage_getWidth(spImage.get(), &width);
-                AImage_getHeight(spImage.get(), &height);
+                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+                {
+                    shouldRetry = true;
+                    char presetBuf[16] = {0};
+                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+                          (uint32_t) captureResult.avgY);
 
-                int planeCount;
-                media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
-                AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
+                    // photoInfo.usingRawFormat = 1;
+                }
+            }
+            else if (ldr > MAX_LIGHT_Y)
+            {
+                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+                {
+                    shouldRetry = true;
+                    char presetBuf[16] = {0};
+                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+                          (uint32_t) captureResult.avgY);
+                }
 
-                uint8_t *planeData = NULL;
-                int planeDataLen = 0;
-                mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
-                DngCreator dngCreator(characteristics.get(), result.get());
-                dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
+                photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
             }
         }
-        else
+#endif // 0
+#endif // OUTPUT_CAMERA_DBG_INFO
+
+        // Notify to take next photo
+        pThis->TakePhotoCb(1, photoInfo, "", takingTime);
+
+        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
+        if (res)
+        {
+            // TakePhotoCb(2, photoInfo, path, takingTime);
+        }
+    });
+
+    th.detach();
+
+    return true;
+}
+
+bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
+                                  std::vector<std::shared_ptr<ACameraMetadata> >& results,
+                                  uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
+{
+    time_t takingTime = time(NULL);
+    if (mPhotoInfo.remedy != 0)
+    {
+        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
+        {
+            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
+        }
+    }
+    mPhotoInfo.photoTime = takingTime;
+
+    vector osds;
+    osds.swap(mOsds);
+    PHOTO_INFO photoInfo = mPhotoInfo;
+    std::string path;
+    path.swap(mPath);
+
+    // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
+
+    std::shared_ptr<ByteArraysPointer> pByteArrays = std::make_shared<ByteArraysPointer>();
+    pByteArrays.get()->byteArrays.swap(frames);
+
+    bool turnOffOtg = (photoInfo.usbCamera != 0);
+    CPhoneCamera* pCamera = mCamera;
+    mCamera = NULL;
+
+    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
+    m_threadClose.swap(closeThread);
+    if (closeThread.joinable())
+    {
+        closeThread.detach();
+    }
+
+    CPhoneDevice* pThis = this;
+    std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, takingTime]()mutable
+    {
+        cv::Mat rgb;
+        std::string cameraInfo;
+        media_status_t mstatus;
+
+        acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
+        ACameraMetadata_const_entry e = { 0 };
+        camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
+        if (status == ACAMERA_OK)
+        {
+            facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
+        }
+
+        int sensorOrientation = 0;
+        {
+            ACameraMetadata_const_entry e = { 0 };
+            status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
+            if (status == ACAMERA_OK)
+            {
+                sensorOrientation = (int)e.data.i32[0];
+            }
+        }
+
+        if (photoInfo.outputDbgInfo != 0)
+        {
+            if (!results.empty())
+            {
+                NdkCamera::CAPTURE_RESULT captureResult = { 0 };
+                NdkCamera::EnumCameraResult(results[0].get(), captureResult);
+
+                char extimeunit[4] = { 0 };
+                unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
+                strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
+                char str[128] = { 0 };
+                snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
+                         captureResult.autoExposure, captureResult.autoFocus,
+                         extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
+                         // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
+                         (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
+                         captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
+                         (uint32_t)captureResult.duration, captureResult.frameDuration);
+                cameraInfo = str;
+            }
+        }
+
+#ifdef OUTPUT_CAMERA_DBG_INFO
+#if 0
+        bool shouldRetry = false;
+        if (ldr != ~0)
+        {
+            if (ldr < MIN_LIGHT_Y)
+            {
+                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+                {
+                    shouldRetry = true;
+                    char presetBuf[16] = {0};
+                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+                          (uint32_t) captureResult.avgY);
+
+                    // photoInfo.usingRawFormat = 1;
+                }
+            }
+            else if (ldr > MAX_LIGHT_Y)
+            {
+                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
+                {
+                    shouldRetry = true;
+                    char presetBuf[16] = {0};
+                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
+                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
+                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
+                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
+                          (uint32_t) captureResult.avgY);
+                }
+
+                photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
+            }
+        }
+#endif // 0
+#endif // OUTPUT_CAMERA_DBG_INFO
+
+        // Notify to take next photo
+        pThis->TakePhotoCb(1, photoInfo, "", takingTime);
+
+        XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
+        hdrplus::hdrplus_pipeline pipeline;
+        std::vector<std::vector<uint8_t> > localFrames;
+        localFrames.swap(pByteArrays.get()->byteArrays);
+        pipeline.run_pipeline(localFrames, 0, rgb);
+        localFrames.clear();
+
+        XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
+
+        {
+            cv::Mat tempPic = convert16bit2_8bit_(rgb);
+            rgb = tempPic;
+        }
+
+        if (photoInfo.orientation > 0)
+        {
+            if (photoInfo.orientation == 1)
+            {
+                if (facing == ACAMERA_LENS_FACING_FRONT)
+                {
+                    cv::flip(rgb, rgb, 1);
+                }
+            }
+            else if (photoInfo.orientation == 2)
+            {
+                cv::Mat tempPic;
+                cv::transpose(rgb, tempPic);
+                cv::flip(tempPic, rgb, 1);
+            }
+            else if (photoInfo.orientation == 3)
+            {
+                if (facing == ACAMERA_LENS_FACING_FRONT)
+                {
+                    flip(rgb, rgb, 0);
+                }
+                else
+                {
+                    cv::flip(rgb, rgb, -1);
+                }
+            }
+            else if (photoInfo.orientation == 4)
+            {
+                cv::Mat tempPic;
+                cv::transpose(rgb, tempPic);
+                cv::flip(tempPic, rgb, 0);
+            }
+
+            XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
+        }
+        cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
+
+        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
+        if (res)
+        {
+            // TakePhotoCb(2, photoInfo, path, takingTime);
+        }
+    });
+
+    th.detach();
+
+    return true;
+}
+
+bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
+                                  std::vector<std::shared_ptr<ACameraMetadata> >& results,
+                                  uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
+{
+    time_t takingTime = time(NULL);
+    if (mPhotoInfo.remedy != 0)
+    {
+        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
+        {
+            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
+        }
+    }
+    mPhotoInfo.photoTime = takingTime;
+
+    vector osds;
+    osds.swap(mOsds);
+    PHOTO_INFO photoInfo = mPhotoInfo;
+    std::string path;
+    path.swap(mPath);
+
+    // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
+
+    acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
+    ACameraMetadata_const_entry e = { 0 };
+    camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
+    if (status == ACAMERA_OK)
+    {
+        facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
+    }
+
+    int sensorOrientation = 0;
+    {
+        ACameraMetadata_const_entry e = { 0 };
+        status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
+        if (status == ACAMERA_OK)
+        {
+            sensorOrientation = (int)e.data.i32[0];
+        }
+    }
+
+    bool turnOffOtg = (photoInfo.usbCamera != 0);
+    CPhoneCamera* pCamera = mCamera;
+    mCamera = NULL;
+
+    cv::Mat rgb;
+    std::vector<std::shared_ptr<hdrplus::MemFile> > rawFiles;
+    media_status_t mstatus;
+
+    if (photoInfo.usingRawFormat != 0)
+    {
+        for (int idx = 0; idx < frames.size(); idx++)
+        {
+            std::shared_ptr<AImage> spImage = frames[idx];
+            std::shared_ptr<ACameraMetadata> spResult = results[idx];
+
+            hdrplus::MemFile* rawImage = new hdrplus::MemFile();
+            rawFiles.push_back(std::shared_ptr<hdrplus::MemFile>(rawImage));
+            // rawImage->FromAImage(spImage.get(), characteristics.get(), spResult.get());
+
+            int32_t width = 0;
+            int32_t height = 0;
+            mstatus = AImage_getWidth(spImage.get(), &width);
+            mstatus = AImage_getHeight(spImage.get(), &height);
+
+            int32_t planeCount = 0;
+            mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
+            AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
+
+            uint8_t *planeData = NULL;
+            int planeDataLen = 0;
+            mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
+            ALOGD("Start Converting Dng");
+            DngCreator dngCreator(characteristics.get(), spResult.get());
+            dngCreator.writeInputBuffer(rawImage->content, planeData, planeDataLen, width, height, 0);
+            ALOGD("End Converting Dng");
+        }
+    }
+    else
+    {
+        if (results.size() == 1 && frames.size() == 1)
+        {
+            std::shared_ptr<ACameraMetadata> result = results[0];
+            std::shared_ptr<AImage> frame = frames[0];
+
+            int32_t format;
+            mstatus = AImage_getFormat(frame.get(), &format);
+
+            if (format == AIMAGE_FORMAT_YUV_420_888)
+            {
+                int32_t width;
+                int32_t height;
+                mstatus = AImage_getWidth(frame.get(), &width);
+                mstatus = AImage_getHeight(frame.get(), &height);
+
+                int32_t y_pixelStride = 0;
+                int32_t u_pixelStride = 0;
+                int32_t v_pixelStride = 0;
+                AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
+                AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
+                AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
+
+                int32_t y_rowStride = 0;
+                int32_t u_rowStride = 0;
+                int32_t v_rowStride = 0;
+                AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
+                AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
+                AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
+
+                uint8_t* y_data = 0;
+                uint8_t* u_data = 0;
+                uint8_t* v_data = 0;
+                int y_len = 0;
+                int u_len = 0;
+                int v_len = 0;
+                AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
+                AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
+                AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
+
+                if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
+                {
+                    // already nv21
+                    ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
+                }
+                else
+                {
+                    // construct nv21
+                    uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
                     {
-                int32_t width;
-                int32_t height;
-                mstatus = AImage_getWidth(frame.get(), &width);
-                mstatus = AImage_getHeight(frame.get(), &height);
-
-                int32_t y_pixelStride = 0;
-                int32_t u_pixelStride = 0;
-                int32_t v_pixelStride = 0;
-                AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
-                AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
-                AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
-
-                int32_t y_rowStride = 0;
-                int32_t u_rowStride = 0;
-                int32_t v_rowStride = 0;
-                AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
-                AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
-                AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
-
-                uint8_t* y_data = 0;
-                uint8_t* u_data = 0;
-                uint8_t* v_data = 0;
-                int y_len = 0;
-                int u_len = 0;
-                int v_len = 0;
-                AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
-                AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
-                AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
-
-                if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
+                        // Y
+                        uint8_t* yptr = nv21;
+                        for (int y = 0; y < height; y++)
                         {
-                    // already nv21
-                    ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
+                            const uint8_t* y_data_ptr = y_data + y_rowStride * y;
+                            for (int x = 0; x < width; x++)
+                            {
+                                yptr[0] = y_data_ptr[0];
+                                yptr++;
+                                y_data_ptr += y_pixelStride;
+                            }
                         }
-                else
+
+                        // UV
+                        uint8_t* uvptr = nv21 + width * height;
+                        for (int y = 0; y < height / 2; y++)
                         {
-                    // construct nv21
-                    uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
+                            const uint8_t* v_data_ptr = v_data + v_rowStride * y;
+                            const uint8_t* u_data_ptr = u_data + u_rowStride * y;
+                            for (int x = 0; x < width / 2; x++)
                             {
-                        // Y
-                        uint8_t* yptr = nv21;
-                        for (int y = 0; y < height; y++)
-                        {
-                            const uint8_t* y_data_ptr = y_data + y_rowStride * y;
-                            for (int x = 0; x < width; x++)
-                            {
-                                yptr[0] = y_data_ptr[0];
-                                yptr++;
-                                y_data_ptr += y_pixelStride;
-                            }
-                        }
-
-                        // UV
-                        uint8_t* uvptr = nv21 + width * height;
-                        for (int y = 0; y < height / 2; y++)
-                        {
-                            const uint8_t* v_data_ptr = v_data + v_rowStride * y;
-                            const uint8_t* u_data_ptr = u_data + u_rowStride * y;
-                            for (int x = 0; x < width / 2; x++)
-                            {
-                                uvptr[0] = v_data_ptr[0];
-                                uvptr[1] = u_data_ptr[0];
-                                uvptr += 2;
-                                v_data_ptr += v_pixelStride;
-                                u_data_ptr += u_pixelStride;
-                            }
-                        }
+                                uvptr[0] = v_data_ptr[0];
+                                uvptr[1] = u_data_ptr[0];
+                                uvptr += 2;
+                                v_data_ptr += v_pixelStride;
+                                u_data_ptr += u_pixelStride;
                             }
                         }
-
-                    ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
-
-                    delete[] nv21;
-                }
-            }
+
+                    ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
+
+                    delete[] nv21;
                 }
             }
+        }
+    }
 
-        frames.clear();
-        std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
-        m_threadClose.swap(closeThread);
-        if (closeThread.joinable())
-        {
-            closeThread.detach();
-        }
+    frames.clear();
+    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
+    m_threadClose.swap(closeThread);
+    if (closeThread.joinable())
+    {
+        closeThread.detach();
+    }
+
+    CPhoneDevice* pThis = this;
+    std::thread th([pThis, characteristics, results, photoInfo, osds, path, rgb, rawFiles, facing, sensorOrientation, ldr, takingTime]()mutable
+    {
+        std::string cameraInfo;
         if (photoInfo.outputDbgInfo != 0)
         {
             if (!results.empty())
@@ -1749,14 +2118,16 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
 #endif // OUTPUT_CAMERA_DBG_INFO
 
         // Notify to take next photo
-        TakePhotoCb(1, photoInfo, "", takingTime);
+        pThis->TakePhotoCb(1, photoInfo, "", takingTime);
 
         if (photoInfo.usingRawFormat != 0)
         {
-            XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+            XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
             hdrplus::hdrplus_pipeline pipeline;
             pipeline.run_pipeline(rawFiles, 0, rgb);
-            XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
+            rawFiles.clear();
+
+            XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
 
 #ifdef NDEBUG
             for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it)
@@ -1806,7 +2177,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
             cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
         }
 
-        bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
+        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
         if (res)
         {
             // TakePhotoCb(2, photoInfo, path, takingTime);
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index bfa6e16a..0310d2d2 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -161,6 +161,8 @@ public:
     virtual bool on_image(cv::Mat& rgb);
     virtual void on_error(const std::string& msg);
     virtual void onDisconnected(ACameraDevice* device);
+    virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
+    virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb);
     virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
 
 protected:
@@ -174,6 +176,8 @@ public:
     virtual void onImageAvailable(AImageReader* reader);
     virtual int32_t getOutputFormat() const;
 
+    virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb);
+    virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
     virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
 
 protected:
@@ -275,6 +279,8 @@ protected:
     std::string QueryCpuTemperature();
 
     bool OnImageReady(cv::Mat& mat);
+    bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb);
+    bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
     bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
     void onError(const std::string& msg);
     void onDisconnected(ACameraDevice* device);
diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp
index 323d0ae2..4002494b 100644
--- a/app/src/main/cpp/camera2/ndkcamera.cpp
+++ b/app/src/main/cpp/camera2/ndkcamera.cpp
@@ -138,10 +138,12 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
     mPreviewImageReader = NULL;
     mPreviewImageWindow = NULL;
     mPreviewOutputTarget = NULL;
+    mPreviewSessionOutput = NULL;
 
     mImageReader = NULL;
     mImageWindow = NULL;
     mOutputTarget = NULL;
+    mSessionOutput = NULL;
 
     camera_device = 0;
 
@@ -587,7 +589,7 @@ int NdkCamera::open(const std::string& cameraId) {
             status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
             status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
 
-            mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 2, &mImageReader);
+            mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures, &mImageReader);
             if (mstatus == AMEDIA_OK)
             {
                 AImageReader_ImageListener listener;
@@ -1170,7 +1172,7 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
         return;
     }
 
-    if (mLdr == ~0)
+    // if (mLdr == ~0)
     {
         uint8_t* y_data = 0;
         int y_len = 0;
@@ -1182,7 +1184,9 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
         uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
 #endif
         avgY = avgY / (uint64_t)y_len;
+        m_locker.lock();
         mLdr = avgY;
+        m_locker.unlock();
     }
 
     AImage_delete(image);
@@ -1190,7 +1194,12 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
     }
     else
     {
-        while (1)
+        uint32_t burstCaptures = m_params.burstCaptures;
+        if (burstCaptures == 0)
+        {
+            burstCaptures = 1;
+        }
+        if (burstCaptures == 1)
         {
             mstatus = AImageReader_acquireNextImage(reader, &image);
             if (mstatus != AMEDIA_OK)
@@ -1198,32 +1207,152 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
                 // https://stackoverflow.com/questions/67063562
                 if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
                 {
-                    if (mCaptureFrames.size() < m_params.burstCaptures)
-                    {
-                        XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
-                    }
+                    if (mCaptureFrames.size() < m_params.burstCaptures)
+                    {
+                        XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
+                    }
                 }
-                break;
+                return;
             }
 
+            int32_t format;
+            mstatus = AImage_getFormat(image, &format);
+
+            if (format == AIMAGE_FORMAT_YUV_420_888)
+            {
+                int32_t width;
+                int32_t height;
+                mstatus = AImage_getWidth(image, &width);
+                mstatus = AImage_getHeight(image, &height);
+
+                int32_t y_pixelStride = 0;
+                int32_t u_pixelStride = 0;
+                int32_t v_pixelStride = 0;
+                AImage_getPlanePixelStride(image, 0, &y_pixelStride);
+                AImage_getPlanePixelStride(image, 1, &u_pixelStride);
+                AImage_getPlanePixelStride(image, 2, &v_pixelStride);
+
+                int32_t y_rowStride = 0;
+                int32_t u_rowStride = 0;
+                int32_t v_rowStride = 0;
+                AImage_getPlaneRowStride(image, 0, &y_rowStride);
+                AImage_getPlaneRowStride(image, 1, &u_rowStride);
+                AImage_getPlaneRowStride(image, 2, &v_rowStride);
+
+                uint8_t* y_data = 0;
+                uint8_t* u_data = 0;
+                uint8_t* v_data = 0;
+                int y_len = 0;
+                int u_len = 0;
+                int v_len = 0;
+                AImage_getPlaneData(image, 0, &y_data, &y_len);
+                AImage_getPlaneData(image, 1, &u_data, &u_len);
+                AImage_getPlaneData(image, 2, &v_data, &v_len);
+
+                if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
+                {
+                    // already nv21
+                    ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
+                                      camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
+                }
+                else
+                {
+                    // construct nv21
+                    uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
+                    {
+                        // Y
+                        uint8_t* yptr = nv21;
+                        for (int y = 0; y < height; y++)
+                        {
+                            const uint8_t* y_data_ptr = y_data + y_rowStride * y;
+                            for (int x = 0; x < width; x++)
+                            {
+                                yptr[0] = y_data_ptr[0];
+                                yptr++;
+                                y_data_ptr += y_pixelStride;
+                            }
+                        }
+
+                        // UV
+                        uint8_t* uvptr = nv21 + width * height;
+                        for (int y = 0; y < height / 2; y++)
+                        {
+                            const uint8_t* v_data_ptr = v_data + v_rowStride * y;
+                            const uint8_t* u_data_ptr = u_data + u_rowStride * y;
+                            for (int x = 0; x < width / 2; x++)
+                            {
+                                uvptr[0] = v_data_ptr[0];
+                                uvptr[1] = u_data_ptr[0];
+                                uvptr += 2;
+                                v_data_ptr += v_pixelStride;
+                                u_data_ptr += u_pixelStride;
+                            }
+                        }
+                    }
+
+                    ConvertYUV21ToMat(nv21, width, height, mWidth, mHeight, camera_orientation,
+                                      camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
+
+                    delete[] nv21;
+                }
+            }
 
             m_photoTaken = true;
-            m_locker.lock();
-            mCaptureFrames.push_back(std::shared_ptr<AImage>(image, AImage_delete));
-            m_locker.unlock();
 
-            ALOGD("Capture Image Received");
+            AImage_delete(image);
+
+            std::shared_ptr<ACameraMetadata> result;
+            bool captureCompleted = false;
+            m_locker.lock();
+            if (!mCaptureResults.empty())
+            {
+                captureCompleted = true;
+                result = mCaptureResults[0];
+            }
+            m_locker.unlock();
+
+            if (captureCompleted)
+            {
+                onOneCapture(mCharacteristics, result, mLdr, mOneFrame);
+            }
         }
+        else
+        {
+            while (1)
+            {
+                mstatus = AImageReader_acquireNextImage(reader, &image);
+                if (mstatus != AMEDIA_OK)
+                {
+                    // https://stackoverflow.com/questions/67063562
+                    if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
+                    {
+                        if (mCaptureFrames.size() < m_params.burstCaptures)
+                        {
+                            XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
+                        }
+                    }
+                    break;
+                }
 
-        bool captureCompleted = false;
-        size_t expectedTimes = mCaptureRequests.size() - 1;
-        m_locker.lock();
-        captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
-        m_locker.unlock();
+                m_photoTaken = true;
+                m_locker.lock();
+                mCaptureFrames.push_back(std::shared_ptr<AImage>(image, AImage_delete));
+                m_locker.unlock();
+
+                ALOGD("Capture Image Received");
+            }
+
+            bool captureCompleted = false;
+            size_t expectedTimes = mCaptureRequests.size() - 1;
+            m_locker.lock();
+            captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
+            m_locker.unlock();
+
+            if (captureCompleted)
+            {
+                FireBurstCapture();
+            }
+        }
 
-        if (captureCompleted)
-        {
-            onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
-        }
     }
 }
 
@@ -1240,6 +1369,16 @@ bool NdkCamera::on_image(cv::Mat& rgb)
     return false;
 }
 
+bool NdkCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
+{
+    return false;
+}
+
+bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
+{
+    return false;
+}
+
 bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
 {
     return false;
@@ -1572,24 +1711,89 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
     }
     else
     {
+#ifdef _DEBUG
         uint64_t tid = getThreadIdOfULL();
         ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
+#endif
 
         ACameraMetadata* pCopy = ACameraMetadata_copy(result);
         bool captureCompleted = false;
         size_t expectedTimes = mCaptureRequests.size() - 1;
-        m_locker.lock();
-        mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
-        captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
-        m_locker.unlock();
 
-        if (captureCompleted)
-        {
-            onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
-        }
+        if (expectedTimes == 1)
+        {
+            std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
+            m_locker.lock();
+            mCaptureResults.push_back(captureResult);
+            captureCompleted = !mOneFrame.empty();
+            m_locker.unlock();
+
+            if (captureCompleted)
+            {
+                onOneCapture(mCharacteristics, captureResult, mLdr, mOneFrame);
+            }
+        }
+        else
+        {
+            m_locker.lock();
+            mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
+            captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
+            m_locker.unlock();
+
+            if (captureCompleted)
+            {
+                FireBurstCapture();
+            }
+        }
     }
 }
 
+void NdkCamera::FireBurstCapture()
+{
+    size_t expectedTimes = mCaptureRequests.size() - 1;
+    std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
+    uint32_t ldr;
+    std::vector<std::shared_ptr<AImage> > captureFrames;
+
+    m_locker.lock();
+    ldr = mLdr;
+    captureResults.swap(mCaptureResults);
+    captureFrames.swap(mCaptureFrames);
+    m_locker.unlock();
+
+    media_status_t mstatus;
+    std::vector<std::vector<uint8_t> > frames;
+    for (int idx = 0; idx < expectedTimes; idx++)
+    {
+        std::shared_ptr<AImage> spImage = captureFrames[idx];
+        std::shared_ptr<ACameraMetadata> spResult = captureResults[idx];
+
+        auto it = frames.insert(frames.end(), std::vector<uint8_t>());
+
+        int32_t width = 0;
+        int32_t height = 0;
+        mstatus = AImage_getWidth(spImage.get(), &width);
+        mstatus = AImage_getHeight(spImage.get(), &height);
+
+        int32_t planeCount = 0;
+        mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
+        AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
+
+        uint8_t *planeData = NULL;
+        int planeDataLen = 0;
+        mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
+        ALOGD("Start Converting Dng");
+        DngCreator dngCreator(mCharacteristics.get(), spResult.get());
+        dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
+        ALOGD("End Converting Dng");
+    }
+
+    captureFrames.clear();
+
+    onBurstCapture(mCharacteristics, captureResults, ldr, frames);
+}
+
 void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult)
 {
     camera_status_t status = ACAMERA_ERROR_BASE;
@@ -1912,7 +2116,6 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
         }
     }
 
-    // on_image((unsigned ch
 }
 }
diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h
index 96d5ec07..a09b5ee6 100644
--- a/app/src/main/cpp/camera2/ndkcamera.h
+++ b/app/src/main/cpp/camera2/ndkcamera.h
@@ -166,6 +166,9 @@ public:
     virtual void on_error(const std::string& msg);
     virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
     virtual void onDisconnected(ACameraDevice* device);
+
+    virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb);
+    virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
     virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
 
     void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
@@ -176,6 +179,8 @@ public:
 
     void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
 
+    void FireBurstCapture();
+
     uint32_t GetLdr() const
     {
         return mLdr;
@@ -252,6 +257,9 @@ protected:
     uint32_t mLdr;
     std::vector<std::shared_ptr<AImage> > mCaptureFrames;
 
+    cv::Mat mOneFrame;
+    std::vector<std::vector<uint8_t> > mRawFrames;
+
     ACameraCaptureSession* capture_session;
 
     // AImageReader* image_reader;
diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h b/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h
index 93230cf2..e2e5059f 100644
--- a/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h
+++ b/app/src/main/cpp/hdrplus/include/hdrplus/bayer_image.h
@@ -10,11 +10,29 @@
 namespace hdrplus
 {
+    class MemFile
+    {
+    public:
+        std::vector<uint8_t> content;
+
+        const std::vector<uint8_t>& GetConstData() const
+        {
+            return content;
+        }
+
+        std::vector<uint8_t>& GetData()
+        {
+            return content;
+        }
+    };
+
     class bayer_image
     {
     public:
         explicit bayer_image( const std::string& bayer_image_path );
-        explicit bayer_image( const std::vector<uint8_t>& bayer_image_content );
+        explicit bayer_image( const std::vector<uint8_t>& bayer_image_content );
+        explicit bayer_image( std::shared_ptr<MemFile> bayer_image_file );
+
         ~bayer_image() = default;
 
     std::pair<double, double> get_noise_params() const;
diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/burst.h b/app/src/main/cpp/hdrplus/include/hdrplus/burst.h
index bde3c04f..33d2f1cd 100644
--- a/app/src/main/cpp/hdrplus/include/hdrplus/burst.h
+++ b/app/src/main/cpp/hdrplus/include/hdrplus/burst.h
@@ -8,12 +8,14 @@
 namespace hdrplus
 {
+
 class burst
 {
   public:
     explicit burst( const std::string& burst_path, const std::string& reference_image_path );
     explicit burst( const std::vector<std::string>& burst_paths, int reference_image_index );
     explicit burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, int reference_image_index );
+    explicit burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index );
 
     ~burst() = default;
 
diff --git a/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h b/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h
index 3d6f478c..49e4ca79 100644
--- a/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h
+++ b/app/src/main/cpp/hdrplus/include/hdrplus/hdrplus_pipeline.h
@@ -21,6 +21,7 @@ class hdrplus_pipeline
     void run_pipeline( const std::string& burst_path, const std::string& reference_image_path );
     bool run_pipeline( const std::vector<std::string>& burst_paths, int reference_image_index, cv::Mat& finalImg );
     bool run_pipeline( const std::vector<std::vector<uint8_t> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
+    bool run_pipeline( const std::vector<std::shared_ptr<MemFile> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
 
     hdrplus_pipeline() = default;
     ~hdrplus_pipeline() = default;
diff --git a/app/src/main/cpp/hdrplus/src/bayer_image.cpp b/app/src/main/cpp/hdrplus/src/bayer_image.cpp
index 0d84199a..cc29ffb2 100644
--- a/app/src/main/cpp/hdrplus/src/bayer_image.cpp
+++ b/app/src/main/cpp/hdrplus/src/bayer_image.cpp
@@ -141,6 +141,74 @@ bayer_image::bayer_image( const std::vector<uint8_t>& bayer_image_content )
 #endif
 }
 
+bayer_image::bayer_image( std::shared_ptr<MemFile> bayer_image_file )
+{
+    libraw_processor = std::make_shared<LibRaw>();
+
+    // Open RAW image file
+    int return_code;
+    {
+        std::vector<uint8_t>& fileData = bayer_image_file->content;
+        if ( ( return_code = libraw_processor->open_buffer( (void *)(&fileData[0]), fileData.size() ) ) != LIBRAW_SUCCESS )
+        {
+            libraw_processor->recycle();
+#ifdef __ANDROID__
+            return;
+#else
+            throw std::runtime_error( std::string("Error opening RAW buffer: ") + libraw_strerror( return_code ) );
+#endif
+        }
+    }
+
+    // Unpack the raw image
+    if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
+    {
+#ifdef __ANDROID__
+        return;
+#else
+        throw std::runtime_error( std::string("Error unpacking RAW buffer: ") + libraw_strerror( return_code ) );
+#endif
+    }
+
+    // Get image basic info
+    width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
+    height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
+
+    // Read exif tags
+    Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(&bayer_image_file->content[0], bayer_image_file->content.size());
+    assert(image.get() != 0);
+    image->readMetadata();
+    Exiv2::ExifData &exifData = image->exifData();
+    if (exifData.empty()) {
+        std::string error = "No Exif data found in the file";
+        std::cout << error << std::endl;
+    }
+
+    white_level = exifData["Exif.Image.WhiteLevel"].toLong();
+    black_level_per_channel.resize( 4 );
+    black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
+    black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
+    black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
+    black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
+    iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
+
+    // Create CV mat
+    // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
+    // https://www.libraw.org/node/2141
+    raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
+
+    // 2x2 box filter
+    grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
+
+#ifndef NDEBUG
+    printf("%s::%s read bayer image with\n    width %zu\n    height %zu\n    iso %.3f\n    white level %d\n    black level %d %d %d %d\n", \
+        __FILE__, __func__, width, height, iso, white_level, \
+        black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
+    fflush( stdout );
+#endif
+}
+
 std::pair<double, double> bayer_image::get_noise_params() const
 {
     // Set ISO to 100 if not positive
diff --git a/app/src/main/cpp/hdrplus/src/burst.cpp b/app/src/main/cpp/hdrplus/src/burst.cpp
index 08ab324c..c0c83c89 100644
--- a/app/src/main/cpp/hdrplus/src/burst.cpp
+++ b/app/src/main/cpp/hdrplus/src/burst.cpp
@@ -248,4 +248,74 @@ burst::burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, in
 #endif
 }
 
+burst::burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index )
+{
+    // Number of images
+    num_images = bayer_image_files.size();
+
+    // Check that the reference image index is within range
+    reference_image_idx = -1;
+    if ( reference_image_index >= 0 && reference_image_index < bayer_image_files.size() )
+    {
+        reference_image_idx = reference_image_index;
+    }
+
+    if ( reference_image_idx == -1 )
+    {
+        return;
+        // throw std::runtime_error("Error reference image index is out of range " );
+    }
+
+#ifndef NDEBUG
+    printf("%s::%s reference image idx %d\n", \
+        __FILE__, __func__, reference_image_idx );
+#endif
+
+    // Get source bayer image
+    // Downsample original bayer image by 2x2 box filter
+    for ( const auto& bayer_image_file : bayer_image_files )
+    {
+        bayer_images.emplace_back( bayer_image_file );
+    }
+
+    // Pad information
+    int tile_size_bayer = 32;
+    int padding_top = tile_size_bayer / 2;
+    int padding_bottom = tile_size_bayer / 2 + \
+        ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
+          0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
+    int padding_left = tile_size_bayer / 2;
+    int padding_right = tile_size_bayer / 2 + \
+        ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
+          0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
+    padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
+
+    // Pad bayer image
+    for ( const auto& bayer_image_i : bayer_images )
+    {
+        cv::Mat bayer_image_pad_i;
+        cv::copyMakeBorder( bayer_image_i.raw_image, \
+                            bayer_image_pad_i, \
+                            padding_top, padding_bottom, padding_left, padding_right, \
+                            cv::BORDER_REFLECT );
+
+        // cv::Mat use internal reference count
+        bayer_images_pad.emplace_back( bayer_image_pad_i );
+        grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
+    }
+
+#ifndef NDEBUG
+    printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
+        __FILE__, __func__, \
+        bayer_images[ 0 ].height, \
+        bayer_images[ 0 ].width, \
+        bayer_images_pad[ 0 ].size().height, \
+        bayer_images_pad[ 0 ].size().width );
+    printf("%s::%s pad top %d, bottom %d, left %d, right %d\n", \
+        __FILE__, __func__, \
+        padding_top, padding_bottom, padding_left, padding_right );
+#endif
+}
+
 } // namespace hdrplus
diff --git a/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp b/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp
index 90307a8b..5edc0b75 100644
--- a/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp
+++ b/app/src/main/cpp/hdrplus/src/hdrplus_pipeline.cpp
@@ -102,4 +102,37 @@ bool hdrplus_pipeline::run_pipeline( \
     return true;
 }
 
+bool hdrplus_pipeline::run_pipeline( \
+    const std::vector<std::shared_ptr<MemFile> >& burst_files, \
+    int reference_image_index, cv::Mat& finalImg )
+{
+    // Create burst of images
+    burst burst_images( burst_files, reference_image_index );
+    std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
+#ifdef __ANDROID__
+    ALOGI("Finish loading images");
+#endif
+
+    // Run align
+    align_module.process( burst_images, alignments );
+#ifdef __ANDROID__
+    ALOGI("Finish align");
+#endif
+
+    // Run merging
+    merge_module.process( burst_images, alignments );
+#ifdef __ANDROID__
+    ALOGI("Finish merging");
+#endif
+
+    // Run finishing
+    finish_module.process( burst_images, finalImg );
+#ifdef __ANDROID__
+    ALOGI("Finish process");
+#endif
+
+    return true;
+}
+
 } // namespace hdrplus
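
For reference, a minimal usage sketch of the in-memory entry point this patch adds (illustration only, not part of the patch; RunHdrOnBuffers is a hypothetical helper name, and the hdrplus types are the ones declared in the headers above):

    #include <cstdint>
    #include <memory>
    #include <vector>
    #include <opencv2/core.hpp>
    #include <hdrplus/hdrplus_pipeline.h>

    cv::Mat RunHdrOnBuffers(std::vector<std::vector<uint8_t> >& dngBuffers)
    {
        // Wrap each in-memory DNG in a MemFile so LibRaw can read it via open_buffer().
        std::vector<std::shared_ptr<hdrplus::MemFile> > files;
        for (auto& buf : dngBuffers)
        {
            std::shared_ptr<hdrplus::MemFile> f = std::make_shared<hdrplus::MemFile>();
            f->content.swap(buf); // take ownership instead of copying multi-MB frames
            files.push_back(f);
        }

        cv::Mat finalImg;
        hdrplus::hdrplus_pipeline pipeline;
        // Frame 0 is the alignment reference, matching CPhoneDevice::onBurstCapture above.
        pipeline.run_pipeline(files, 0, finalImg);
        return finalImg;
    }

This mirrors what the new CPhoneDevice::onBurstCapture overload does with ByteArraysPointer: the DNG byte buffers are swapped (not copied) into MemFile objects, so the capture thread can hand the frames to the worker thread without duplicating them.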