diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp index c7dc9866..eccf6c69 100644 --- a/app/src/main/cpp/PhoneDevice.cpp +++ b/app/src/main/cpp/PhoneDevice.cpp @@ -47,6 +47,67 @@ namespace fs = std::filesystem; extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread); + +bool makeHdr(vector<float>& times, std::vector<std::string>& paths, cv::Mat& rgb) +{ + // Read images and exposure times + vector<cv::Mat> images; + + for (auto it = paths.cbegin(); it != paths.cend(); ++it) + { + cv::Mat im = cv::imread((*it).c_str()); + images.push_back(im); + } + // Align input images + // cout << "Aligning images ... " << endl; + cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB(); +#if 0 + alignMTB->process(images, images); +#endif + + // Obtain Camera Response Function (CRF) + // cout << "Calculating Camera Response Function (CRF) ... " << endl; + cv::Mat responseDebevec; + cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec(); + calibrateDebevec->process(images, responseDebevec, times); + + // Merge images into an HDR linear image + // cout << "Merging images into one HDR image ... "; + cv::Mat hdrDebevec; + cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec(); + mergeDebevec->process(images, hdrDebevec, times, responseDebevec); + // Save HDR image. + // imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec); + // cout << "saved hdrDebevec.hdr " << endl; + + { + std::vector<cv::Mat> empty; + empty.swap(images); + } + + // Tonemap using Reinhard's method to obtain 24-bit color image + // cout << "Tonemaping using Reinhard's method ... 
"; + cv::Mat ldrReinhard; + cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0); + tonemapReinhard->process(hdrDebevec, ldrReinhard); + hdrDebevec.release(); + + int type = ldrReinhard.type(); + ldrReinhard = ldrReinhard * 255; + + ldrReinhard.convertTo(rgb, CV_8U); + ldrReinhard.release(); + + return true; +} + +bool AndroidBitmap_CompressWriteFile(void *userContext, const void *data, size_t size) +{ + FILE* file = (FILE*)userContext; + size_t bytesWritten = fwrite(data, 1, size, file); + return bytesWritten == size; +} + #define WAKELOCK_NAME "NDK_WK_" // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their // ranges @@ -478,6 +539,8 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa mSetStaticIpMid = env->GetMethodID(classService, "setStaticNetwork", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + mConvertDngToPngMid = env->GetMethodID(classService, "convertDngToPng", "(Ljava/lang/String;Ljava/lang/String;)V"); + mCallSysCameraMid = env->GetMethodID(classService, "callSystemCamera", "(IJ)V"); env->DeleteLocalRef(classService); @@ -784,7 +847,7 @@ int CPhoneDevice::QueryBatteryVoltage(int retries) for (int idx = 0; idx < retries; idx++) { val = GpioControl::getBatteryBusVoltage(); // // BatVol - if (val >= 0) + if (val >= 0) { break; } @@ -1508,6 +1571,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< params.requestTemplate = mPhotoInfo.requestTemplate; params.awbMode = mPhotoInfo.awbMode; params.wait3ALocked = mPhotoInfo.wait3ALocked; + params.customHdr = mPhotoInfo.customHdr; params.burstRawCapture = mPhotoInfo.usingRawFormat; params.burstCaptures = mPhotoInfo.burstCaptures; if (params.requestTemplate <= 0 || params.requestTemplate > 5) @@ -2340,35 +2404,105 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi std::vector<std::vector<uint8_t> > localFrames; localFrames.swap(pByteArrays.get()->byteArrays); - - std::string outputPath = 
tmpDir + "output.bmp"; - size_t numberOfFrames = localFrames.size(); - std::vector<std::string> imagePaths; - for (int idx = 0; idx < localFrames.size(); idx++) + + if (photoInfo.customHdr) { - std::string imagePath = tmpDir + std::to_string(idx) + ".dng"; - std::vector<uint8_t>& frame = localFrames[idx]; - if (writeFile(imagePath, &frame[0], frame.size())) + std::vector<std::string> imagePaths; + std::vector<float> exposureTimes; + + for (int idx = 0; idx < localFrames.size(); idx++) { - imagePaths.push_back(imagePath); + ACameraMetadata_const_entry val = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(results[idx].get(), ACAMERA_SENSOR_EXPOSURE_TIME, &val); + int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; + + exposureTimes.push_back(exTime / 1000000000.0); + + std::string imagePath = tmpDir + std::to_string(idx) + ".dng"; + std::vector<uint8_t>& frame = localFrames[idx]; + if (writeFile(imagePath, &frame[0], frame.size())) + { + std::vector<uint8_t> empty; + empty.swap(frame); + } + + string pngPath = imagePath + ".png"; + + pThis->ConvertDngToPng(imagePath, pngPath); + imagePaths.push_back(pngPath); + +#if 0 + AImageDecoder* imageDecoder = NULL; + AImageDecoder_createFromBuffer(&frame[0], frame.size(), &imageDecoder); + + const AImageDecoderHeaderInfo* info = AImageDecoder_getHeaderInfo(imageDecoder); + AndroidBitmapInfo bmpInfo = { 0 }; + bmpInfo.flags = AImageDecoderHeaderInfo_getAlphaFlags(info); + bmpInfo.width = AImageDecoderHeaderInfo_getWidth(info); + bmpInfo.height = AImageDecoderHeaderInfo_getHeight(info); + bmpInfo.format = (AndroidBitmapFormat) AImageDecoderHeaderInfo_getAndroidBitmapFormat(info); + bmpInfo.stride = AImageDecoder_getMinimumStride(imageDecoder); // Image decoder does not + // use padding by default + int32_t fmt = ANDROID_BITMAP_FORMAT_RGBA_8888; + size_t stride = photoInfo.width * 4; + size_t size = stride * photoInfo.height; + + int32_t dataSpace = AImageDecoderHeaderInfo_getDataSpace(info); + + frame.resize(size); + + int result = 
AImageDecoder_decodeImage(imageDecoder, (void *)(&frame[0]), bmpInfo.stride, size); + AImageDecoder_delete(imageDecoder); + + if (result != ANDROID_IMAGE_DECODER_SUCCESS) + { + imagePath += ".png"; + FILE* file = fopen(imagePath.c_str(), "wb"); + AndroidBitmap_compress(&bmpInfo, dataSpace, &frame[0], ANDROID_BITMAP_COMPRESS_FORMAT_PNG, 100, file, AndroidBitmap_CompressWriteFile); + fclose(file); + std::vector<uint8_t> empty; + empty.swap(frame); + + imagePaths.push_back(imagePath); + } +#endif } - } - localFrames.clear(); + localFrames.clear(); - int exitCode = pThis->CallExecv(photoInfo.orientation, facing == ACAMERA_LENS_FACING_FRONT ? 1 : 0, outputPath, imagePaths); - for (auto it = imagePaths.cbegin(); it != imagePaths.cend(); ++it) - { - std::remove((*it).c_str()); + makeHdr(exposureTimes, imagePaths, rgb); } - - if (existsFile(outputPath)) + else { - rgb = cv::imread(outputPath); - std::remove(outputPath.c_str()); + std::string outputPath = tmpDir + "output.bmp"; + size_t numberOfFrames = localFrames.size(); + std::vector<std::string> imagePaths; + for (int idx = 0; idx < localFrames.size(); idx++) + { + std::string imagePath = tmpDir + std::to_string(idx) + ".dng"; + std::vector<uint8_t>& frame = localFrames[idx]; + if (writeFile(imagePath, &frame[0], frame.size())) + { + imagePaths.push_back(imagePath); + } + } + localFrames.clear(); + + int exitCode = pThis->CallExecv(photoInfo.orientation, facing == ACAMERA_LENS_FACING_FRONT ? 
1 : 0, outputPath, imagePaths); + for (auto it = imagePaths.cbegin(); it != imagePaths.cend(); ++it) + { + std::remove((*it).c_str()); + } + + if (existsFile(outputPath)) + { + rgb = cv::imread(outputPath); + std::remove(outputPath.c_str()); + } + + std::error_code errCode; + fs::remove_all(fs::path(tmpDir), errCode); } - std::error_code errCode; - fs::remove_all(fs::path(tmpDir), errCode); } #else // USING_EXEC_HDRP XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); @@ -3527,6 +3661,27 @@ void CPhoneDevice::SetStaticIp(const std::string& iface, const std::string& ip, } } +void CPhoneDevice::ConvertDngToPng(const std::string& dngPath, const std::string& pngPath) +{ + JNIEnv* env = NULL; + jboolean ret = JNI_FALSE; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); return; // NOTE(review): without this return, env is NULL below and NewStringUTF crashes + } + + jstring jdngPath = env->NewStringUTF(dngPath.c_str()); + jstring jpngPath = env->NewStringUTF(pngPath.c_str()); + env->CallVoidMethod(m_javaService, mConvertDngToPngMid, jdngPath, jpngPath); + + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } +} + int CPhoneDevice::GetIceData(IDevice::ICE_INFO *iceInfo, IDevice::ICE_TAIL *iceTail, SENSOR_PARAM *sensorParam) { m_tempData.instantaneous_windspeed = 0xff; diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h index ecd72cbf..ad685b6a 100644 --- a/app/src/main/cpp/PhoneDevice.h +++ b/app/src/main/cpp/PhoneDevice.h @@ -332,6 +332,7 @@ protected: int CallExecv(int rotation, int frontCamera, const std::string& outputPath, const std::vector<std::string>& images); void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway); + void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath); void SetStaticIp(); protected: @@ -364,6 +365,8 @@ protected: jmethodID mExecHdrplusMid; jmethodID mSetStaticIpMid; + jmethodID 
mConvertDngToPngMid; + jmethodID mCallSysCameraMid; std::string mPath; diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp index 1a9e2302..f63243eb 100644 --- a/app/src/main/cpp/camera2/ndkcamera.cpp +++ b/app/src/main/cpp/camera2/ndkcamera.cpp @@ -1878,6 +1878,33 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque requests.push_back(request->request); } + if (m_params.customHdr) + { + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val); + int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); + int sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + + uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF; + + XYLOG(XYLOG_SEVERITY_INFO, "HDR: Base Exp=%lld ISO=%d", exTime / 1000, sensitivity); + + if (exTime != -1 && sensitivity > 0) + { + for (int idx = 0; idx < burstCaptures; idx++) + { + ACaptureRequest_setEntry_u8(requests[idx], ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff); + int64_t expt = exTime * (idx + 1); + ACaptureRequest_setEntry_i64(requests[idx], ACAMERA_SENSOR_EXPOSURE_TIME, 1, &expt); + ACaptureRequest_setEntry_i32(requests[idx], ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + sensitivity = sensitivityRange.min_; + } + } + } + // ALOGW("Will Stop Repeating Request"); // status = ACameraCaptureSession_stopRepeating(capture_session); // ALOGW("Finished Repeating Request"); diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h index 6c17096c..9c2489cb 100644 --- a/app/src/main/cpp/camera2/ndkcamera.h +++ b/app/src/main/cpp/camera2/ndkcamera.h @@ -82,7 +82,8 @@ public: unsigned int zoom : 1; unsigned int wait3ALocked : 3; unsigned int burstRawCapture : 2; - unsigned int reserved : 16; + unsigned int customHdr : 1; + unsigned int reserved : 15; int64_t exposureTime; unsigned int sensitivity; int 
compensation; diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java index 459c915c..7de2ceaf 100644 --- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java +++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java @@ -62,6 +62,7 @@ import android.widget.Toast; import com.dev.devapi.api.SysApi; import com.xypower.common.FileDownloader; +import com.xypower.common.FilesUtils; import com.xypower.common.NetworkUtils; import com.xypower.common.MicroPhotoContext; import com.xypower.mpapp.adb.CameraAdb; @@ -177,10 +178,29 @@ public class MicroPhotoService extends Service { public MicroPhotoService() { } + public void convertDngToPng(String dngFile, String pngFile) { + ImageDecoder.Source src = ImageDecoder.createSource(new File(dngFile)); + Bitmap bmp = null; + FileOutputStream output = null; + + try { + bmp = ImageDecoder.decodeBitmap(src); + output = new FileOutputStream(new File(pngFile)); + bmp.compress(Bitmap.CompressFormat.PNG, 95, output); + } catch (Exception ex) { + ex.printStackTrace(); + } finally { + FilesUtils.closeFriendly(output); + if (bmp != null) { + bmp.recycle(); + } + } + } + @Override public void onTrimMemory(int level) { Log.w(TAG, "onTrimMemory level=" + level); - if (level >= ComponentCallbacks2.TRIM_MEMORY_RUNNING_CRITICAL) { + if (level >= ComponentCallbacks2.TRIM_MEMORY_BACKGROUND) { // Clear the caches. Note all pending requests will be removed too. 
final Context context = getApplicationContext(); try { @@ -188,7 +208,7 @@ public class MicroPhotoService extends Service { mHander.postDelayed(new Runnable() { @Override public void run() { - restartApp(context, MicroPhotoContext.PACKAGE_NAME_MPAPP, "TrimMemory"); + // restartApp(context, MicroPhotoContext.PACKAGE_NAME_MPAPP, "TrimMemory"); } }, 1000); } catch (Exception ex) { diff --git a/gradle.properties b/gradle.properties index 46b24ad1..717f7325 100644 --- a/gradle.properties +++ b/gradle.properties @@ -20,10 +20,13 @@ android.enableJetifier=true BUILD_TOOLS_VERSION=33.0.3 COMPILE_SDK_VERSION=33 -TARGET_SDK_VERSION=28 -COMPILE_MIN_SDK_VERSION=28 +TARGET_SDK_VERSION=30 +COMPILE_MIN_SDK_VERSION=30 -opencvsdk=D:/Workspace/deps/opencv-mobile-4.9.0-android +TARGET_SDK_VERSION_N938=28 +COMPILE_MIN_SDK_VERSION_N938=28 + +opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android coreroot=D:/Workspace/Github/xymp/xymp/Core hdrplusroot=D:/Workspace/deps/hdrplus_libs halideroot=D:/Workspace/deps/Halide/18.0.0