实现另外一种hdrplus

TempBranch
Matthew 8 months ago
parent 5213b007d6
commit 78b00b42e9

@ -39,7 +39,7 @@ android {
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security -fopenmp'
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot, "-DHALIDE_ROOT=" + halideroot
abiFilters 'arm64-v8a', 'armeabi-v7a'
// setAbiFilters(['arm64-v8a'])
}

@ -122,10 +122,17 @@ find_package(OpenMP REQUIRED)
include_directories( ${CMAKE_CURRENT_SOURCE_DIR}/hdrplus/include )
# include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/hdrplus2)
include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(HDRPLUS_SOURCES
SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp
hdrplus/src/bayer_image.cpp
hdrplus/src/burst.cpp
@ -133,9 +140,15 @@ SET(HDRPLUS_SOURCES
hdrplus/src/hdrplus_pipeline.cpp
hdrplus/src/merge.cpp
hdrplus/src/params.cpp
)
SET(HDRPLUS2_SOURCES
hdrplus2/src/HDRPlus.cpp
hdrplus2/src/Burst.cpp
hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
# SET(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
@ -338,10 +351,12 @@ target_link_libraries( libhdrp.so PUBLIC -fopenmp -static-openmp
${HDRPLUS_LIBS}
)
else(USING_EXEC_HDRP)
SET(HDRPLUS_SOURCES_EMBED ${HDRPLUS_SOURCES} )
SET(HDRPLUS_LIBS_EMBED ${HDRPLUS_LIBS} )
endif()
SET(HDRPLUS_SOURCES_EMBED ${HDRPLUS2_SOURCES} )
SET(HDRPLUS_LIBS_EMBED ${HDRPLUS2_LIBS} )
add_library( # Sets the name of the library.
microphoto

@ -27,6 +27,7 @@
#ifdef USING_HDRPLUS
#include <hdrplus/hdrplus_pipeline.h>
#include <hdrplus2/include/HDRPlus.h>
#endif
#include <fcntl.h>
@ -1866,6 +1867,59 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
#ifdef USING_EXEC_HDRP
if (photoInfo.usingNewHdrplus)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
std::vector<std::vector<uint8_t> > localFrames;
localFrames.swap(pByteArrays.get()->byteArrays);
doHdrPlus(localFrames, rgb);
localFrames.clear();
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
{
cv::Mat tempPic = convert16bit2_8bit_(rgb);
rgb = tempPic;
}
if (photoInfo.orientation > 0)
{
if (photoInfo.orientation == 1)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
cv::flip(rgb, rgb, 1);
}
}
else if (photoInfo.orientation == 2)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 1);
}
else if (photoInfo.orientation == 3)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
flip(rgb, rgb, 0);
}
else
{
cv::flip(rgb, rgb, -1);
}
}
else if ((photoInfo.orientation % 4) == 0)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 0);
}
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
}
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
}
else
{
uint64_t uniqueId = pThis->m_uniqueIdFeed.fetch_add(1);

@ -110,6 +110,48 @@ void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureR
((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
// Maps an NDK request-template id onto the matching
// ACAMERA_CONTROL_CAPTURE_INTENT_* value; unrecognized templates
// fall back to STILL_CAPTURE.
inline uint8_t GetCaptureIntent(ACameraDevice_request_template templateId)
{
    /*
    ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM = 0,
    ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW = 1,
    ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2,
    ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3,
    ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4,
    ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5,
    ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
    ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
    */
    switch (templateId)
    {
    case TEMPLATE_PREVIEW:          // = 1
        return ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
    case TEMPLATE_STILL_CAPTURE:    // = 2
        return ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    case TEMPLATE_RECORD:           // = 3
        return ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    case TEMPLATE_VIDEO_SNAPSHOT:   // = 4
        return ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    case TEMPLATE_ZERO_SHUTTER_LAG: // = 5
        return ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
    case TEMPLATE_MANUAL:           // = 6
        return ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL;
    default:
        return ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    }
}
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
camera_facing = 0;
@ -265,6 +307,8 @@ int NdkCamera::open(const std::string& cameraId) {
DisplayDimension disp(mWidth, mHeight);
DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
int32_t previewWidth = 0;
int32_t previewHeight = 0;
ALOGD("Start ACameraManager_getCameraIdList");
{
@ -322,24 +366,52 @@ int NdkCamera::open(const std::string& cameraId) {
if (input) continue;
int32_t format = e.data.i32[i + 0];
if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
if (format == AIMAGE_FORMAT_RAW16)
{
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
if (mFinalOutputFormat == AIMAGE_FORMAT_RAW16)
{
if (res.width() >= mWidth && res.height() >= mHeight)
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (res > disp)
{
temp = res;
foundIt = true;
foundRes = res;
}
continue;
}
}
else if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
if (previewWidth == 0 || previewHeight == 0)
{
previewWidth = e.data.i32[i + 1];
previewHeight = e.data.i32[i + 2];
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
if (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888)
{
foundIt = true;
foundRes = res;
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
}
@ -598,7 +670,8 @@ int NdkCamera::open(const std::string& cameraId) {
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
AASSERT(mstatus == AMEDIA_OK, "Failed to call AImageReader_new preview, status=%d", mstatus);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@ -614,6 +687,7 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
AASSERT(mstatus == AMEDIA_OK, "Failed to call AImageReader_new, status=%d", mstatus);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@ -624,8 +698,11 @@ int NdkCamera::open(const std::string& cameraId) {
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status);
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
@ -672,7 +749,7 @@ int NdkCamera::open(const std::string& cameraId) {
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@ -853,6 +930,7 @@ int NdkCamera::open(const std::string& cameraId) {
camera_capture_session_state_callbacks.onReady = ::onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureSession, status=%d", status);
ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks;
capture_session_capture_callbacks.context = this;
@ -865,7 +943,7 @@ int NdkCamera::open(const std::string& cameraId) {
capture_session_capture_callbacks.onCaptureBufferLost = 0;
status = ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId));
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_setRepeatingRequest, status=%d", status);
ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId);
m_startTime = GetMicroTimeStamp();
@ -894,13 +972,14 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureRequest, status=%d", status);
ACaptureRequest_setUserContext(request->request, request);
// uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@ -1068,6 +1147,7 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
}
status = ACaptureRequest_addTarget(request->request, request->imageTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureRequest_addTarget, status=%d", status);
// status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
// status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
@ -1665,6 +1745,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
//XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
AASSERT(status == ACAMERA_OK, "Failed to call PRECAPTURE_TRIGGER, status=%d", status);
readyForCapture = false;
numberOfPrecaptures = 0;
@ -1775,7 +1856,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
// ALOGW("Will Stop Repeating Request");
status = ACameraCaptureSession_stopRepeating(capture_session);
// status = ACameraCaptureSession_stopRepeating(capture_session);
// ALOGW("Finished Repeating Request");
ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
@ -1791,6 +1872,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
int numberOfRequests = requests.size();
status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
numberOfRequests, &requests[0], &sequenceId);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status);
ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
for (int idx = 1; idx < mCaptureRequests.size(); idx++)
@ -1814,9 +1896,9 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
bool captureCompleted = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
if (expectedTimes == 1)
{
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = !mOneFrame.empty();
@ -1830,7 +1912,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
else
{
m_locker.lock();
mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
m_locker.unlock();
@ -1930,9 +2012,13 @@ void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetada
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d PhotoTaken=%d", session, request, failure->reason, m_photoTaken ? 1 : 0);
char msg[32] = { 0 };
if (failure->sequenceId == mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId)
{
return;
}
char msg[64] = { 0 };
snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
if (!m_photoTaken)
{

File diff suppressed because it is too large Load Diff

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
// Self-registration shim for the AOT-compiled `hdrplus_pipeline` Halide
// filter: a file-local static object constructed during static
// initialization hands the filter's argv entry point and metadata to
// halide_register_argv_and_metadata(). Presumably emitted per-ABI by the
// Halide AOT generator -- TODO confirm which ABI directory this copy is for.
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
    int (*filter_argv_call)(void **),
    const struct halide_filter_metadata_t *filter_metadata,
    const char * const *extra_key_value_pairs
);
}

// Symbols emitted by the AOT compilation of hdrplus_pipeline.
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}

#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC

namespace halide_nsreg_hdrplus_pipeline {
namespace {

struct Registerer {
    Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
        halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
        halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
    }
};

// The constructor runs (and registers the filter) at library load time.
static Registerer registerer;

} // namespace
} // halide_nsreg_hdrplus_pipeline

File diff suppressed because it is too large Load Diff

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
// Second per-ABI copy of the hdrplus_pipeline self-registration shim (see
// the sibling ABI directory): a file-local static object registers the
// AOT-compiled filter's argv entry point and metadata at load time.
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
    int (*filter_argv_call)(void **),
    const struct halide_filter_metadata_t *filter_metadata,
    const char * const *extra_key_value_pairs
);
}

// Symbols emitted by the AOT compilation of hdrplus_pipeline.
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}

#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC

namespace halide_nsreg_hdrplus_pipeline {
namespace {

struct Registerer {
    Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
        halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
        halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
    }
};

// The constructor runs (and registers the filter) at load time.
static Registerer registerer;

} // namespace
} // halide_nsreg_hdrplus_pipeline

@ -0,0 +1,13 @@
// Public entry points of the HDR+ (hdrplus2) pipeline.
// Note: the previous guard `__HDRPLUS__` used a double-underscore name,
// which is reserved for the implementation ([lex.name]).
#ifndef HDRPLUS_H_
#define HDRPLUS_H_

#include <cstdint>            // uint8_t (was relied on transitively)
#include <string>
#include <vector>

#include <opencv2/opencv.hpp> // all opencv header

// Runs the HDR+ pipeline on the raw files `in_names` under `dir_path`
// and writes the result as `out_name`. Returns 0 on success.
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names);

// Runs the HDR+ pipeline on a burst of in-memory raw images (one encoded
// raw file per inner vector) and stores the merged 8-bit, 3-channel
// result in `mat`. Returns true on success.
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat);

#endif // HDRPLUS_H_

@ -0,0 +1,39 @@
#include "Burst.h"
// Packs every raw frame of the burst into one (width, height, frame)
// uint16 buffer; returns an empty buffer when the burst holds no frames.
// Fix: loop index is size_t (was int, signed/unsigned comparison against
// Raws.size()), and the size_t -> int conversions are made explicit.
Halide::Runtime::Buffer<uint16_t> Burst::ToBuffer() const {
  if (Raws.empty()) {
    return Halide::Runtime::Buffer<uint16_t>();
  }
  Halide::Runtime::Buffer<uint16_t> result(GetWidth(), GetHeight(),
                                           static_cast<int>(Raws.size()));
  for (size_t i = 0; i < Raws.size(); ++i) {
    // Each frame occupies its own slice along the third dimension.
    auto resultSlice = result.sliced(2, static_cast<int>(i));
    Raws[i].CopyToBuffer(resultSlice);
  }
  return result;
}
// Fills `buffer` with the packed (width, height, frame) burst from ToBuffer().
void Burst::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
  const auto packed = ToBuffer();
  buffer.copy_from(packed);
}
std::vector<RawImage> Burst::LoadRaws(const std::vector< std::vector<uint8_t> >& images) {
std::vector<RawImage> result;
for (const auto &img : images) {
result.emplace_back(&img[0], img.size());
}
return result;
}
// Loads each file named in `inputs` from `dirPath` into a RawImage.
std::vector<RawImage> Burst::LoadRaws(const std::string &dirPath,
                                      std::vector<std::string> &inputs) {
  std::vector<RawImage> result;
  for (const auto &name : inputs) {
    result.emplace_back(dirPath + "/" + name);
  }
  return result;
}
// Returns the i-th raw frame. Fix: uses at() so an out-of-range index
// fails fast with std::out_of_range instead of being undefined behavior.
const RawImage &Burst::GetRaw(const size_t i) const { return this->Raws.at(i); }

@ -0,0 +1,76 @@
#pragma once
#include "InputSource.h"
#include <hdrplus_pipeline.h>
#include <string>
#include <vector>
// A burst of raw frames to be merged by the HDR+ pipeline. Frames come
// either from files in a directory or from encoded raw buffers already in
// memory. All Get* accessors delegate to the first frame and return a
// sentinel (-1 / CFA_UNKNOWN / empty buffer) when the burst is empty.
class Burst {
public:
    // Loads every file named in `inputs` from `dir_path`.
    Burst(std::string dir_path, std::vector<std::string> inputs)
        : Dir(std::move(dir_path)), Inputs(std::move(inputs)),
          Raws(LoadRaws(Dir, Inputs))
    {
    }

    // Decodes one RawImage per encoded raw file in `images`.
    Burst(const std::vector< std::vector<uint8_t> >& images)
        : Raws(LoadRaws(images))
    {
    }

    ~Burst() = default;

    // Memberwise copy. Fix: removed the leftover debug local `int aa = 0;`
    // and switched to the initializer list.
    Burst(const Burst& src)
        : Dir(src.Dir), Inputs(src.Inputs), Raws(src.Raws)
    {
    }

    int GetWidth() const { return Raws.empty() ? -1 : Raws[0].GetWidth(); }
    int GetHeight() const { return Raws.empty() ? -1 : Raws[0].GetHeight(); }

    int GetBlackLevel() const
    {
        return Raws.empty() ? -1 : Raws[0].GetScalarBlackLevel();
    }

    int GetWhiteLevel() const {
        return Raws.empty() ? -1 : Raws[0].GetWhiteLevel();
    }

    WhiteBalance GetWhiteBalance() const {
        return Raws.empty() ? WhiteBalance{-1, -1, -1, -1}
                            : Raws[0].GetWhiteBalance();
    }

    CfaPattern GetCfaPattern() const {
        return Raws.empty() ? CfaPattern::CFA_UNKNOWN : Raws[0].GetCfaPattern();
    }

    Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const {
        return Raws.empty() ? Halide::Runtime::Buffer<float>()
                            : Raws[0].GetColorCorrectionMatrix();
    }

    // Packs all frames into one (width, height, frame) uint16 buffer.
    Halide::Runtime::Buffer<uint16_t> ToBuffer() const;
    // Copies the packed burst into a caller-provided buffer.
    void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;
    // Access to an individual frame (bounds behavior per the .cpp).
    const RawImage &GetRaw(const size_t i) const;

private:
    std::string Dir;
    std::vector<std::string> Inputs;
    std::vector<RawImage> Raws;

private:
    static std::vector<RawImage> LoadRaws(const std::string &dirPath,
                                          std::vector<std::string> &inputs);
    static std::vector<RawImage> LoadRaws(const std::vector< std::vector<uint8_t> >& images);
};

@ -0,0 +1,137 @@
#include <fstream>
#include <iostream>
#include <stdio.h>
// #define STB_IMAGE_WRITE_IMPLEMENTATION
// #include <include/stb_image_write.h>
#include <hdrplus_pipeline.h>
#include "Burst.h"
#include <include/HDRPlus.h>
// Intentionally empty definition satisfying the reference made by the
// machine-generated hdrplus_pipeline registration unit. NOTE(review): this
// app appears to call the pipeline directly (see HDRPlus::process), so the
// registry callback is a no-op here -- confirm no other code relies on the
// filter registry being populated.
extern "C" void halide_register_argv_and_metadata(
    int (*filter_argv_call)(void **),
    const struct halide_filter_metadata_t *filter_metadata,
    const char *const *extra_key_value_pairs) {
}
/*
* HDRPlus Class -- Houses file I/O, defines pipeline attributes and calls
* processes main stages of the pipeline.
*/
// Holds the burst plus tone-mapping parameters and drives one run of the
// AOT-compiled hdrplus_pipeline over it.
class HDRPlus {
    // Non-owning reference: the Burst passed to the constructor must
    // outlive this HDRPlus instance.
    const Burst &burst;

public:
    // Tone-mapping parameters forwarded verbatim to hdrplus_pipeline.
    const Compression c;
    const Gain g;

    HDRPlus(Burst& burst, const Compression c, const Gain g)
        : burst(burst), c(c), g(g)
    {
    }

    // Runs the merge/finish pipeline over the whole burst.
    // Returns an 8-bit buffer whose dimensions end up ordered
    // (x, y, channel) after the two transposes below.
    // Throws std::invalid_argument if the burst packs to fewer than
    // two frames.
    Halide::Runtime::Buffer<uint8_t> process() {
        const int width = burst.GetWidth();
        const int height = burst.GetHeight();
        // Allocated as (channel, x, y); transposed after the pipeline runs.
        Halide::Runtime::Buffer<uint8_t> output_img(3, width, height);
        std::cerr << "Black point: " << burst.GetBlackLevel() << std::endl;
        std::cerr << "White point: " << burst.GetWhiteLevel() << std::endl;
        const WhiteBalance wb = burst.GetWhiteBalance();
        std::cerr << "RGGB: " << wb.r << " " << wb.g0 << " " << wb.g1 << " " << wb.b
                  << std::endl;
        // Burst::ToBuffer packs frames along dimension 2, so extent(2) is
        // the frame count: the pipeline needs at least two frames to merge.
        Halide::Runtime::Buffer<uint16_t> imgs = burst.ToBuffer();
        if (imgs.dimensions() != 3 || imgs.extent(2) < 2) {
            throw std::invalid_argument(
                "The input of HDRPlus must be a 3-dimensional buffer with at least "
                "two channels.");
        }
        const int cfa_pattern = static_cast<int>(burst.GetCfaPattern());
        auto ccm = burst.GetColorCorrectionMatrix();
        hdrplus_pipeline(imgs, burst.GetBlackLevel(), burst.GetWhiteLevel(), wb.r,
                         wb.g0, wb.g1, wb.b, cfa_pattern, ccm, c, g, output_img);
        // transpose to account for interleaved layout
        output_img.transpose(0, 1);
        output_img.transpose(1, 2);
        return output_img;
    }

#if 0
    // Disabled: PNG output path (depends on stb_image_write, see the
    // commented-out include at the top of this file).
    static bool save_png(const std::string &dir_path, const std::string &img_name,
                         const Halide::Runtime::Buffer<uint8_t> &img) {
        const std::string img_path = dir_path + "/" + img_name;
        const int stride_in_bytes = img.width() * img.channels();
        if (!stbi_write_png(img_path.c_str(), img.width(), img.height(),
                            img.channels(), img.data(), stride_in_bytes)) {
            std::cerr << "Unable to write output image '" << img_name << "'"
                      << std::endl;
            return false;
        }
        return true;
    }
#endif
};
// Runs the HDR+ pipeline on a burst of in-memory encoded raw files and
// stores the merged result in `mat` as an 8-bit 3-channel cv::Mat
// (height x width). Always returns true; pipeline failures surface as
// exceptions thrown from HDRPlus::process().
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat)
{
    // Fixed tone-mapping parameters (same values as the disabled
    // file-based variant below).
    Compression c = 3.8f;
    Gain g = 1.1f;
    Burst burst(images);
    HDRPlus hdr_plus(burst, c, g);
    Halide::Runtime::Buffer<uint8_t> outputHdr = hdr_plus.process();
    int width = outputHdr.width();
    int height = outputHdr.height();
    int channels = outputHdr.channels();
    int jch = 0;
    // Pixel-by-pixel copy from the Halide buffer (indexed x, y, channel)
    // into the OpenCV Mat.
    // NOTE(review): mat is CV_8UC3 but is addressed via at<uchar> with a
    // flattened column index (jch + n); this assumes channels == 3 and a
    // dense row layout -- confirm the pipeline can never emit a different
    // channel count.
    mat = cv::Mat::zeros(height, width, CV_8UC3);
    for (int i = 0; i < height; ++i)
    {
        jch = 0;
        for (int j = 0; j < width; ++j)
        {
            for (int n = 0; n < channels; ++n)
            {
                mat.at<uchar>(i, jch + n) = (uchar)outputHdr(j, i, n);
            }
            jch += channels;
        }
    }
    // if (!HDRPlus::save_png(dir_path, out_name, output)) {
    return true;
}
#if 0
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names) {
Compression c = 3.8f;
Gain g = 1.1f;
Burst burst(dir_path, in_names);
HDRPlus hdr_plus(burst, c, g);
Halide::Runtime::Buffer<uint8_t> output = hdr_plus.process();
if (!HDRPlus::save_png(dir_path, out_name, output)) {
return EXIT_FAILURE;
}
return 0;
}
#endif

@ -0,0 +1,152 @@
#include "InputSource.h"
#include <algorithm>
#include <unordered_map>
#include "LibRaw2DngConverter.h"
// Opens and unpacks one raw file from disk via LibRaw.
// NOTE(review): every throw is compiled out (#if 0), so on failure the
// object is still constructed with unusable LibRaw state and the error
// only appears on stderr -- confirm this soft-fail policy is intended.
RawImage::RawImage(const std::string &path)
    : Path(path), RawProcessor(std::make_shared<LibRaw>()) {
    // TODO: Check LibRaw parametres.
    // RawProcessor->imgdata.params.X = Y;
    std::cerr << "Opening " << path << std::endl;
    // Stage 1: parse container/metadata.
    if (int err = RawProcessor->open_file(path.c_str())) {
        std::cerr << "Cannot open file " << path
                  << " error: " << libraw_strerror(err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
    // Stage 2: decode the raw sensor data.
    if (int err = RawProcessor->unpack()) {
        std::cerr << "Cannot unpack file " << path
                  << " error: " << libraw_strerror(err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
    // Stage 3: build the per-pixel image representation.
    if (int ret = RawProcessor->raw2image()) {
        std::cerr << "Cannot do raw2image on " << path
                  << " error: " << libraw_strerror(ret) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
}
// Decodes one encoded raw file already held in memory (`data`/`length`).
// Same staging and same soft-fail error policy as the file constructor
// above: throws are compiled out, failures are only logged to stderr.
RawImage::RawImage(const uint8_t* data, size_t length)
    : RawProcessor(std::make_shared<LibRaw>())
{
    std::cerr << "Opening raw from memory" << std::endl;
    // Stage 1: parse container/metadata from the buffer.
    if (int err = RawProcessor->open_buffer((void *)data, length)) {
        std::cerr << "Cannot open raw from memory" << " error: " << libraw_strerror(err) << std::endl;
#if 0
        throw std::runtime_error("Error opening raw");
#endif
    }
    // Stage 2: decode the raw sensor data.
    if (int err = RawProcessor->unpack()) {
        std::cerr << "Cannot unpack raw from memory " << " error: " << libraw_strerror(err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
    // Stage 3: build the per-pixel image representation.
    if (int ret = RawProcessor->raw2image()) {
        std::cerr << "Cannot do raw2image" << " error: " << libraw_strerror(ret) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
}
// White balance from the camera multipliers, normalized so that both
// green channels equal 1.
WhiteBalance RawImage::GetWhiteBalance() const {
  const auto mul = RawProcessor->imgdata.color.cam_mul;
  const float green = mul[1];
  // r and b are scaled by the green multiplier; g0 == g1 == 1 by construction.
  return WhiteBalance{mul[0] / green, 1.f, 1.f, mul[2] / green};
}
void RawImage::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
  // Copies the sensor's active area into `buffer`: the full raw frame
  // (including masked borders) is wrapped in a Halide buffer whose origin
  // is then shifted by the top/left margins, so copy_from() starts reading
  // at the active area.
  const auto image_data = (uint16_t *)RawProcessor->imgdata.rawdata.raw_image;
  const auto raw_width = RawProcessor->imgdata.rawdata.sizes.raw_width;
  const auto raw_height = RawProcessor->imgdata.rawdata.sizes.raw_height;
  const auto top = RawProcessor->imgdata.rawdata.sizes.top_margin;
  const auto left = RawProcessor->imgdata.rawdata.sizes.left_margin;
  Halide::Runtime::Buffer<uint16_t> raw_buffer(image_data, raw_width,
                                               raw_height);
  buffer.copy_from(raw_buffer.translated({-left, -top}));
}
// Serializes this image to a DNG file at `output_path`, using `buffer`
// as the pixel data.
void RawImage::WriteDng(const std::string &output_path,
                        const Halide::Runtime::Buffer<uint16_t> &buffer) const {
  LibRaw2DngConverter dng(*this);
  dng.SetBuffer(buffer);
  dng.Write(output_path);
}
// Per-channel black levels: LibRaw's scalar base `black` plus the
// per-channel offsets cblack[0..3]. When cblack[4] == cblack[5] == 2,
// LibRaw instead encodes a 2x2 per-CFA-cell black pattern at cblack[6..9],
// which replaces (not adds to) the per-channel values.
std::array<float, 4> RawImage::GetBlackLevel() const {
  // See https://www.libraw.org/node/2471
  const auto raw_color = RawProcessor->imgdata.color;
  const auto base_black_level = static_cast<float>(raw_color.black);
  std::array<float, 4> black_level = {
      base_black_level + static_cast<float>(raw_color.cblack[0]),
      base_black_level + static_cast<float>(raw_color.cblack[1]),
      base_black_level + static_cast<float>(raw_color.cblack[2]),
      base_black_level + static_cast<float>(raw_color.cblack[3])};
  if (raw_color.cblack[4] == 2 && raw_color.cblack[5] == 2) {
    for (int x = 0; x < raw_color.cblack[4]; ++x) {
      for (int y = 0; y < raw_color.cblack[5]; ++y) {
        // Row-major index into the 2x2 pattern.
        const auto index = y * 2 + x;
        black_level[index] = raw_color.cblack[6 + index];
      }
    }
  }
  return black_level;
}
int RawImage::GetScalarBlackLevel() const {
const auto black_level = GetBlackLevel();
return static_cast<int>(
*std::min_element(black_level.begin(), black_level.end()));
}
// Returns the sensor's 2x2 CFA tile as four bytes with values 0/1/2
// (R/G/B), sampled at positions (0,0), (0,1), (1,0), (1,1).
// Throws std::out_of_range (via map.at) if LibRaw reports a color letter
// outside r/g/b for any cell.
std::string RawImage::GetCfaPatternString() const {
  // Maps LibRaw's color-description letters to compact 0/1/2 codes.
  static const std::unordered_map<char, char> CDESC_TO_CFA = {
      {'R', 0}, {'G', 1}, {'B', 2}, {'r', 0}, {'g', 1}, {'b', 2}};
  const auto &cdesc = RawProcessor->imgdata.idata.cdesc;
  // COLOR(row, col) yields the index into cdesc for that CFA cell.
  return {CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 0)]),
          CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 1)]),
          CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 0)]),
          CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 1)])};
}
// Maps the 2x2 CFA code string (see GetCfaPatternString: bytes 0/1/2 for
// R/G/B) onto the pipeline's CfaPattern enum.
// Throws std::invalid_argument for anything that is not one of the four
// Bayer layouts. Fix: removed the unreachable `return CFA_UNKNOWN;` that
// followed the throw.
CfaPattern RawImage::GetCfaPattern() const {
  const auto cfa_pattern = GetCfaPatternString();
  if (cfa_pattern == std::string{0, 1, 1, 2}) {
    return CfaPattern::CFA_RGGB;
  }
  if (cfa_pattern == std::string{1, 0, 2, 1}) {
    return CfaPattern::CFA_GRBG;
  }
  if (cfa_pattern == std::string{2, 1, 1, 0}) {
    return CfaPattern::CFA_BGGR;
  }
  if (cfa_pattern == std::string{1, 2, 0, 1}) {
    return CfaPattern::CFA_GBRG;
  }
  throw std::invalid_argument("Unsupported CFA pattern: " + cfa_pattern);
}
// Returns the transpose of the top-left 3x3 of LibRaw's rgb_cam matrix
// as a Halide buffer.
Halide::Runtime::Buffer<float> RawImage::GetColorCorrectionMatrix() const {
  const auto raw_color = RawProcessor->imgdata.color;
  Halide::Runtime::Buffer<float> ccm(3, 3);
  for (int row = 0; row < 3; ++row) {
    for (int col = 0; col < 3; ++col) {
      // ccm(col, row) = rgb_cam[row][col] -- i.e. ccm is rgb_cam transposed.
      ccm(col, row) = raw_color.rgb_cam[row][col];
    }
  }
  return ccm;
}

@ -0,0 +1,47 @@
#pragma once
#include <array>
#include <string>
#include <libraw/libraw.h>
#include "finish.h"
#include <HalideBuffer.h>
// Wrapper around one LibRaw instance holding a single decoded raw frame,
// with accessors that translate LibRaw metadata into the pipeline's types
// (WhiteBalance, CfaPattern, Halide buffers).
class RawImage {
public:
    // Loads a raw file from disk (logs, but does not throw, on failure --
    // see the .cpp).
    explicit RawImage(const std::string &path);
    // Decodes an encoded raw file already held in memory.
    explicit RawImage(const uint8_t* data, size_t length);

    ~RawImage() = default;

    // Active-area dimensions (LibRaw sizes.width/height, margins excluded).
    int GetWidth() const { return RawProcessor->imgdata.rawdata.sizes.width; }
    int GetHeight() const { return RawProcessor->imgdata.rawdata.sizes.height; }

    // Minimum of the four per-channel black levels.
    int GetScalarBlackLevel() const;
    // Per-channel black levels (base + per-channel/CFA-cell offsets).
    std::array<float, 4> GetBlackLevel() const;
    // Sensor saturation value.
    int GetWhiteLevel() const { return RawProcessor->imgdata.color.maximum; }
    // Camera multipliers normalized to the green channel.
    WhiteBalance GetWhiteBalance() const;
    // 2x2 CFA tile encoded as four 0/1/2 (R/G/B) bytes.
    std::string GetCfaPatternString() const;
    CfaPattern GetCfaPattern() const;
    // Transposed top-left 3x3 of LibRaw's rgb_cam.
    Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const;
    // Copies the active sensor area into `buffer`.
    void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;

    // Writes current RawImage as DNG. If buffer was provided, then use it instead
    // of internal buffer.
    void WriteDng(const std::string &path,
                  const Halide::Runtime::Buffer<uint16_t> &buffer = {}) const;

    // Shared so converters (e.g. LibRaw2DngConverter) can read the same state.
    std::shared_ptr<LibRaw> GetRawProcessor() const { return RawProcessor; }

private:
    std::string Path;
    std::shared_ptr<LibRaw> RawProcessor;
};

@ -0,0 +1,95 @@
#include "LibRaw2DngConverter.h"
#include <unordered_map>
#include <libraw/libraw.h>
#include "InputSource.h"
// Opens an in-memory TIFF stream over OutputStream and stamps all
// image-independent DNG fields via SetTiffFields.
// NOTE: correctness depends on member declaration order in the header --
// OutputStream and Raw are declared before Tiff, so both are already
// constructed when the Tiff initializer (which uses them) runs.
LibRaw2DngConverter::LibRaw2DngConverter(const RawImage &raw)
    : OutputStream(), Raw(raw),
      Tiff(SetTiffFields(
          TiffPtr(TIFFStreamOpen("", &OutputStream), TIFFClose))) {}
// Writes every image-independent DNG/TIFF header field from the LibRaw
// color/orientation data of `Raw` into `tiff_ptr`, and returns the same
// pointer so the call can sit in the constructor's initializer list.
LibRaw2DngConverter::TiffPtr
LibRaw2DngConverter::SetTiffFields(LibRaw2DngConverter::TiffPtr tiff_ptr) {
    const auto RawProcessor = Raw.GetRawProcessor();
    const auto raw_color = RawProcessor->imgdata.color;
    const uint16_t bayer_pattern_dimensions[] = {2, 2};
    const auto tiff = tiff_ptr.get();
    TIFFSetField(tiff, TIFFTAG_DNGVERSION, "\01\04\00\00");
    TIFFSetField(tiff, TIFFTAG_DNGBACKWARDVERSION, "\01\04\00\00");
    TIFFSetField(tiff, TIFFTAG_SUBFILETYPE, 0);
    TIFFSetField(tiff, TIFFTAG_COMPRESSION, COMPRESSION_NONE);
    TIFFSetField(tiff, TIFFTAG_BITSPERSAMPLE, 16);
    TIFFSetField(tiff, TIFFTAG_ROWSPERSTRIP, 1);
    TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
    TIFFSetField(tiff, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_CFA);
    TIFFSetField(tiff, TIFFTAG_SAMPLESPERPIXEL, 1);
    TIFFSetField(tiff, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
    TIFFSetField(tiff, TIFFTAG_SAMPLEFORMAT, SAMPLEFORMAT_UINT);
    TIFFSetField(tiff, TIFFTAG_CFAREPEATPATTERNDIM, &bayer_pattern_dimensions);
    const std::string cfa = Raw.GetCfaPatternString();
    TIFFSetField(tiff, TIFFTAG_CFAPATTERN, cfa.c_str());
    TIFFSetField(tiff, TIFFTAG_MAKE, "hdr-plus");
    TIFFSetField(tiff, TIFFTAG_UNIQUECAMERAMODEL, "hdr-plus");
    // ColorMatrix1: LibRaw's cam_xyz flattened row-major.
    const std::array<float, 9> color_matrix = {
        raw_color.cam_xyz[0][0], raw_color.cam_xyz[0][1], raw_color.cam_xyz[0][2],
        raw_color.cam_xyz[1][0], raw_color.cam_xyz[1][1], raw_color.cam_xyz[1][2],
        raw_color.cam_xyz[2][0], raw_color.cam_xyz[2][1], raw_color.cam_xyz[2][2],
    };
    TIFFSetField(tiff, TIFFTAG_COLORMATRIX1, 9, &color_matrix);
    TIFFSetField(tiff, TIFFTAG_CALIBRATIONILLUMINANT1, 21); // D65
    const std::array<float, 3> as_shot_neutral = {
        1.f / (raw_color.cam_mul[0] / raw_color.cam_mul[1]), 1.f,
        1.f / (raw_color.cam_mul[2] / raw_color.cam_mul[1])};
    TIFFSetField(tiff, TIFFTAG_ASSHOTNEUTRAL, 3, &as_shot_neutral);
    TIFFSetField(tiff, TIFFTAG_CFALAYOUT, 1); // Rectangular (or square) layout
    TIFFSetField(
        tiff, TIFFTAG_CFAPLANECOLOR, 3,
        "\00\01\02"); // RGB
    // https://www.awaresystems.be/imaging/tiff/tifftags/cfaplanecolor.html
    const std::array<float, 4> black_level = Raw.GetBlackLevel();
    TIFFSetField(tiff, TIFFTAG_BLACKLEVEL, 4, &black_level);
    // Fix: this was `static`, which froze the white level at the value of
    // the first image converted in the process; TIFFSetField copies the
    // value, so a plain local is correct and each image uses its own maximum.
    const uint32_t white_level = raw_color.maximum;
    TIFFSetField(tiff, TIFFTAG_WHITELEVEL, 1, &white_level);
    if (RawProcessor->imgdata.sizes.flip > 0) {
        // Seems that LibRaw uses LibTIFF notation.
        TIFFSetField(tiff, TIFFTAG_ORIENTATION, RawProcessor->imgdata.sizes.flip);
    } else {
        TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
    }
    return tiff_ptr;
}
// Sets the image dimensions and writes `buffer` scanline by scanline into
// the in-memory TIFF. `const` is accurate for this object: all mutation
// happens through the TIFF handle.
// NOTE(review): rows are walked with `row_pointer += width`, i.e. this
// assumes the buffer is densely packed (stride == width) -- confirm for
// cropped or strided Halide buffers.
void LibRaw2DngConverter::SetBuffer(
    const Halide::Runtime::Buffer<uint16_t> &buffer) const {
    const auto width = buffer.width();
    const auto height = buffer.height();
    const auto tiff = Tiff.get();
    TIFFSetField(tiff, TIFFTAG_IMAGEWIDTH, width);
    TIFFSetField(tiff, TIFFTAG_IMAGELENGTH, height);
    uint16_t *row_pointer = buffer.data();
    for (int row = 0; row < height; row++) {
        TIFFWriteScanline(tiff, row_pointer, row, 0);
        row_pointer += width;
    }
}
void LibRaw2DngConverter::Write(const std::string &path) const {
TIFFCheckpointDirectory(Tiff.get());
TIFFFlush(Tiff.get());
std::ofstream output(path, std::ofstream::binary);
output << OutputStream.str();
}

@ -0,0 +1,26 @@
#pragma once
#include <sstream>
#include <tiffio.h>
#include <tiffio.hxx>
#include <HalideBuffer.h>
class RawImage;
// Converts a RawImage (LibRaw) into a DNG written through LibTIFF.
// Usage: construct over a RawImage, SetBuffer(pixels), Write(path).
class LibRaw2DngConverter {
    using TiffPtr = std::shared_ptr<TIFF>;
    // Stamps all image-independent DNG tags; returns its argument so it
    // can be chained inside the constructor's initializer list.
    TiffPtr SetTiffFields(TiffPtr tiff_ptr);

public:
    explicit LibRaw2DngConverter(const RawImage &raw);
    // Sets dimensions and writes the pixel scanlines into the TIFF.
    void SetBuffer(const Halide::Runtime::Buffer<uint16_t> &buffer) const;
    // Flushes the TIFF and writes the accumulated bytes to `path`.
    void Write(const std::string &path) const;

private:
    // Declaration order matters: OutputStream and Raw must precede Tiff,
    // because the constructor's Tiff initializer opens the TIFF stream over
    // OutputStream and reads metadata from Raw.
    std::ostringstream OutputStream;
    const RawImage &Raw;
    std::shared_ptr<TIFF> Tiff;
};

@ -0,0 +1,36 @@
#ifndef HDRPLUS_FINISH_H_
#define HDRPLUS_FINISH_H_

#include <hdrplus_pipeline.h>

// Per-channel white-balance multipliers for the four Bayer planes
// (r, g0, g1, b).
template <class T = float> struct TypedWhiteBalance {
    // Converting copy from a TypedWhiteBalance of a different element type.
    template <class TT>
    explicit TypedWhiteBalance(const TypedWhiteBalance<TT> &other)
        : r(other.r), g0(other.g0), g1(other.g1), b(other.b) {}
    TypedWhiteBalance(T r, T g0, T g1, T b) : r(r), g0(g0), g1(g1), b(b) {}

    T r;
    T g0;
    T g1;
    T b;
};
using WhiteBalance = TypedWhiteBalance<float>;

// Scalar parameters passed through to the pipeline.
typedef uint16_t BlackPoint;
typedef uint16_t WhitePoint;
typedef float Compression;
typedef float Gain;

// 2x2 Bayer color-filter-array layouts understood by the pipeline
// (values are passed to hdrplus_pipeline as ints).
enum class CfaPattern : int {
    CFA_UNKNOWN = 0,
    CFA_RGGB = 1,
    CFA_GRBG = 2,
    CFA_BGGR = 3,
    CFA_GBRG = 4
};
#endif

@ -26,6 +26,7 @@ COMPILE_MIN_SDK_VERSION=28
opencvsdk=D:/Workspace/deps/opencv-mobile-4.9.0-android
coreroot=D:/Workspace/Github/xymp/xymp/Core
hdrplusroot=D:/Workspace/deps/hdrplus_libs
halideroot=D:/Workspace/deps/Halide/18.0.0
ncnnroot=D:/Workspace/deps/ncnn-20240410-android-vulkan
# ncnnroot=D:/Workspace/deps/ncnn-20230517-android-vulkan
libzipsdkroot=D:/Workspace/deps/libzip-android-sdk

Loading…
Cancel
Save