|
|
|
@ -46,6 +46,18 @@ extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
|
|
|
|
|
// are normalized to eight bits.
|
|
|
|
|
static const int kMaxChannelValue = 262143;
|
|
|
|
|
|
|
|
|
|
// Shared holder for a burst of raw frame buffers.
// A std::shared_ptr<ByteArraysPointer> is captured by worker lambdas so that
// ownership of the (potentially large) byte arrays transfers without copying,
// and the buffers are released when the last worker finishes.
//
// Rule of Zero: std::vector releases its own storage, so the previous no-op
// constructor and the destructor that merely called clear() were redundant
// and have been removed; default special members are correct.
class ByteArraysPointer
{
public:
    std::vector<std::vector<uint8_t> > byteArrays;
};
|
|
|
|
|
|
|
|
|
|
cv::Mat convert16bit2_8bit_(cv::Mat ans){
|
|
|
|
|
if(ans.type()==CV_16UC3){
|
|
|
|
@ -181,6 +193,24 @@ bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb)
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
|
{
|
|
|
|
|
return m_dev->onOneCapture(characteristics, result, ldr, rgb);
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
|
{
|
|
|
|
|
return m_dev->onBurstCapture(characteristics, results, ldr, frames);
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
@ -211,6 +241,24 @@ CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t
|
|
|
|
|
{
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
|
{
|
|
|
|
|
return m_dev->onOneCapture(characteristics, result, ldr, rgb);
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
|
{
|
|
|
|
|
m_dev->onBurstCapture(characteristics, results, ldr, frames);
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
|
|
{
|
|
|
|
|
if (m_dev != NULL)
|
|
|
|
@ -1512,9 +1560,9 @@ void DrawOutlineText(cv::Ptr<cv::ft::FreeType2> ft2, cv::Mat& mat, const std::st
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
|
|
std::vector<std::shared_ptr<ACameraMetadata> >& results,
|
|
|
|
|
uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
|
|
bool CPhoneDevice::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result,
|
|
|
|
|
uint32_t ldr, cv::Mat rgb)
|
|
|
|
|
{
|
|
|
|
|
time_t takingTime = time(NULL);
|
|
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
|
@ -1532,7 +1580,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
|
|
|
|
|
std::string path;
|
|
|
|
|
path.swap(mPath);
|
|
|
|
|
|
|
|
|
|
std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
|
@ -1556,137 +1604,458 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
|
|
mCamera = NULL;
|
|
|
|
|
|
|
|
|
|
std::thread th([=]()mutable
|
|
|
|
|
{
|
|
|
|
|
cv::Mat rgb;
|
|
|
|
|
std::vector<std::vector<uint8_t> > rawFiles;
|
|
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
|
|
|
|
media_status_t mstatus;
|
|
|
|
|
std::string cameraInfo;
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
|
|
|
|
|
m_threadClose.swap(closeThread);
|
|
|
|
|
if (closeThread.joinable())
|
|
|
|
|
{
|
|
|
|
|
closeThread.detach();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
|
|
std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, takingTime]()mutable
|
|
|
|
|
{
|
|
|
|
|
std::string cameraInfo;
|
|
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
|
|
{
|
|
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
|
|
|
|
|
NdkCamera::EnumCameraResult(result.get(), captureResult);
|
|
|
|
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
|
|
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
|
|
|
|
|
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
|
|
|
|
|
char str[128] = { 0 };
|
|
|
|
|
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
|
|
|
|
|
captureResult.autoExposure, captureResult.autoFocus,
|
|
|
|
|
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
|
|
|
|
|
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
|
|
|
|
|
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
|
|
|
|
|
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
|
|
|
|
|
(uint32_t)captureResult.duration, captureResult.frameDuration);
|
|
|
|
|
cameraInfo = str;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (photoInfo.usingRawFormat != 0)
|
|
|
|
|
#ifdef OUTPUT_CAMERA_DBG_INFO
|
|
|
|
|
#if 0
|
|
|
|
|
bool shouldRetry = false;
|
|
|
|
|
if (ldr != ~0)
|
|
|
|
|
{
|
|
|
|
|
//
|
|
|
|
|
for (int idx = 0; idx < frames.size(); idx++)
|
|
|
|
|
if (ldr < MIN_LIGHT_Y)
|
|
|
|
|
{
|
|
|
|
|
std::shared_ptr<AImage> spImage = frames[idx];
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result = results[idx];
|
|
|
|
|
|
|
|
|
|
auto it = rawFiles.insert(rawFiles.end(), std::vector<uint8_t>());
|
|
|
|
|
|
|
|
|
|
int32_t width;
|
|
|
|
|
int32_t height;
|
|
|
|
|
AImage_getWidth(spImage.get(), &width);
|
|
|
|
|
AImage_getHeight(spImage.get(), &height);
|
|
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
|
|
{
|
|
|
|
|
shouldRetry = true;
|
|
|
|
|
char presetBuf[16] = {0};
|
|
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
|
|
|
|
int planeCount;
|
|
|
|
|
media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
|
|
|
|
|
AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
|
|
|
|
|
// photoInfo.usingRawFormat = 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (ldr > MAX_LIGHT_Y)
|
|
|
|
|
{
|
|
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
|
|
{
|
|
|
|
|
shouldRetry = true;
|
|
|
|
|
char presetBuf[16] = {0};
|
|
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
uint8_t *planeData = NULL;
|
|
|
|
|
int planeDataLen = 0;
|
|
|
|
|
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
|
|
|
|
|
DngCreator dngCreator(characteristics.get(), result.get());
|
|
|
|
|
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
|
|
|
|
|
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
#endif // 0
|
|
|
|
|
#endif // OUTPUT_CAMERA_DBG_INFO
|
|
|
|
|
|
|
|
|
|
// Notify to take next photo
|
|
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
|
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
|
|
if (res)
|
|
|
|
|
{
|
|
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
th.detach();
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
|
|
std::vector<std::shared_ptr<ACameraMetadata> >& results,
|
|
|
|
|
uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
|
|
|
|
|
{
|
|
|
|
|
time_t takingTime = time(NULL);
|
|
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
|
|
{
|
|
|
|
|
if ((takingTime - mPhotoInfo.scheduleTime) > 30)
|
|
|
|
|
{
|
|
|
|
|
takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
|
|
osds.swap(mOsds);
|
|
|
|
|
PHOTO_INFO photoInfo = mPhotoInfo;
|
|
|
|
|
std::string path;
|
|
|
|
|
path.swap(mPath);
|
|
|
|
|
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
std::shared_ptr<ByteArraysPointer> pByteArrays = std::make_shared<ByteArraysPointer>();
|
|
|
|
|
pByteArrays.get()->byteArrays.swap(frames);
|
|
|
|
|
|
|
|
|
|
bool turnOffOtg = (photoInfo.usbCamera != 0);
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
|
|
mCamera = NULL;
|
|
|
|
|
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
|
|
|
|
|
m_threadClose.swap(closeThread);
|
|
|
|
|
if (closeThread.joinable())
|
|
|
|
|
{
|
|
|
|
|
closeThread.detach();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
|
|
std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, takingTime]()mutable
|
|
|
|
|
{
|
|
|
|
|
cv::Mat rgb;
|
|
|
|
|
std::string cameraInfo;
|
|
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
|
|
|
|
|
if (status == ACAMERA_OK)
|
|
|
|
|
{
|
|
|
|
|
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int sensorOrientation = 0;
|
|
|
|
|
{
|
|
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
|
|
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
|
|
|
|
|
if (status == ACAMERA_OK)
|
|
|
|
|
{
|
|
|
|
|
sensorOrientation = (int)e.data.i32[0];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
|
|
{
|
|
|
|
|
if (!results.empty())
|
|
|
|
|
{
|
|
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
|
|
|
|
|
NdkCamera::EnumCameraResult(results[0].get(), captureResult);
|
|
|
|
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
|
|
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
|
|
|
|
|
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
|
|
|
|
|
char str[128] = { 0 };
|
|
|
|
|
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
|
|
|
|
|
captureResult.autoExposure, captureResult.autoFocus,
|
|
|
|
|
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
|
|
|
|
|
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
|
|
|
|
|
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
|
|
|
|
|
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
|
|
|
|
|
(uint32_t)captureResult.duration, captureResult.frameDuration);
|
|
|
|
|
cameraInfo = str;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#ifdef OUTPUT_CAMERA_DBG_INFO
|
|
|
|
|
#if 0
|
|
|
|
|
bool shouldRetry = false;
|
|
|
|
|
if (ldr != ~0)
|
|
|
|
|
{
|
|
|
|
|
if (results.size() == 1 && frames.size() == 1)
|
|
|
|
|
if (ldr < MIN_LIGHT_Y)
|
|
|
|
|
{
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result = results[0];
|
|
|
|
|
std::shared_ptr<AImage> frame = frames[0];
|
|
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
|
|
{
|
|
|
|
|
shouldRetry = true;
|
|
|
|
|
char presetBuf[16] = {0};
|
|
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
|
|
|
|
// photoInfo.usingRawFormat = 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (ldr > MAX_LIGHT_Y)
|
|
|
|
|
{
|
|
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
|
|
{
|
|
|
|
|
shouldRetry = true;
|
|
|
|
|
char presetBuf[16] = {0};
|
|
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int32_t format;
|
|
|
|
|
mstatus = AImage_getFormat(frame.get(), &format);
|
|
|
|
|
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
#endif // 0
|
|
|
|
|
#endif // OUTPUT_CAMERA_DBG_INFO
|
|
|
|
|
|
|
|
|
|
// Notify to take next photo
|
|
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
hdrplus::hdrplus_pipeline pipeline;
|
|
|
|
|
std::vector<std::vector<uint8_t> > localFrames;
|
|
|
|
|
localFrames.swap(pByteArrays.get()->byteArrays);
|
|
|
|
|
pipeline.run_pipeline(localFrames, 0, rgb);
|
|
|
|
|
localFrames.clear();
|
|
|
|
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
|
|
|
|
|
{
|
|
|
|
|
cv::Mat tempPic = convert16bit2_8bit_(rgb);
|
|
|
|
|
rgb = tempPic;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (photoInfo.orientation > 0)
|
|
|
|
|
{
|
|
|
|
|
if (photoInfo.orientation == 1)
|
|
|
|
|
{
|
|
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
|
|
{
|
|
|
|
|
cv::flip(rgb, rgb, 1);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (photoInfo.orientation == 2)
|
|
|
|
|
{
|
|
|
|
|
cv::Mat tempPic;
|
|
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
|
|
cv::flip(tempPic, rgb, 1);
|
|
|
|
|
}
|
|
|
|
|
else if (photoInfo.orientation == 3)
|
|
|
|
|
{
|
|
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
|
|
{
|
|
|
|
|
flip(rgb, rgb, 0);
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
cv::flip(rgb, rgb, -1);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (photoInfo.orientation == 4)
|
|
|
|
|
{
|
|
|
|
|
cv::Mat tempPic;
|
|
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
|
|
cv::flip(tempPic, rgb, 0);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
}
|
|
|
|
|
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
|
|
|
|
|
|
|
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
|
|
if (res)
|
|
|
|
|
{
|
|
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
th.detach();
|
|
|
|
|
|
|
|
|
|
if (format == AIMAGE_FORMAT_YUV_420_888)
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
|
|
std::vector<std::shared_ptr<ACameraMetadata> >& results,
|
|
|
|
|
uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
|
|
{
|
|
|
|
|
time_t takingTime = time(NULL);
|
|
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
|
|
{
|
|
|
|
|
if ((takingTime - mPhotoInfo.scheduleTime) > 30)
|
|
|
|
|
{
|
|
|
|
|
takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
|
|
osds.swap(mOsds);
|
|
|
|
|
PHOTO_INFO photoInfo = mPhotoInfo;
|
|
|
|
|
std::string path;
|
|
|
|
|
path.swap(mPath);
|
|
|
|
|
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
|
|
|
|
|
if (status == ACAMERA_OK)
|
|
|
|
|
{
|
|
|
|
|
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int sensorOrientation = 0;
|
|
|
|
|
{
|
|
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
|
|
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
|
|
|
|
|
if (status == ACAMERA_OK)
|
|
|
|
|
{
|
|
|
|
|
sensorOrientation = (int)e.data.i32[0];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool turnOffOtg = (photoInfo.usbCamera != 0);
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
|
|
mCamera = NULL;
|
|
|
|
|
|
|
|
|
|
cv::Mat rgb;
|
|
|
|
|
std::vector<std::shared_ptr<hdrplus::MemFile> > rawFiles;
|
|
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
|
|
|
|
if (photoInfo.usingRawFormat != 0)
|
|
|
|
|
{
|
|
|
|
|
for (int idx = 0; idx < frames.size(); idx++)
|
|
|
|
|
{
|
|
|
|
|
std::shared_ptr<AImage> spImage = frames[idx];
|
|
|
|
|
std::shared_ptr<ACameraMetadata> spResult = results[idx];
|
|
|
|
|
|
|
|
|
|
hdrplus::MemFile* rawImage = new hdrplus::MemFile();
|
|
|
|
|
rawFiles.push_back(std::shared_ptr<hdrplus::MemFile>(rawImage));
|
|
|
|
|
// rawImage->FromAImage(spImage.get(), characteristics.get(), spResult.get());
|
|
|
|
|
|
|
|
|
|
int32_t width = 0;
|
|
|
|
|
int32_t height = 0;
|
|
|
|
|
mstatus = AImage_getWidth(spImage.get(), &width);
|
|
|
|
|
mstatus = AImage_getHeight(spImage.get(), &height);
|
|
|
|
|
|
|
|
|
|
int32_t planeCount = 0;
|
|
|
|
|
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
|
|
|
|
|
AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
|
|
|
|
|
|
|
|
|
|
uint8_t *planeData = NULL;
|
|
|
|
|
int planeDataLen = 0;
|
|
|
|
|
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
|
|
|
|
|
ALOGD("Start Converting Dng");
|
|
|
|
|
DngCreator dngCreator(characteristics.get(), spResult.get());
|
|
|
|
|
dngCreator.writeInputBuffer(rawImage->content, planeData, planeDataLen, width, height, 0);
|
|
|
|
|
ALOGD("End Converting Dng");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
if (results.size() == 1 && frames.size() == 1)
|
|
|
|
|
{
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result = results[0];
|
|
|
|
|
std::shared_ptr<AImage> frame = frames[0];
|
|
|
|
|
|
|
|
|
|
int32_t format;
|
|
|
|
|
mstatus = AImage_getFormat(frame.get(), &format);
|
|
|
|
|
|
|
|
|
|
if (format == AIMAGE_FORMAT_YUV_420_888)
|
|
|
|
|
{
|
|
|
|
|
int32_t width;
|
|
|
|
|
int32_t height;
|
|
|
|
|
mstatus = AImage_getWidth(frame.get(), &width);
|
|
|
|
|
mstatus = AImage_getHeight(frame.get(), &height);
|
|
|
|
|
|
|
|
|
|
int32_t y_pixelStride = 0;
|
|
|
|
|
int32_t u_pixelStride = 0;
|
|
|
|
|
int32_t v_pixelStride = 0;
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
|
|
|
|
|
|
|
|
|
|
int32_t y_rowStride = 0;
|
|
|
|
|
int32_t u_rowStride = 0;
|
|
|
|
|
int32_t v_rowStride = 0;
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
|
|
|
|
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
|
|
uint8_t* u_data = 0;
|
|
|
|
|
uint8_t* v_data = 0;
|
|
|
|
|
int y_len = 0;
|
|
|
|
|
int u_len = 0;
|
|
|
|
|
int v_len = 0;
|
|
|
|
|
AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
|
|
|
|
|
AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
|
|
|
|
|
AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
|
|
|
|
|
|
|
|
|
|
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
|
|
|
|
|
{
|
|
|
|
|
// already nv21
|
|
|
|
|
ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
// construct nv21
|
|
|
|
|
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
|
|
|
|
|
{
|
|
|
|
|
int32_t width;
|
|
|
|
|
int32_t height;
|
|
|
|
|
mstatus = AImage_getWidth(frame.get(), &width);
|
|
|
|
|
mstatus = AImage_getHeight(frame.get(), &height);
|
|
|
|
|
|
|
|
|
|
int32_t y_pixelStride = 0;
|
|
|
|
|
int32_t u_pixelStride = 0;
|
|
|
|
|
int32_t v_pixelStride = 0;
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
|
|
|
|
|
|
|
|
|
|
int32_t y_rowStride = 0;
|
|
|
|
|
int32_t u_rowStride = 0;
|
|
|
|
|
int32_t v_rowStride = 0;
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
|
|
|
|
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
|
|
uint8_t* u_data = 0;
|
|
|
|
|
uint8_t* v_data = 0;
|
|
|
|
|
int y_len = 0;
|
|
|
|
|
int u_len = 0;
|
|
|
|
|
int v_len = 0;
|
|
|
|
|
AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
|
|
|
|
|
AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
|
|
|
|
|
AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
|
|
|
|
|
|
|
|
|
|
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
|
|
|
|
|
// Y
|
|
|
|
|
uint8_t* yptr = nv21;
|
|
|
|
|
for (int y = 0; y < height; y++)
|
|
|
|
|
{
|
|
|
|
|
// already nv21
|
|
|
|
|
ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
|
|
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width; x++)
|
|
|
|
|
{
|
|
|
|
|
yptr[0] = y_data_ptr[0];
|
|
|
|
|
yptr++;
|
|
|
|
|
y_data_ptr += y_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
|
|
|
|
|
// UV
|
|
|
|
|
uint8_t* uvptr = nv21 + width * height;
|
|
|
|
|
for (int y = 0; y < height / 2; y++)
|
|
|
|
|
{
|
|
|
|
|
// construct nv21
|
|
|
|
|
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
|
|
|
|
|
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
|
|
|
|
|
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width / 2; x++)
|
|
|
|
|
{
|
|
|
|
|
// Y
|
|
|
|
|
uint8_t* yptr = nv21;
|
|
|
|
|
for (int y = 0; y < height; y++)
|
|
|
|
|
{
|
|
|
|
|
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width; x++)
|
|
|
|
|
{
|
|
|
|
|
yptr[0] = y_data_ptr[0];
|
|
|
|
|
yptr++;
|
|
|
|
|
y_data_ptr += y_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// UV
|
|
|
|
|
uint8_t* uvptr = nv21 + width * height;
|
|
|
|
|
for (int y = 0; y < height / 2; y++)
|
|
|
|
|
{
|
|
|
|
|
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
|
|
|
|
|
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width / 2; x++)
|
|
|
|
|
{
|
|
|
|
|
uvptr[0] = v_data_ptr[0];
|
|
|
|
|
uvptr[1] = u_data_ptr[0];
|
|
|
|
|
uvptr += 2;
|
|
|
|
|
v_data_ptr += v_pixelStride;
|
|
|
|
|
u_data_ptr += u_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
uvptr[0] = v_data_ptr[0];
|
|
|
|
|
uvptr[1] = u_data_ptr[0];
|
|
|
|
|
uvptr += 2;
|
|
|
|
|
v_data_ptr += v_pixelStride;
|
|
|
|
|
u_data_ptr += u_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
|
|
|
|
|
|
|
delete[] nv21;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
|
|
|
|
|
|
|
delete[] nv21;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
frames.clear();
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
|
|
|
|
|
m_threadClose.swap(closeThread);
|
|
|
|
|
if (closeThread.joinable())
|
|
|
|
|
{
|
|
|
|
|
closeThread.detach();
|
|
|
|
|
}
|
|
|
|
|
frames.clear();
|
|
|
|
|
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
|
|
|
|
|
m_threadClose.swap(closeThread);
|
|
|
|
|
if (closeThread.joinable())
|
|
|
|
|
{
|
|
|
|
|
closeThread.detach();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
|
|
std::thread th([pThis, characteristics, results, photoInfo, osds, path, rgb, rawFiles, facing, sensorOrientation, ldr, takingTime]()mutable
|
|
|
|
|
{
|
|
|
|
|
std::string cameraInfo;
|
|
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
|
|
{
|
|
|
|
|
if (!results.empty())
|
|
|
|
@ -1749,14 +2118,16 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
|
|
|
|
|
#endif // OUTPUT_CAMERA_DBG_INFO
|
|
|
|
|
|
|
|
|
|
// Notify to take next photo
|
|
|
|
|
TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
|
|
|
|
if (photoInfo.usingRawFormat != 0)
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
hdrplus::hdrplus_pipeline pipeline;
|
|
|
|
|
pipeline.run_pipeline(rawFiles, 0, rgb);
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
|
|
|
|
|
rawFiles.clear();
|
|
|
|
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
|
|
|
|
|
#ifdef NDEBUG
|
|
|
|
|
for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it)
|
|
|
|
@ -1806,7 +2177,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
|
|
|
|
|
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
|
|
if (res)
|
|
|
|
|
{
|
|
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
|
|