From cbf3dce87e364d5daca910bb668b79353f97682b Mon Sep 17 00:00:00 2001 From: Matthew Date: Sun, 6 Oct 2024 16:21:50 +0800 Subject: [PATCH] =?UTF-8?q?NDK=E5=AE=9E=E7=8E=B0RAW=E6=A0=BC=E5=BC=8F?= =?UTF-8?q?=E6=8B=8D=E7=85=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/build.gradle | 1 + app/src/main/cpp/PhoneDevice.cpp | 622 ++++++++++++++++-- app/src/main/cpp/PhoneDevice.h | 15 +- app/src/main/cpp/camera2/Camera2Helper.h | 108 +++ app/src/main/cpp/camera2/camera_listeners.cpp | 1 + app/src/main/cpp/camera2/ndkcamera.cpp | 300 +++++---- app/src/main/cpp/camera2/ndkcamera.h | 21 +- 7 files changed, 880 insertions(+), 188 deletions(-) diff --git a/app/build.gradle b/app/build.gradle index 7eabfe8b..cb61f500 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -52,6 +52,7 @@ android { proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' } debug { + minifyEnabled false jniDebuggable true testCoverageEnabled false } diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp index a7ffa30b..efa6441b 100644 --- a/app/src/main/cpp/PhoneDevice.cpp +++ b/app/src/main/cpp/PhoneDevice.cpp @@ -8,6 +8,7 @@ #include "GPIOControl.h" #include "CvText.h" #include "PositionHelper.h" +#include "DngCreator.h" #include #include @@ -21,6 +22,7 @@ #include #include #include +#include #include #ifdef USING_HDRPLUS @@ -29,6 +31,7 @@ #include #include +#include namespace fs = std::filesystem; #define CMD_SET_485_EN_STATE 131 @@ -159,7 +162,7 @@ static inline uint32_t YUV2RGB(int nY, int nU, int nV) { return 0xff000000 | (nR << 16) | (nG << 8) | nB; } -CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures) : NdkCamera(width, height, params, burstCaptures), m_dev(dev) +CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const 
NdkCamera::CAMERA_PARAMS& params) : NdkCamera(width, height, params), m_dev(dev) { } @@ -178,9 +181,13 @@ bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb) return false; } -bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames) +bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) { - return true; + if (m_dev != NULL) + { + return m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return false; } void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg) @@ -204,8 +211,12 @@ CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t { } -bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames) +bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) { + if (m_dev != NULL) + { + m_dev->onBurstCapture(characteristics, results, ldr, frames); + } return true; } @@ -242,8 +253,7 @@ void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); #endif avgY = avgY / (uint64_t)y_len; - mResult.avgY = avgY; - mFinalResult.avgY = avgY; + mLdr = avgY; #if 1 if (avgY < 50) { @@ -268,10 +278,6 @@ void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) } #endif - XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState); - - mFinalResult.duration = GetMicroTimeStamp() - m_startTime; - int32_t format; AImage_getFormat(image, &format); @@ -1337,6 +1343,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< params.requestTemplate = mPhotoInfo.requestTemplate; params.awbMode = 
mPhotoInfo.awbMode; params.wait3ALocked = mPhotoInfo.wait3ALocked; + params.burstRawCapture = mPhotoInfo.usingRawFormat; + params.burstCaptures = mPhotoInfo.burstCaptures; if (params.requestTemplate <= 0 || params.requestTemplate > 5) { params.requestTemplate = 2; @@ -1363,7 +1371,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< TurnOnCameraPower(NULL); res = true; - if (mPhotoInfo.mediaType == 0 && mPhotoInfo.usingRawFormat == 0) + if (mPhotoInfo.mediaType == 0/* && mPhotoInfo.usingRawFormat == 0*/) { mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params); // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params); @@ -1507,6 +1515,330 @@ void DrawOutlineText(cv::Ptr ft2, cv::Mat& mat, const std::st } } +bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + time_t takingTime = time(NULL); + if (mPhotoInfo.remedy != 0) + { + if ((takingTime - mPhotoInfo.scheduleTime) > 30) + { + takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + } + } + mPhotoInfo.photoTime = takingTime; + + vector osds; + osds.swap(mOsds); + PHOTO_INFO photoInfo = mPhotoInfo; + std::string path; + path.swap(mPath); + + std::string tmpPath = m_appPath + (APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId); + + acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; + ACameraMetadata_const_entry e = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e); + if (status == ACAMERA_OK) + { + facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; + } + + int sensorOrientation = 0; + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e); + if (status == ACAMERA_OK) + { + sensorOrientation = (int)e.data.i32[0]; + } + } + + bool turnOffOtg = 
(photoInfo.usbCamera != 0); + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + std::thread th([=] + { + cv::Mat rgb; + std::vector > rawFiles; + + media_status_t mstatus; + std::string cameraInfo; + if (photoInfo.usingRawFormat != 0) + { + // + for (int idx = 0; idx < frames.size(); idx++) + { + std::shared_ptr spImage = frames[idx]; + std::shared_ptr result = results[idx]; + + auto it = rawFiles.insert(rawFiles.end(), std::vector()); + + int32_t width; + int32_t height; + AImage_getWidth(spImage.get(), &width); + AImage_getHeight(spImage.get(), &height); + + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount); + AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount); + + uint8_t *data = nullptr; + int len = 0; + mstatus = AImage_getPlaneData(spImage.get(), 0, &data, &len); + DngCreator dngCreator(characteristics.get(), result.get()); + dngCreator.writeInputBuffer(*it, data, len, width, height, 0); + } + } + else + { + if (results.size() == 1 && frames.size() == 1) + { + std::shared_ptr result = results[0]; + std::shared_ptr frame = frames[0]; + + if (photoInfo.outputDbgInfo != 0) + { + NdkCamera::CAPTURE_RESULT captureResult = { 0 }; + NdkCamera::EnumCameraResult(result.get(), captureResult); + + char extimeunit[4] = { 0 }; + unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); + strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); + char str[128] = { 0 }; + snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", + captureResult.autoExposure, captureResult.autoFocus, + extime, extimeunit, captureResult.compensation, captureResult.sensitivity, + // isnan(captureResult.FocusDistance) ? 
0 : captureResult.FocusDistance, + (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, + captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, + (uint32_t)captureResult.duration, captureResult.frameDuration); + cameraInfo = str; + } + + int32_t format; + media_status_t mstatus = AImage_getFormat(frame.get(), &format); + + if (format == AIMAGE_FORMAT_YUV_420_888) + { + int32_t width; + int32_t height; + mstatus = AImage_getWidth(frame.get(), &width); + mstatus = AImage_getHeight(frame.get(), &height); + + int32_t y_pixelStride = 0; + int32_t u_pixelStride = 0; + int32_t v_pixelStride = 0; + AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride); + AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride); + AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride); + + int32_t y_rowStride = 0; + int32_t u_rowStride = 0; + int32_t v_rowStride = 0; + AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride); + AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride); + AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride); + + uint8_t* y_data = 0; + uint8_t* u_data = 0; + uint8_t* v_data = 0; + int y_len = 0; + int u_len = 0; + int v_len = 0; + AImage_getPlaneData(frame.get(), 0, &y_data, &y_len); + AImage_getPlaneData(frame.get(), 1, &u_data, &u_len); + AImage_getPlaneData(frame.get(), 2, &v_data, &v_len); + + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) + { + // already nv21 + ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb); + } + else + { + // construct nv21 + uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; + { + // Y + uint8_t* yptr = nv21; + for (int y = 0; y < height; y++) + { + const uint8_t* y_data_ptr = 
y_data + y_rowStride * y; + for (int x = 0; x < width; x++) + { + yptr[0] = y_data_ptr[0]; + yptr++; + y_data_ptr += y_pixelStride; + } + } + + // UV + uint8_t* uvptr = nv21 + width * height; + for (int y = 0; y < height / 2; y++) + { + const uint8_t* v_data_ptr = v_data + v_rowStride * y; + const uint8_t* u_data_ptr = u_data + u_rowStride * y; + for (int x = 0; x < width / 2; x++) + { + uvptr[0] = v_data_ptr[0]; + uvptr[1] = u_data_ptr[0]; + uvptr += 2; + v_data_ptr += v_pixelStride; + u_data_ptr += u_pixelStride; + } + } + } + + ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb); + + delete[] nv21; + } + + + if (photoInfo.outputDbgInfo != 0) + { + + } + } + } + } + + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + if (closeThread.joinable()) + { + closeThread.detach(); + } + +#ifdef OUTPUT_CAMERA_DBG_INFO +#if 0 + bool shouldRetry = false; + if (ldr != ~0) + { + if (ldr < MIN_LIGHT_Y) + { + if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + + // photoInfo.usingRawFormat = 1; + } + } + else if (ldr > MAX_LIGHT_Y) + { + if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf 
+ std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + } + + photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY)); + } + } +#endif // 0 +#endif // OUTPUT_CAMERA_DBG_INFO + + // Notify to take next photo + TakePhotoCb(1, photoInfo, "", takingTime); + + if (photoInfo.usingRawFormat != 0) + { + std::vector rawFilePaths; + for (auto it = rawFiles.cbegin(); it != rawFiles.cend(); ++it) + { + std::string dngFilePath = tmpPath + std::to_string(std::distance(rawFiles.cbegin(), it)) + ".dng"; +#ifdef _DEBUG + char log[256] = { 0 }; + strcpy(log, dngFilePath.c_str()); +#endif + FILE *file = fopen(dngFilePath.c_str(), "wb"); + if (file) { + if (!(*it).empty()) + { + fwrite(&((*it)[0]), 1, (*it).size(), file); + } + fclose(file); + rawFilePaths.push_back(dngFilePath); + } + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + hdrplus::hdrplus_pipeline pipeline; + pipeline.run_pipeline(rawFilePaths, 0, rgb); + XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + +#ifdef NDEBUG + for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it) + { + std::remove((*it).c_str()); + } +#endif + { + cv::Mat tempPic = convert16bit2_8bit_(rgb); + rgb = tempPic; + } + + if (photoInfo.orientation > 0) + { + if (photoInfo.orientation == 1) + { + if (facing == ACAMERA_LENS_FACING_FRONT) + { + cv::flip(rgb, rgb, 1); + } + } else if (photoInfo.orientation == 2) + { + cv::Mat tempPic; + cv::transpose(rgb, tempPic); + cv::flip(tempPic, rgb, 1); + } + else if (photoInfo.orientation == 3) + { + if (facing == ACAMERA_LENS_FACING_FRONT) + { + flip(rgb, rgb, 0); + } + else + { + cv::flip(rgb, rgb, -1); + } + } + else if (photoInfo.orientation == 4) + { + cv::Mat tempPic; + cv::transpose(rgb, tempPic); + cv::flip(tempPic, rgb, 0); + } + + 
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); + } + cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR); + } + + bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb); + if (res) + { + // TakePhotoCb(2, photoInfo, path, takingTime); + } + }); + + th.detach(); + + return true; +} + bool CPhoneDevice::OnImageReady(cv::Mat& mat) { time_t takingTime = time(NULL); @@ -1662,24 +1994,13 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) if (mCamera != NULL) { - NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); if (mPhotoInfo.outputDbgInfo != 0) { cv::Scalar scalarRed(0, 0, 255); // red char extimeunit[4] = { 0 }; - unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); - strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); char str[128] = { 0 }; - snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", - captureResult.autoExposure, captureResult.autoFocus, - extime, extimeunit, captureResult.compensation, captureResult.sensitivity, - // isnan(captureResult.FocusDistance) ? 
0 : captureResult.FocusDistance, - (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, - captureResult.sceneMode, GpioControl::getLightAdc(), (unsigned int)captureResult.avgY, captureResult.zoomRatio, - (uint32_t)captureResult.duration, captureResult.frameDuration); - // cv::putText(mat, str, cv::Point(0, mat.rows - 20), cv::FONT_HERSHEY_COMPLEX, fontScale, scalarWhite, thickness1, cv::LINE_AA); int fs = fontSize * 2 / 3; textSize = ft2->getTextSize(str, fs, -1, &baseline); @@ -1811,29 +2132,255 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) #ifdef OUTPUT_CAMERA_DBG_INFO if (shouldRetry) { - TakePhotoCb(false, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs); } else { - TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); } #else - TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs); + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); #endif } else { ALOGI("Photo file exists: %s", mPath.c_str()); } - CPhoneCamera* pCamera = mCamera; - mCamera = NULL; - bool turnOffOtg = (mPhotoInfo.usbCamera != 0); - std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); - m_threadClose.swap(closeThread); - if (closeThread.joinable()) + return res; +} + +bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat) +{ + int baseline = 0; + cv::Size textSize; + double height = mat.rows; + double width = mat.cols; + // double ratio = std::min(height / 1024, width / 1920); + double ratio = height / 1024.0; + int thickness = round(1.4 * ratio); + if (thickness < 1) thickness = 1; + else if (thickness > 5) thickness = 5; + cv::Scalar scalarWhite(255, 255, 255); // white + int fontSize = (int)(28.0 * ratio); + cv::Point pt; + + std::string fontPath; + if 
(existsFile("/system/fonts/NotoSansCJK-Regular.ttc")) { - closeThread.detach(); + fontPath = "/system/fonts/NotoSansCJK-Regular.ttc"; + } + else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc")) + { + fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc"; + } + else + { + fontPath = m_appPath+ "fonts/Noto.otf"; + } + cv::Ptr ft2; + ft2 = cv::ft::createFreeType2(); + ft2->loadFontData(fontPath.c_str(), 0); + // cv::Rect rc(0, 0, mat.cols, mat.rows); + // cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED); + std::vector objs; + + if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0)) + { + XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled"); + + // visualize(ncnnPath.c_str(), in); +#ifdef _DEBUG + double startTime = ncnn::get_current_time(); +#endif // _DEBUG + + bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs); +#ifdef _DEBUG + double elasped = ncnn::get_current_time() - startTime; + // __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped); +#endif // _DEBUG +#ifdef _DEBUG + ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0)); +#endif + if (detected && !objs.empty()) + { + cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16); + cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16); + float minSizeW = m_pRecognizationCfg->minSize > 0 ? (photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0; + float minSizeH = m_pRecognizationCfg->minSize > 0 ? 
(photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0; + + for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend();) + { + if (it->label >= m_pRecognizationCfg->items.size()) + { + it = objs.erase(it); + continue; + } + + const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label]; + if (item.enabled == 0 || it->prob < item.prob) + { + it = objs.erase(it); + continue; + } + + if (m_pRecognizationCfg->minSize > 0) + { + if (it->w < minSizeW || it->h < minSizeH) + { + it = objs.erase(it); + continue; + } + } + + if ((photoInfo.recognization & 0x2) != 0) + { + cv::Rect rc(it->x, it->y, it->w, it->h); + cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness); + textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline); + textSize.height += baseline; + if (it->y > textSize.height) + { + pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness; + } + else if (mat.rows - it->y - it->h > textSize.height) + { + pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness; + } + else + { + // Inner + pt.y = it->y + 4 + m_pRecognizationCfg->thickness; + } + if (mat.cols - it->x > textSize.width) + { + pt.x = it->x; + } + else + { + pt.x = it->x + it->w - textSize.width; + } + +#ifdef OUTPUT_CAMERA_DBG_INFO + char buf[128]; + snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)", + it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height); + XYLOG(XYLOG_SEVERITY_DEBUG, buf); +#endif + ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true); + } + ++it; + } + } + } + else + { + XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled"); + } + +// #ifdef OUTPUT_CAMERA_DBG_INFO + + if (!cameraInfo.empty()) + { + // NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); + + if (photoInfo.outputDbgInfo != 0) + { + cv::Scalar scalarRed(0, 0, 255); 
// red + + int fs = fontSize * 2 / 3; + textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline); + cv::Point lt(0, mat.rows - fs - 20 * ratio); + cv::Point lt2(0, lt.y - 2 * ratio); + cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio); + + if (rb.x > (int)width - 1) + { + rb.x = (int)width - 1; + } + if (rb.y > (int)height - 1) + { + rb.y = (int)height - 1; + } + cv::Mat roi = mat(cv::Rect(lt2, rb)); + cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite); + double alpha = 0.5; + cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi); + + // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1); + ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false); + + // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); + } + } +// #endif // OUTPUT_CAMERA_DBG_INFO + + for (vector::const_iterator it = osds.cbegin(); it != osds.cend(); ++it) + { + if (it->text.empty()) + { + continue; + } + +#ifdef _DEBUG + if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + int aa = 0; + } +#endif + + textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline); + XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); + + if (it->alignment == OSD_ALIGNMENT_TOP_LEFT) + { + pt.x = it->x * ratio; + pt.y = it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y= it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT) + { + pt.x = it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + + // cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height); + // cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2); + DrawOutlineText(ft2, mat, 
it->text, pt, fontSize, scalarWhite, thickness); + } + + std::vector params; + params.push_back(cv::IMWRITE_JPEG_QUALITY); + params.push_back((int)((uint32_t)photoInfo.quality)); + + bool res = false; + std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo)); + + if (!std::filesystem::exists(std::filesystem::path(fullPath))) + { + res = cv::imwrite(fullPath.c_str(), mat, params); + if (!res) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size()); } return res; @@ -1851,7 +2398,7 @@ bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, else { std::vector objs; - TakePhotoCb(result, mPhotoInfo, "", time(NULL), objs); + TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs); CPhoneCamera* pCamera = mCamera; mCamera = NULL; @@ -1885,7 +2432,7 @@ bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path { std::rename(path, fullPath.c_str()); } - TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs); + TakePhotoCb(result ? 
3 : 0, mPhotoInfo, fullPath, time(NULL), objs); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); @@ -1907,7 +2454,7 @@ void CPhoneDevice::onError(const std::string& msg) CPhoneCamera* pCamera = mCamera; mCamera = NULL; - TakePhotoCb(false, mPhotoInfo, mPath, 0); + TakePhotoCb(0, mPhotoInfo, mPath, 0); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); @@ -1926,7 +2473,7 @@ void CPhoneDevice::onDisconnected(ACameraDevice* device) CPhoneCamera* pCamera = mCamera; mCamera = NULL; - TakePhotoCb(false, mPhotoInfo, mPath, 0); + TakePhotoCb(0, mPhotoInfo, mPath, 0); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); @@ -2157,6 +2704,7 @@ int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo) return true; } + #ifdef USING_N938 bool CPhoneDevice::OpenSensors() { @@ -2235,4 +2783,4 @@ bool CPhoneDevice::CloseSensors() { return false; } -#endif \ No newline at end of file +#endif diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h index 609d2810..bfa6e16a 100644 --- a/app/src/main/cpp/PhoneDevice.h +++ b/app/src/main/cpp/PhoneDevice.h @@ -156,12 +156,12 @@ public: class CPhoneCamera : public NdkCamera { public: - CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures = 1); + CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params); virtual ~CPhoneCamera(); virtual bool on_image(cv::Mat& rgb); virtual void on_error(const std::string& msg); virtual void onDisconnected(ACameraDevice* device); - virtual bool onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames); + virtual bool 
onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); protected: CPhoneDevice* m_dev; @@ -174,7 +174,7 @@ public: virtual void onImageAvailable(AImageReader* reader); virtual int32_t getOutputFormat() const; - virtual bool onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames); + virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); protected: std::string m_path; @@ -250,7 +250,8 @@ protected: bool SendBroadcastMessage(std::string action, int value); // bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_, - inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const + bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat); + inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const { if (m_listener != NULL) { @@ -259,13 +260,12 @@ protected: return false; } - - inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const + inline bool TakePhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const { if (m_listener != NULL) { std::vector objects; - return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects); + return m_listener->OnPhotoTaken(result, photoInfo, path, photoTime, objects); } return false; @@ -275,6 +275,7 @@ protected: std::string QueryCpuTemperature(); bool OnImageReady(cv::Mat& mat); + bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); void 
onError(const std::string& msg); void onDisconnected(ACameraDevice* device); diff --git a/app/src/main/cpp/camera2/Camera2Helper.h b/app/src/main/cpp/camera2/Camera2Helper.h index d75746eb..b3a3b33a 100644 --- a/app/src/main/cpp/camera2/Camera2Helper.h +++ b/app/src/main/cpp/camera2/Camera2Helper.h @@ -17,6 +17,11 @@ #ifndef __CAMERA2_HELPER_H__ #define __CAMERA2_HELPER_H__ +#include +#include +#include +#include "mat.h" + template class RangeValue { @@ -103,4 +108,107 @@ private: }; +inline void ConvertYUV21ToMat(const uint8_t* nv21, int nv21_width, int nv21_height, int orgWidth, int orgHeight, + int sensorOrientation, bool front, int rotation, cv::Mat& rgb) +{ + int w = 0; + int h = 0; + int rotate_type = 0; + cv::Mat nv21_rotated; + const unsigned char* yuv420data = nv21; + + if (rotation != 0) + { + int co = 0; + if (front) + { + co = (sensorOrientation + (rotation - 1) * 90) % 360; + co = (360 - co) % 360; + } + else + { + co = (sensorOrientation - (rotation - 1) * 90 + 360) % 360; + } + + // XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing); + + // int co = 0; + if (co == 0) + { + w = nv21_width; + h = nv21_height; + rotate_type = front ? 2 : 1; + } + else if (co == 90) + { + w = nv21_height; + h = nv21_width; + + int tmp = orgWidth; + orgWidth = orgHeight; + orgHeight = tmp; + + rotate_type = front ? 5 : 6; + } + else if (co == 180) + { + w = nv21_width; + h = nv21_height; + rotate_type = front ? 4 : 3; + } + else if (co == 270) + { + w = nv21_height; + h = nv21_width; + + int tmp = orgWidth; + orgWidth = orgHeight; + orgHeight = tmp; + + rotate_type = front ? 
7 : 8; + } + + nv21_rotated.create(h + h / 2, w, CV_8UC1); + ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); + yuv420data = nv21_rotated.data; + } + else + { + w = nv21_width; + h = nv21_height; + } + + // nv21_rotated to rgb + if (w == orgWidth && h == orgHeight) + { + rgb.create(h, w, CV_8UC3); + // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data); + } + else + { + cv::Mat org(h, w, CV_8UC3); + ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data); + if (w * orgHeight == h * orgWidth) // Same Ratio + { + cv::resize(org, rgb, cv::Size(orgWidth, orgHeight)); + } + else + { + // Crop image + if (w > orgWidth && h >= orgHeight) + { + int left = (w - orgWidth) / 2; + int top = (h - orgHeight) / 2; + rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth)); + } + else + { + rgb = org; + } + } + } +} + + #endif /* __CAMERA2_HELPER_H__ */ diff --git a/app/src/main/cpp/camera2/camera_listeners.cpp b/app/src/main/cpp/camera2/camera_listeners.cpp index 4a3ef4a4..d299b293 100644 --- a/app/src/main/cpp/camera2/camera_listeners.cpp +++ b/app/src/main/cpp/camera2/camera_listeners.cpp @@ -1,3 +1,4 @@ + /* * Copyright (C) 2017 The Android Open Source Project * diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp index 2eaec467..f3e457f0 100644 --- a/app/src/main/cpp/camera2/ndkcamera.cpp +++ b/app/src/main/cpp/camera2/ndkcamera.cpp @@ -100,7 +100,7 @@ void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureR ((NdkCamera*)context)->onCaptureCompleted(session, request, result); } -NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures) : mBurstCaptures(burstCaptures) +NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params) { camera_facing = 0; camera_orientation = 0; @@ -153,7 +153,7 @@ 
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA lightDetected = false; mResult = { 0 }; - mResult.avgY = ~0; + mLdr = ~0; } NdkCamera::~NdkCamera() @@ -567,6 +567,12 @@ int NdkCamera::open(const std::string& cameraId) { status = ACaptureSessionOutputContainer_create(&capture_session_output_container); + uint32_t burstCaptures = getBurstCaptures(); + if (burstCaptures == 0) + { + burstCaptures = 1; + } + // setup imagereader and its surface media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 5, &mPreviewImageReader); if (mstatus == AMEDIA_OK) @@ -579,7 +585,7 @@ int NdkCamera::open(const std::string& cameraId) { ANativeWindow_acquire(mPreviewImageWindow); } - mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), mBurstCaptures, &mImageReader); + mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures, &mImageReader); if (mstatus == AMEDIA_OK) { AImageReader_ImageListener listener; @@ -593,16 +599,19 @@ int NdkCamera::open(const std::string& cameraId) { status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget); status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget); - for (int idx = 0; idx <= mBurstCaptures; idx++) + + for (int idx = 0; idx <= burstCaptures; idx++) { CaptureRequest *request = new CaptureRequest(); std::memset(request, 0, sizeof(CaptureRequest)); + bool isPreviewReqest = (idx == PREVIEW_REQUEST_IDX); + request->pThis = this; - request->imageReader = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageReader : mImageReader; - request->imageWindow = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageWindow : mImageWindow; - request->imageTarget = (idx == PREVIEW_REQUEST_IDX) ? mPreviewOutputTarget : mOutputTarget; - request->templateId = (idx == PREVIEW_REQUEST_IDX) ? 
TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; + request->imageReader = isPreviewReqest ? mPreviewImageReader : mImageReader; + request->imageWindow = isPreviewReqest ? mPreviewImageWindow : mImageWindow; + request->imageTarget = isPreviewReqest ? mPreviewOutputTarget : mOutputTarget; + request->templateId = isPreviewReqest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; mCaptureRequests.push_back(request); @@ -709,27 +718,30 @@ int NdkCamera::open(const std::string& cameraId) { } } - if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) - { - uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + if (isPreviewReqest) + { + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); - XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); - mResult.aeLockSetted = 1; - } - else - { - uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); - XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); - } + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); + mResult.aeLockSetted = 1; + } + else + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); + } - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); - m_precaptureStartTime = m_startTime; + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; + status = 
ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); + m_precaptureStartTime = m_startTime; - // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); + // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); + } } else { @@ -818,6 +830,8 @@ void NdkCamera::close() XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str()); camera_status_t res = ACAMERA_OK; + mCaptureFrames.clear(); + if ((ACameraManager *)camera_manager != NULL) { // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); @@ -928,22 +942,22 @@ void NdkCamera::close() void NdkCamera::onImageAvailable(AImageReader* reader) { AImage* image = 0; - media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image); - - if (mstatus != AMEDIA_OK) - { - // error - // https://stackoverflow.com/questions/67063562 - if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) - { - XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); - } - return; - } + media_status_t mstatus = AMEDIA_OK; if (reader == mPreviewImageReader) { - if (mResult.avgY == ~0) + mstatus = AImageReader_acquireLatestImage(reader, &image); + if (mstatus != AMEDIA_OK) + { + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + { + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); + } + return; + } + + if (mLdr == ~0) { uint8_t* y_data = 0; int y_len = 0; @@ -955,54 +969,34 @@ void NdkCamera::onImageAvailable(AImageReader* reader) uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); #endif avgY = avgY / (uint64_t)y_len; - mResult.avgY = avgY; - mFinalResult.avgY = avgY; + mLdr = avgY; } AImage_delete(image); - return; } - -#if 0 - if (!lightDetected) + else { - 
AImage_getPlaneData(image, 0, &y_data, &y_len); - - lightDetected = true; - -#if 1 - if (avgY < 50) + while (1) { - if (m_params.autoExposure) + mstatus = AImageReader_acquireNextImage(reader, &image); + if (mstatus != AMEDIA_OK) { - uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; - camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); - - int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY); - status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); - - int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY); - status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); - - XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY, - mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity); + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + { + if (mCaptureFrames.size() < m_params.burstCaptures) + { + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus); + } + } + break; } - AImage_delete(image); - return; + + m_photoTaken = true; + mCaptureFrames.push_back(std::shared_ptr(image, AImage_delete)); } -#endif } -#endif - - - m_photoTaken = true; - XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState); - - mFinalResult.duration = GetMicroTimeStamp() - m_startTime; - - mCaptureFrames.push_back(std::shared_ptr(image, AImage_delete)); - } void NdkCamera::on_error(const std::string& msg) @@ -1011,7 +1005,6 @@ void NdkCamera::on_error(const std::string& msg) void NdkCamera::onDisconnected(ACameraDevice* device) { - } bool NdkCamera::on_image(cv::Mat& rgb) @@ -1019,7 +1012,7 @@ bool NdkCamera::on_image(cv::Mat& rgb) return false; } -bool 
NdkCamera::onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames) +bool NdkCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) { return false; } @@ -1146,46 +1139,23 @@ void NdkCamera::onSessionReady(ACameraCaptureSession *session) { if (m_photoTaken) { - AASSERT(mCaptureFrames.size() == mCaptureResults.size(), "Frame size %u doesn't equal to result size %u", - (uint32_t)mCaptureFrames.size(), (uint32_t)mCaptureResults.size()); -#ifndef NDEBUG - for (int idx = 0; idx < mCaptureFrames.size(); idx++) + for (int idx = 0; idx < 10; idx++) { - std::shared_ptr spImage = mCaptureFrames[idx]; - - - int32_t format; - AImage_getFormat(spImage.get(), &format); - if (format == AIMAGE_FORMAT_YUV_420_888) + if (mCaptureFrames.size() >= m_params.burstCaptures && mCaptureResults.size() >= m_params.burstCaptures) { + break; } - else - { - ALOGW("Capture Available TID=%lld", (long long)getThreadIdOfULL()); - uint32_t frameNumber = mFrameNumber.fetch_add(1); - std::string path = "/sdcard/com.xypower.mpapp/tmp/" + std::to_string(frameNumber); - if (format == AIMAGE_FORMAT_JPEG) - { - path += ".jpg"; - writeJpegFile(spImage.get(), path.c_str()); - } - else - { - path += ".dng"; - writeRawFile(spImage.get(), mCharacteristics.get(), mCaptureResults[idx].get(), path.c_str()); - - } - } - + std::this_thread::sleep_for(std::chrono::milliseconds(16)); } -#endif // NDEBUG + AASSERT(mCaptureFrames.size() == mCaptureResults.size(), "Frame size %u doesn't equal to result size %u", + (uint32_t)mCaptureFrames.size(), (uint32_t)mCaptureResults.size()); + onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames); } } void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) { - } void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) @@ 
-1448,9 +1418,13 @@ bool NdkCamera::IsCameraAvailable(const std::string& cameraId) int32_t NdkCamera::getOutputFormat() const { - return AIMAGE_FORMAT_YUV_420_888; + return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888; } +int32_t NdkCamera::getBurstCaptures() const +{ + return m_params.burstRawCapture ? m_params.burstCaptures : 1; +} void NdkCamera::CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manualPreview, @@ -1582,8 +1556,6 @@ void NdkCamera::writeJpegFile(AImage *image, const char* path) void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path) { - - // dngCreator. int32_t width; int32_t height; @@ -1639,9 +1611,9 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt AImage_getPlaneRowStride(image, 1, &u_rowStride); AImage_getPlaneRowStride(image, 2, &v_rowStride); - uint8_t* y_data = 0; - uint8_t* u_data = 0; - uint8_t* v_data = 0; + uint8_t *y_data = 0; + uint8_t *u_data = 0; + uint8_t *v_data = 0; int y_len = 0; int u_len = 0; int v_len = 0; @@ -1649,23 +1621,20 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt AImage_getPlaneData(image, 1, &u_data, &u_len); AImage_getPlaneData(image, 2, &v_data, &v_len); - if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) - { + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && + u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && + v_rowStride == width) { // already nv21 :) // on_image((unsigned char*)y_data, (int)width, (int)height); - } - else - { + } else { // construct nv21 - unsigned char* nv21 = new unsigned char[width * height + width * height / 2]; + unsigned char *nv21 = new unsigned char[width * height + 
width * height / 2]; { // Y - unsigned char* yptr = nv21; - for (int y = 0; y < height; y++) - { - const unsigned char* y_data_ptr = y_data + y_rowStride * y; - for (int x = 0; x < width; x++) - { + unsigned char *yptr = nv21; + for (int y = 0; y < height; y++) { + const unsigned char *y_data_ptr = y_data + y_rowStride * y; + for (int x = 0; x < width; x++) { yptr[0] = y_data_ptr[0]; yptr++; y_data_ptr += y_pixelStride; @@ -1673,13 +1642,11 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt } // UV - unsigned char* uvptr = nv21 + width * height; - for (int y = 0; y < height / 2; y++) - { - const unsigned char* v_data_ptr = v_data + v_rowStride * y; - const unsigned char* u_data_ptr = u_data + u_rowStride * y; - for (int x = 0; x < width / 2; x++) - { + unsigned char *uvptr = nv21 + width * height; + for (int y = 0; y < height / 2; y++) { + const unsigned char *v_data_ptr = v_data + v_rowStride * y; + const unsigned char *u_data_ptr = u_data + u_rowStride * y; + for (int x = 0; x < width / 2; x++) { uvptr[0] = v_data_ptr[0]; uvptr[1] = u_data_ptr[0]; uvptr += 2; @@ -1689,8 +1656,71 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt } } - // on_image((unsigned char*)nv21, (int)width, (int)height); + // on_image((unsigned ch + } +} - delete[] nv21; +void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult) +{ + camera_status_t status = ACAMERA_ERROR_BASE; + + ACameraMetadata_const_entry val = { 0 }; + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); + captureResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val); + captureResult.awbState = (status == ACAMERA_OK) ? 
val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); + captureResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val); + int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1; + captureResult.exposureTime = exTime; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val); + captureResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val); + uint8_t aeMode = (status == ACAMERA_OK) ? val.data.u8[0] : 0; + captureResult.autoExposure = aeMode; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_FRAME_DURATION, &val); + int64_t frameDuration = (status == ACAMERA_OK) ? val.data.i64[0] : 0; + captureResult.frameDuration = frameDuration; + + val = { 0 }; + float focusDistance = NAN; + status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val); + if (status == ACAMERA_OK) + { + focusDistance = *val.data.f; + } + captureResult.FocusDistance = focusDistance; + + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &val); + if (status == ACAMERA_OK) + { + captureResult.zoomRatio = *val.data.f; } + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); + captureResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val); + captureResult.sceneMode = (status == ACAMERA_OK) ? *(val.data.u8) : 0; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val); + captureResult.compensation = (status == ACAMERA_OK) ? 
*(val.data.i32) : 0; } \ No newline at end of file diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h index 6b3b5560..0298e64b 100644 --- a/app/src/main/cpp/camera2/ndkcamera.h +++ b/app/src/main/cpp/camera2/ndkcamera.h @@ -81,6 +81,7 @@ public: unsigned int orientation:3; unsigned int zoom : 1; unsigned int wait3ALocked : 3; + unsigned int burstRawCapture : 1; unsigned int reserved : 18; int64_t exposureTime; unsigned int sensitivity; @@ -88,6 +89,7 @@ public: float zoomRatio; uint8_t requestTemplate; uint8_t awbMode; + uint8_t burstCaptures; unsigned short focusTimeout; // milli-seconds 65535 }; @@ -135,7 +137,7 @@ public: int sequenceId; }; - NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params, int burstCaptures = 1); + NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params); virtual ~NdkCamera(); // facing 0=front 1=back @@ -143,13 +145,14 @@ public: void close(); int selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY); - void writeJpegFile(AImage *image, const char* path); - void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path); + static void writeJpegFile(AImage *image, const char* path); + static void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path); void onAvailabilityCallback(const char* cameraId); void onUnavailabilityCallback(const char* cameraId); virtual void onImageAvailable(AImageReader* reader); virtual int32_t getOutputFormat() const; + virtual int32_t getBurstCaptures() const; void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height); void CreateSession(ANativeWindow* previewWindow); @@ -160,7 +163,7 @@ public: virtual void on_error(const std::string& msg); virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height); 
virtual void onDisconnected(ACameraDevice* device); - virtual bool onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames); + virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames); void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result); void onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result); @@ -168,14 +171,15 @@ public: void onSessionReady(ACameraCaptureSession *session); void onError(ACameraDevice* device, int error); - const CAPTURE_RESULT& getCaptureResult() const + uint32_t GetLdr() const { - return mFinalResult; + return mLdr; } bool IsCameraAvailable(const std::string& cameraId); static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height); + static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult); protected: std::mutex m_locker; @@ -183,10 +187,8 @@ protected: std::atomic mFrameNumber; - protected: CAMERA_PARAMS m_params; - int mBurstCaptures; int camera_facing; int camera_orientation; bool m_firstFrame; @@ -218,7 +220,6 @@ protected: bool mCaptureTriggered; CAPTURE_RESULT mResult; - CAPTURE_RESULT mFinalResult; unsigned long long m_startTime; protected: @@ -241,7 +242,9 @@ protected: std::shared_ptr mCharacteristics; std::vector mCaptureRequests; + std::shared_ptr mPreviewResults; std::vector > mCaptureResults; + uint32_t mLdr; std::vector > mCaptureFrames; ACameraCaptureSession* capture_session;