diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index f06c8db7..42f631b6 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -160,6 +160,129 @@ void CPhoneDevice::CPhoneCamera::onDisconnected(ACameraDevice* device)
     }
 }
 
+
+CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params) : CPhoneDevice::CPhoneCamera(dev, width, height, params), m_path(path)
+{
+}
+
+void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader)
+{
+    ALOGD("onImageAvailable %p", reader);
+
+    AImage* image = 0;
+    media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);
+
+    if (mstatus != AMEDIA_OK)
+    {
+        // error
+        // https://stackoverflow.com/questions/67063562
+        if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
+        {
+            XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
+        }
+        return;
+    }
+
+    uint8_t* y_data = 0;
+    int y_len = 0;
+#if 0
+    if (!lightDetected)
+    {
+        AImage_getPlaneData(image, 0, &y_data, &y_len);
+
+        lightDetected = true;
+
+#if __cplusplus >= 201703L
+        uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
+#else
+        uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
+#endif
+        avgY = avgY / (uint64_t)y_len;
+        mResult.avgY = avgY;
+        mFinalResult.avgY = avgY;
+#if 1
+        if (avgY < 50)
+        {
+            if (m_params.autoExposure)
+            {
+                uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
+                camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+
+                int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
+                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+
+                int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
+                status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
+
+                XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY,
+                      mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
+            }
+            AImage_delete(image);
+            return;
+        }
+#endif
+    }
+#endif
+
+    if (m_imagesCaptured == ~0 || m_imagesCaptured != EXPECTED_CAPTURE_IDX)
+    {
+        // XYLOG(XYLOG_SEVERITY_DEBUG, "m_imagesCaptured=%u wait for next image", m_imagesCaptured);
+        // Not Ready Or Taken
+        AImage_delete(image);
+        if (m_imagesCaptured != ~0)
+        {
+            XYLOG(XYLOG_SEVERITY_DEBUG, "Skip Image index=%u", m_imagesCaptured);
+            m_imagesCaptured++;
+        }
+        return;
+    }
+
+    XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.afState, (uint32_t)mFinalResult.awbState);
+
+    mFinalResult.duration = GetMicroTimeStamp() - m_startTime;
+
+    int32_t format;
+    AImage_getFormat(image, &format);
+
+    if (format == AIMAGE_FORMAT_JPEG)
+    {
+        int planeCount;
+        media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
+
+        // LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
+        if (!(status == AMEDIA_OK && planeCount == 1))
+        {
+            // LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
+            AImage_delete(image);
+            return;
+        }
+
+        uint8_t *data = nullptr;
+        int len = 0;
+        AImage_getPlaneData(image, 0, &data, &len);
+
+        FILE *file = fopen(m_path.c_str(), "wb");
+        if (file && data && len)
+        {
+            fwrite(data, 1, len, file);
+            fclose(file);
+        }
+        else
+        {
+            if (file)
+                fclose(file);
+        }
+    }
+
+    AImage_delete(image);
+    m_imagesCaptured++;
+}
+
+int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const
+{
+    return AIMAGE_FORMAT_JPEG;
+}
+
 CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode) : mCameraPowerCount(0), mOtgCount(0), mVersionCode(versionCode)
 {
     mCamera = NULL;
@@ -1209,6 +1331,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
     if (mPhotoInfo.mediaType == 0)
     {
         mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
+        // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
         if (mCamera->open(to_string(mPhotoInfo.cameraId)) == 0)
         {
             XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Succeeded to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
@@ -1578,9 +1701,9 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
         }
         DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
     }
 
-    vector<int> params;
+    std::vector<int> params;
     params.push_back(cv::IMWRITE_JPEG_QUALITY);
-    params.push_back(mPhotoInfo.quality);
+    params.push_back((int)((uint32_t)mPhotoInfo.quality));
     bool res = false;
     std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
@@ -1872,3 +1995,12 @@ int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo)
 }
 
 
+bool CPhoneDevice::OpenSensors()
+{
+    return false;
+}
+
+bool CPhoneDevice::CloseSensors()
+{
+    return false;
+}
\ No newline at end of file
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index 73696c69..4472cf1d 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -153,7 +153,8 @@ class CPhoneDevice : public IDevice
 {
 public:
 
-    class CPhoneCamera : public NdkCamera {
+    class CPhoneCamera : public NdkCamera
+    {
     public:
         CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
         virtual ~CPhoneCamera();
@@ -165,6 +166,18 @@ public:
         CPhoneDevice* m_dev;
     };
 
+    class CJpegCamera : public CPhoneCamera
+    {
+    public:
+        CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params);
+
+        virtual void onImageAvailable(AImageReader* reader);
+        virtual int32_t getOutputFormat() const;
+
+    protected:
+        std::string m_path;
+    };
+
     struct TIMER_CONTEXT
     {
         CPhoneDevice* device;
@@ -200,6 +213,8 @@ public:
     virtual int GetWData(WEATHER_INFO *weatherInfo);
     virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM sensorParam);
 
+    virtual bool OpenSensors();
+    virtual bool CloseSensors();
 
     bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector& items);
diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp
index 0dd13859..a0352a52 100644
--- a/app/src/main/cpp/camera2/ndkcamera.cpp
+++ b/app/src/main/cpp/camera2/ndkcamera.cpp
@@ -564,8 +564,8 @@ int NdkCamera::open(const std::string& cameraId) {
 
     // setup imagereader and its surface
     {
-        // media_status_t mstatus = AImageReader_new(foundRes.width(), foundRes.height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader);
-        media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader);
+        // media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader);
+        media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), /*maxImages*/2, &image_reader);
 
         if (mstatus == AMEDIA_OK)
         {
@@ -598,7 +598,7 @@ int NdkCamera::open(const std::string& cameraId) {
         uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
         status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
 
-        uint8_t captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+        uint8_t captureIntent = (ACameraDevice_request_template)m_params.requestTemplate == TEMPLATE_PREVIEW ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
         status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
 
         uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@@ -985,103 +985,99 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
     int32_t format;
     AImage_getFormat(image, &format);
 
-    // ASSERT(format == AIMAGE_FORMAT_YUV_420_888);
-
-    int32_t width = 0;
-    int32_t height = 0;
-    AImage_getWidth(image, &width);
-    AImage_getHeight(image, &height);
-
-    int32_t y_pixelStride = 0;
-    int32_t u_pixelStride = 0;
-    int32_t v_pixelStride = 0;
-    AImage_getPlanePixelStride(image, 0, &y_pixelStride);
-    AImage_getPlanePixelStride(image, 1, &u_pixelStride);
-    AImage_getPlanePixelStride(image, 2, &v_pixelStride);
-
-    int32_t y_rowStride = 0;
-    int32_t u_rowStride = 0;
-    int32_t v_rowStride = 0;
-    AImage_getPlaneRowStride(image, 0, &y_rowStride);
-    AImage_getPlaneRowStride(image, 1, &u_rowStride);
-    AImage_getPlaneRowStride(image, 2, &v_rowStride);
-
-    // uint8_t* y_data = 0;
-    uint8_t* u_data = 0;
-    uint8_t* v_data = 0;
-    // int y_len = 0;
-    int u_len = 0;
-    int v_len = 0;
-    AImage_getPlaneData(image, 0, &y_data, &y_len);
-    AImage_getPlaneData(image, 1, &u_data, &u_len);
-    AImage_getPlaneData(image, 2, &v_data, &v_len);
+    if (format == AIMAGE_FORMAT_YUV_420_888)
+    {
+        int32_t width = 0;
+        int32_t height = 0;
+        AImage_getWidth(image, &width);
+        AImage_getHeight(image, &height);
+
+        int32_t y_pixelStride = 0;
+        int32_t u_pixelStride = 0;
+        int32_t v_pixelStride = 0;
+        AImage_getPlanePixelStride(image, 0, &y_pixelStride);
+        AImage_getPlanePixelStride(image, 1, &u_pixelStride);
+        AImage_getPlanePixelStride(image, 2, &v_pixelStride);
+
+        int32_t y_rowStride = 0;
+        int32_t u_rowStride = 0;
+        int32_t v_rowStride = 0;
+        AImage_getPlaneRowStride(image, 0, &y_rowStride);
+        AImage_getPlaneRowStride(image, 1, &u_rowStride);
+        AImage_getPlaneRowStride(image, 2, &v_rowStride);
+
+        // uint8_t* y_data = 0;
+        uint8_t* u_data = 0;
+        uint8_t* v_data = 0;
+        // int y_len = 0;
+        int u_len = 0;
+        int v_len = 0;
+        AImage_getPlaneData(image, 0, &y_data, &y_len);
+        AImage_getPlaneData(image, 1, &u_data, &u_len);
+        AImage_getPlaneData(image, 2, &v_data, &v_len);
 
 #if 1
 #if __cplusplus >= 201703L
-    uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
+        uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
 #else
-    uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
+        uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
 #endif
-    mResult.avgY = avgY / y_len;
-    mFinalResult.avgY = mResult.avgY;
+        mResult.avgY = avgY / y_len;
+        mFinalResult.avgY = mResult.avgY;
 #endif
 
-    if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
-    {
-        // already nv21 :)
-        on_image((unsigned char*)y_data, (int)width, (int)height);
-    }
-    else
-    {
-        // construct nv21
-        unsigned char* nv21 = new unsigned char[width * height + width * height / 2];
+        if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
         {
-            // Y
-            unsigned char* yptr = nv21;
-            for (int y=0; y