// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "ndkcamera.h"

#include <android/log.h>
#include <android/native_window.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <media/NdkImageReader.h>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include "mat.h"
#include "gpu.h"
#include "Camera2Helper.h"

#include <string>
#include <thread>
#include <chrono>
#include <numeric>

static void onAvailabilityCallback(void* context, const char* cameraId)
{
    ((NdkCamera*)context)->onAvailabilityCallback(cameraId);
    // ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId);
    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId);
}

static void onUnavailabilityCallback(void* context, const char* cameraId)
{
    ((NdkCamera*)context)->onUnavailabilityCallback(cameraId);
    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId);
}

static void onDisconnected(void* context, ACameraDevice* device)
{
    ((NdkCamera*)context)->onDisconnected(device);
    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device));
}

static void onError(void* context, ACameraDevice* device, int error)
{
    ((NdkCamera*)context)->onError(device, error);
}

static void onImageAvailable(void* context, AImageReader* reader)
{
    ((NdkCamera*)context)->onImageAvailable(reader);
}

static void onSessionActive(void* context, ACameraCaptureSession* session)
{
    ALOGD("onSessionActive %p", session);
}

static void onSessionReady(void* context, ACameraCaptureSession* session)
{
    ALOGD("onSessionReady %p", session);
    ((NdkCamera*)context)->onSessionReady(session);
}

static void onSessionClosed(void* context, ACameraCaptureSession* session)
{
    XYLOG(XYLOG_SEVERITY_INFO, "onSessionClosed %p", session);
}

void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
    // XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);
    ((NdkCamera*)context)->onCaptureFailed(session, request, failure);
}

void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber)
{
    ALOGD("onCaptureSequenceCompleted %p %d %lld", session, sequenceId, frameNumber);
}

void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId)
{
    ALOGD("onCaptureSequenceAborted %p %d", session, sequenceId);
}

void onCaptureProgressed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    ((NdkCamera*)context)->onCaptureProgressed(session, request, result);
}

void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    ((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
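// Capture lifecycle implemented below:
//   open()  - picks a matching YUV_420_888 stream size, caches the static
//             characteristics (3A capabilities, lock availability, exposure and
//             compensation ranges, active array, max regions), opens the device,
//             creates an AImageReader and submits a repeating capture request.
//   onCaptureCompleted() - tracks AF/AE/AWB state until 3A has converged (or
//             focusTimeout expires), then arms the capture by setting
//             m_imagesCaptured to 0.
//   onImageAvailable()   - while m_imagesCaptured is ~0 (not armed) or has not
//             yet reached EXPECTED_CAPTURE_IDX, frames are dropped; the frame at
//             EXPECTED_CAPTURE_IDX is converted to NV21/RGB and passed to on_image().
//   close() - tears everything down in reverse order.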
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
    camera_facing = 0;
    camera_orientation = 0;

    m_params = params;
    m_firstFrame = true;
    m_photoTaken = false;
    mWidth = width;
    mHeight = height;

    m_imagesCaptured = ~0;

    maxFrameDuration = 0;
    afSupported = false;
    awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
    aeLockAvailable = false;
    awbLockAvailable = false;
    sceneModeSupported = false;
    numberOfPrecaptures = 0;
    m_precaptureStartTime = 0;

    activeArraySize[0] = 0;
    activeArraySize[1] = 0;
    maxRegions[0] = 0;
    maxRegions[1] = 0;
    maxRegions[2] = 0;

    camera_manager_cb.context = this;
    camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback;
    camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback;

    camera_device = 0;
    image_reader = 0;
    image_reader_surface = 0;
    image_reader_target = 0;
    capture_request = 0;
    capture_session_output_container = 0;
    capture_session_output = 0;
    capture_session = 0;
    captureSequenceId = 0;

    lightDetected = false;

    mResult = { 0 };
}

NdkCamera::~NdkCamera()
{
    close();
}

int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY)
{
    camera_manager.Create();
    // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);

    // find camera
    bool foundIt = false;
    // DisplayDimension disp(mWidth, mHeight);
    // DisplayDimension foundRes = disp;
    camera_status_t status = ACAMERA_OK;

    ACameraIdList* cameraIdList = NULL;
    status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList);
    if (status != ACAMERA_OK)
    {
        return 1;
    }

    for (int i = 0; i < cameraIdList->numCameras; ++i)
    {
        const char* id = cameraIdList->cameraIds[i];
        if (cameraId.compare(id) == 0)
        {
            foundIt = true;
            break;
        }
    }
    ACameraManager_deleteCameraIdList(cameraIdList);
    if (!foundIt)
    {
        return 2;
    }

    ACameraMetadata* camera_metadata = 0;
    status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
    if (status != ACAMERA_OK)
    {
        return 3;
    }

    {
        ACameraMetadata_const_entry e = { 0 };
        camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);

        // format of the data: format, width, height, input?, type int32
        maxResolutionX = 0;
        maxResolutionY = 0;

        for (int i = 0; i < e.count; i += 4)
        {
            int32_t input = e.data.i32[i + 3];
            int32_t format = e.data.i32[i + 0];
            if (input)
                continue;

            if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
            {
                if (e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY))
                {
                    maxResolutionX = e.data.i32[i + 1];
                    maxResolutionY = e.data.i32[i + 2];
                }
            }
        }
    }

    ACameraMetadata_free(camera_metadata);

    return 0;
}

int NdkCamera::open(const std::string& cameraId)
{
    XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str());

    // camera_facing = _camera_facing;

    camera_manager.Create();
    // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);

    // find camera
    bool foundIt = false;
    DisplayDimension disp(mWidth, mHeight);
    DisplayDimension foundRes = disp;
    camera_status_t status = ACAMERA_OK;

    ALOGD("Start ACameraManager_getCameraIdList");
    {
        ACameraIdList* camera_id_list = 0;
        for (int retry = 0; retry < 100; retry++)
        {
            status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list);
            AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status);

            for (int i = 0; i < camera_id_list->numCameras; ++i)
            {
                const char* id = camera_id_list->cameraIds[i];
                if (cameraId.compare(id) == 0)
                {
                    foundIt = true;
                    break;
                }
            }
            ACameraManager_deleteCameraIdList(camera_id_list);
            if (foundIt)
            {
                break;
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(16));
        }
        ALOGD("End ACameraManager_getCameraIdList");
        // ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
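        // The id list is polled for up to ~1.6 s (100 retries x 16 ms) because the
        // target camera may not be enumerated yet at the moment open() is called.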
        if (!foundIt)
        {
            XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not Found on ID: %s", cameraId.c_str());
            return 1;
        }

        mCameraId = cameraId;

        ACameraMetadata* camera_metadata = 0;
        status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
        AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status);

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
            // format of the data: format, width, height, input?, type int32

            // DisplayDimension foundRes(4000, 4000);
            // DisplayDimension maxJPG(0, 0);

            foundIt = false;
            DisplayDimension temp;

            for (int i = 0; i < e.count; i += 4)
            {
                int32_t input = e.data.i32[i + 3];
                if (input)
                    continue;

                int32_t format = e.data.i32[i + 0];
                if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
                {
                    DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
                    // XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
                    if (!disp.IsSameRatio(res))
                    {
                        if (res.width() >= mWidth && res.height() >= mHeight)
                        {
                            temp = res;
                        }
                        continue;
                    }

                    if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
                    {
                        foundIt = true;
                        foundRes = res;
                    }
                }
            }

            if (!foundIt)
            {
                foundRes = temp;
                foundIt = true;
            }
        }

        if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0)
        {
            ACameraMetadata_free(camera_metadata);
            XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str());
            return 1;
        }
        // foundRes.Flip();

        // query facing
        acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e);
            AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status);
            if (status == ACAMERA_OK)
            {
                facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
            }
        }
        camera_facing = facing;

        // query orientation
        int orientation = 0;
        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e);
            AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status);
            if (status == ACAMERA_OK)
            {
                orientation = (int)e.data.i32[0];
            }
        }
        camera_orientation = orientation;

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e);
        }

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e);
            // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
#ifdef _DEBUG
            std::string afModes;
            for (int idx = 0; idx < e.count; idx++)
            {
                afModes += std::to_string(e.data.u8[idx]) + " ";
            }
            XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: %s", afModes.c_str());
#endif
            afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF));
        }

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e);
            // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
            if (status == ACAMERA_OK)
            {
                for (int idx = 0; idx < e.count; idx++)
                {
                    if (m_params.awbMode == e.data.u8[idx])
                    {
                        awbMode = m_params.awbMode;
                        break;
                    }
                    // unsigned int m = e.data.u8[idx];
                    // XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m);
                }
            }
            // awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF));
        }

        if (!afSupported)
        {
            XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported");
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
            // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status);
            if (status == ACAMERA_OK)
            {
                exposureRange.min_ = val.data.i64[0];
                if (exposureRange.min_ < kMinExposureTime)
                {
                    exposureRange.min_ = kMinExposureTime;
                }
                exposureRange.max_ = val.data.i64[1];
                if (exposureRange.max_ > kMaxExposureTime)
                {
                    exposureRange.max_ = kMaxExposureTime;
                }
                // exposureTime = exposureRange.value(2);
            }
            else
            {
                ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
                exposureRange.min_ = exposureRange.max_ = 0l;
                // exposureTime_ = 0l;
            }
        }

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e);
            // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
            aeLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false;
        }

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e);
            awbLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false;
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val);
            if (status == ACAMERA_OK)
            {
                float zoomRatioMin = val.data.f[0];
                float zoomRatioMax = val.data.f[1];

                ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax);
            }
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val);
            if (status == ACAMERA_OK)
            {
                aeCompensationRange.min_ = val.data.i32[0];
                aeCompensationRange.max_ = val.data.i32[1];

                XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_);
            }
            else
            {
                ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE");
                aeCompensationRange.min_ = aeCompensationRange.max_ = 0l;
            }
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val);
            if (status == ACAMERA_OK)
            {
                aeCompensationStep = val.data.r[0];

                XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator);
            }
        }
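        // The characteristics cached above (AF/AWB support, AE/AWB lock availability,
        // exposure and compensation ranges) drive the 3A wait logic in
        // onCaptureCompleted(); the queries below cache frame-duration and sensitivity
        // limits, the active array size and metering-region counts used for AF/AE
        // regions, and check whether the requested scene mode is supported.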
        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e);
            maxFrameDuration = (status == ACAMERA_OK) ? *e.data.i64 : 0;
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
            if (status == ACAMERA_OK)
            {
                sensitivityRange.min_ = val.data.i32[0];
                sensitivityRange.max_ = val.data.i32[1];
            }
            else
            {
                ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
                sensitivityRange.min_ = sensitivityRange.max_ = 0;
            }
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val);
            if (status == ACAMERA_OK)
            {
                activeArraySize[0] = val.data.i32[2];
                activeArraySize[1] = val.data.i32[3];
            }
        }

        {
            ACameraMetadata_const_entry val = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val);
            if (status == ACAMERA_OK)
            {
                maxRegions[0] = val.data.i32[0];
                maxRegions[1] = val.data.i32[1];
                maxRegions[2] = val.data.i32[2];
            }
        }

        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e);
            if (status == ACAMERA_OK)
            {
                for (int i = 0; i < e.count; i++)
                {
                    if (m_params.sceneMode == e.data.u8[i])
                    {
                        sceneModeSupported = true;
                        break;
                    }
                }
            }
        }

        ACameraMetadata_free(camera_metadata);
    }

    // open camera
    {
        ACameraDevice_StateCallbacks camera_device_state_callbacks;
        camera_device_state_callbacks.context = this;
        camera_device_state_callbacks.onDisconnected = ::onDisconnected;
        camera_device_state_callbacks.onError = ::onError;

        status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device);
        if (status != ACAMERA_OK)
        {
            XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status);
            return 1;
        }
    }

    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height());

    // setup imagereader and its surface
    {
        // media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader);
        media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), /*maxImages*/2, &image_reader);
        if (mstatus == AMEDIA_OK)
        {
            AImageReader_ImageListener listener;
            listener.context = this;
            listener.onImageAvailable = ::onImageAvailable;
            mstatus = AImageReader_setImageListener(image_reader, &listener);
            mstatus = AImageReader_getWindow(image_reader, &image_reader_surface);
            // ANativeWindow_setBuffersGeometry(image_reader_surface, width, height, WINDOW_FORMAT_RGBX_8888);
            ANativeWindow_acquire(image_reader_surface);
        }
    }

    m_imagesCaptured = ~0;
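    // Build the single capture request that feeds the AImageReader surface.
    // The blocks below pick the request template from m_params, set
    // CONTROL_MODE / CAPTURE_INTENT, disable flash, select noise reduction,
    // and then configure AF, scene mode, AE (or manual exposure), AWB and zoom.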
    // capture request
    {
        ACameraDevice_request_template templateId = ((afSupported && m_params.autoFocus) || m_params.autoExposure) ? TEMPLATE_PREVIEW : TEMPLATE_STILL_CAPTURE;
        templateId = (ACameraDevice_request_template)m_params.requestTemplate;
        status = ACameraDevice_createCaptureRequest(camera_device, templateId, &capture_request);

        int32_t fpsRange[2] = { 5, 15 };
        // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);
    }

    // uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
    uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);

    uint8_t captureIntent = (ACameraDevice_request_template)m_params.requestTemplate == TEMPLATE_PREVIEW ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);

    uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_FLASH_MODE, 1, &flashMode);

    uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);

    uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST;
    // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_EDGE_MODE, 1, &edgeMode);

    if (afSupported && m_params.autoFocus)
    {
        if (!m_params.zoom)
        {
            if (maxRegions[2] > 0)
            {
                int32_t centerX = activeArraySize[0] >> 1;
                int32_t centerY = activeArraySize[1] >> 1;
                int32_t sizeX = activeArraySize[0] >> 4;
                int32_t sizeY = activeArraySize[1] >> 4;
                int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 };
                // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
                if (status == ACAMERA_OK)
                {
                    // m_imagesCaptured = ~0;
#ifdef _DEBUG
                    int aa = 0;
#endif
                }
            }

            // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
            uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
            // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);

            // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
            // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
            // trig = ACAMERA_CONTROL_AF_TRIGGER_START;
            // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
        }

        if (status == ACAMERA_OK)
        {
            m_imagesCaptured = ~0;
        }
    }
    else
    {
        uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
        // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
        // m_imagesCaptured = (status == ACAMERA_OK) ? ~0 : 0;
    }
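    // Scene mode (if requested) and exposure come next: with autoExposure enabled
    // the request runs full AE with a precapture metering trigger and an optional
    // AE lock, with the compensation value clamped to the advertised range;
    // otherwise sensitivity and exposure time are programmed manually from m_params.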
    // std::this_thread::sleep_for(std::chrono::milliseconds(128));

    if (m_params.sceneMode != 0)
    {
        uint8_t sceneMode = m_params.sceneMode;
        status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
    }

    {
        if (m_params.autoExposure)
        {
            uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
            // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);

            if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
            {
                int32_t compensation = m_params.compensation;
                if (compensation < aeCompensationRange.min_)
                {
                    compensation = aeCompensationRange.min_;
                }
                if (compensation > aeCompensationRange.max_)
                {
                    compensation = aeCompensationRange.max_;
                }
                // int32_t aeCompensation = aeCompensationRange.max_;
                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
                if (status != ACAMERA_OK)
                {
                    int aa = 0;
                }
            }

            if (maxRegions[0] > 0)
            {
                int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 };
                // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
                if (status == ACAMERA_OK)
                {
                    // m_imagesCaptured = ~0;
#ifdef _DEBUG
                    int aa = 0;
#endif
                }
            }

            if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
            {
                uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
                status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
                XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
                mResult.aeLockSetted = 1;
            }
            else
            {
                uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
                status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
                XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
            }

            uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
            XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
            m_precaptureStartTime = m_startTime;

            if (status == ACAMERA_OK)
            {
                m_imagesCaptured = ~0;
            }
            // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
        }
        else
        {
            uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);

            if (m_params.sensitivity > 0)
            {
                int32_t sensitivity = m_params.sensitivity;
                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
            }
            if (m_params.exposureTime > 0)
            {
                int64_t exposureTime = m_params.exposureTime;
                status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
            }
            int64_t frameDuration = maxFrameDuration / 2;
            // status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration);
        }

        // TODO:
        // m_imagesCaptured = 0;

        {
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
            if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
            {
                uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
                status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
                mResult.awbLockSetted = 1;
                XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState);
            }
        }
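        // What remains wires the AImageReader surface into the request as its only
        // output target; the capture session is then created around the same surface
        // and the request is normally submitted as a repeating request so 3A keeps
        // updating between frames.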
#if 0
        uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
        status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);

        uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
        status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
#endif

        status = ACameraOutputTarget_create(image_reader_surface, &image_reader_target);
        status = ACaptureRequest_addTarget(capture_request, image_reader_target);
    }

    if (m_params.zoom)
    {
        float zoomRatio = m_params.zoomRatio;
        // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
        status = ACaptureRequest_setEntry_float(capture_request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio);
        if (status != ACAMERA_OK)
        {
#ifdef _DEBUG
            int aa = 0;
#endif
        }
    }

    // capture session
    {
        ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
        camera_capture_session_state_callbacks.context = this;
        camera_capture_session_state_callbacks.onActive = onSessionActive;
        camera_capture_session_state_callbacks.onReady = ::onSessionReady;
        camera_capture_session_state_callbacks.onClosed = onSessionClosed;

        status = ACaptureSessionOutputContainer_create(&capture_session_output_container);
        status = ACaptureSessionOutput_create(image_reader_surface, &capture_session_output);
        status = ACaptureSessionOutputContainer_add(capture_session_output_container, capture_session_output);
        status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);

        ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks;
        camera_capture_session_capture_callbacks.context = this;
        camera_capture_session_capture_callbacks.onCaptureStarted = 0;
        camera_capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed;
        camera_capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted;
        camera_capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed;
        camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
        camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
        camera_capture_session_capture_callbacks.onCaptureBufferLost = 0;

        if (m_imagesCaptured != 0)
        {
            status = ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId);
        }
        else
        {
            status = ACameraCaptureSession_capture(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId);
        }

        m_startTime = GetMicroTimeStamp();
        m_precaptureStartTime = m_startTime;
    }

    return status == ACAMERA_OK ? 0 : 1;
}
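// close() releases everything created in open(), roughly in reverse order:
// capture session, request (and its output target), session outputs/container,
// the device, the reader surface and finally the AImageReader itself.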
void NdkCamera::close()
{
    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
    camera_status_t res = ACAMERA_OK;

    if ((ACameraManager*)camera_manager != NULL)
    {
        // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
    }

    if (capture_session)
    {
        // res = ACameraCaptureSession_stopRepeating(capture_session);
        ACameraCaptureSession_close(capture_session);
        capture_session = 0;
    }

    if (capture_request)
    {
        res = ACaptureRequest_removeTarget(capture_request, image_reader_target);
        ACaptureRequest_free(capture_request);
        capture_request = 0;
    }

    if (image_reader_target)
    {
        ACameraOutputTarget_free(image_reader_target);
        image_reader_target = 0;
    }

    if (capture_session_output)
    {
        if (capture_session_output_container)
        {
            ACaptureSessionOutputContainer_remove(capture_session_output_container, capture_session_output);
        }
        ACaptureSessionOutput_free(capture_session_output);
        capture_session_output = 0;
    }

    if (capture_session_output_container)
    {
        ACaptureSessionOutputContainer_free(capture_session_output_container);
        capture_session_output_container = 0;
    }

    if (camera_device)
    {
        XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::close device %s, %p", mCameraId.c_str(), camera_device);
        ACameraDevice_close(camera_device);
        XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed device %s, %p", mCameraId.c_str(), camera_device);
        camera_device = 0;
    }

    if (image_reader_surface)
    {
        ANativeWindow_release(image_reader_surface);
        image_reader_surface = 0;
    }

    if (image_reader != NULL)
    {
        // AImageReader_setImageListener(image_reader, NULL);
        XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
        AImageReader_delete(image_reader);
        XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
        image_reader = 0;
    }

    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed %s", mCameraId.c_str());
}

void NdkCamera::onImageAvailable(AImageReader* reader)
{
    ALOGD("onImageAvailable %p", reader);

    AImage* image = 0;
    media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);

    if (mstatus != AMEDIA_OK)
    {
        // error
        // https://stackoverflow.com/questions/67063562
        if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
        {
            XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
        }
        return;
    }

    uint8_t* y_data = 0;
    int y_len = 0;
#if 0
    if (!lightDetected)
    {
        AImage_getPlaneData(image, 0, &y_data, &y_len);
        lightDetected = true;

#if __cplusplus >= 201703L
        uint64_t avgY = std::reduce(y_data, y_data + y_len, (uint64_t)0);
#else
        uint64_t avgY = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
#endif
        avgY = avgY / (uint64_t)y_len;
        mResult.avgY = avgY;
        mFinalResult.avgY = avgY;
#if 1
        if (avgY < 50)
        {
            if (m_params.autoExposure)
            {
                uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
                camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);

                int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);

                int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
                status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);

                XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u",
                      (uint32_t)avgY, mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
            }
            AImage_delete(image);
            return;
        }
#endif
    }
#endif
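    // Frame gating: m_imagesCaptured stays at ~0 until onCaptureCompleted() decides
    // that 3A has converged (or the focus timeout has expired). After that, skipped
    // frames increment the counter and only the frame whose index reaches
    // EXPECTED_CAPTURE_IDX is processed below.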
    if (m_imagesCaptured == ~0 || m_imagesCaptured != EXPECTED_CAPTURE_IDX)
    {
        // XYLOG(XYLOG_SEVERITY_DEBUG, "m_imagesCaptured=%u wait for next image", m_imagesCaptured);
        // Not Ready Or Taken
        AImage_delete(image);
        if (m_imagesCaptured != ~0)
        {
            XYLOG(XYLOG_SEVERITY_DEBUG, "Skip Image index=%u", m_imagesCaptured);
            m_imagesCaptured++;
        }
        return;
    }

    m_photoTaken = true;

    XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState);

    mFinalResult.duration = GetMicroTimeStamp() - m_startTime;

    int32_t format;
    AImage_getFormat(image, &format);

    if (format == AIMAGE_FORMAT_YUV_420_888)
    {
        int32_t width = 0;
        int32_t height = 0;
        AImage_getWidth(image, &width);
        AImage_getHeight(image, &height);

        int32_t y_pixelStride = 0;
        int32_t u_pixelStride = 0;
        int32_t v_pixelStride = 0;
        AImage_getPlanePixelStride(image, 0, &y_pixelStride);
        AImage_getPlanePixelStride(image, 1, &u_pixelStride);
        AImage_getPlanePixelStride(image, 2, &v_pixelStride);

        int32_t y_rowStride = 0;
        int32_t u_rowStride = 0;
        int32_t v_rowStride = 0;
        AImage_getPlaneRowStride(image, 0, &y_rowStride);
        AImage_getPlaneRowStride(image, 1, &u_rowStride);
        AImage_getPlaneRowStride(image, 2, &v_rowStride);

        // uint8_t* y_data = 0;
        uint8_t* u_data = 0;
        uint8_t* v_data = 0;
        // int y_len = 0;
        int u_len = 0;
        int v_len = 0;
        AImage_getPlaneData(image, 0, &y_data, &y_len);
        AImage_getPlaneData(image, 1, &u_data, &u_len);
        AImage_getPlaneData(image, 2, &v_data, &v_len);

#if 1
#if __cplusplus >= 201703L
        uint64_t avgY = std::reduce(y_data, y_data + y_len, (uint64_t)0);
#else
        uint64_t avgY = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
#endif
        mResult.avgY = avgY / y_len;
        mFinalResult.avgY = mResult.avgY;
#endif

        if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
        {
            // already nv21 :)
            on_image((unsigned char*)y_data, (int)width, (int)height);
        }
        else
        {
            // construct nv21
            unsigned char* nv21 = new unsigned char[width * height + width * height / 2];
            {
                // Y
                unsigned char* yptr = nv21;
                for (int y = 0; y < height; y++)
                {
                    const unsigned char* y_data_ptr = y_data + y_rowStride * y;
                    for (int x = 0; x < width; x++)
                    {
                        yptr[0] = y_data_ptr[0];
                        yptr++;
                        y_data_ptr += y_pixelStride;
                    }
                }

                // UV
                unsigned char* uvptr = nv21 + width * height;
                for (int y = 0; y < height / 2; y++)
                {
                    const unsigned char* v_data_ptr = v_data + v_rowStride * y;
                    const unsigned char* u_data_ptr = u_data + u_rowStride * y;
                    for (int x = 0; x < width / 2; x++)
                    {
                        uvptr[0] = v_data_ptr[0];
                        uvptr[1] = u_data_ptr[0];
                        uvptr += 2;
                        v_data_ptr += v_pixelStride;
                        u_data_ptr += u_pixelStride;
                    }
                }
            }

            on_image((unsigned char*)nv21, (int)width, (int)height);

            delete[] nv21;
        }
    }

    AImage_delete(image);
}

void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height)
{
    // rotate nv21
    int w = 0;
    int h = 0;
    int rotate_type = 0;
    cv::Mat nv21_rotated;
    const unsigned char* yuv420data = nv21;

    /*
    if (camera->GetSensorOrientation(&facing, &angle))
    {
        if (facing == ACAMERA_LENS_FACING_FRONT)
        {
            imageRotation = (angle + rotation_) % 360;
            imageRotation = (360 - imageRotation) % 360;
        }
        else
        {
            imageRotation = (angle - rotation_ + 360) % 360;
        }
    }
    */

    int orgWidth = mWidth;
    int orgHeight = mHeight;
    // int co = camera_orientation > 0 ? camera_orientation + 90 : camera_orientation;
    if (m_params.orientation != 0)
    {
        int co = 0;
        if (camera_facing == ACAMERA_LENS_FACING_FRONT)
        {
            co = (camera_orientation + (m_params.orientation - 1) * 90) % 360;
            co = (360 - co) % 360;
        }
        else
        {
            co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360;
        }
        XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing);

        // int co = 0;
        if (co == 0)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1;
        }
        else if (co == 90)
        {
            w = nv21_height;
            h = nv21_width;
            orgWidth = mHeight;
            orgHeight = mWidth;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6;
        }
        else if (co == 180)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3;
        }
        else if (co == 270)
        {
            w = nv21_height;
            h = nv21_width;
            orgWidth = mHeight;
            orgHeight = mWidth;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 7 : 8;
        }

        nv21_rotated.create(h + h / 2, w, CV_8UC1);
        ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
        yuv420data = nv21_rotated.data;
    }
    else
    {
        w = nv21_width;
        h = nv21_height;
        XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing);
    }

    // nv21_rotated to rgb
    cv::Mat rgb;
    if (w == orgWidth && h == orgHeight)
    {
        rgb.create(h, w, CV_8UC3);
        // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data);
    }
    else
    {
        cv::Mat org(h, w, CV_8UC3);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data);
        if (w * orgHeight == h * orgWidth) // Same Ratio
        {
            cv::resize(org, rgb, cv::Size(orgWidth, orgHeight));
        }
        else
        {
            // Crop image
            if (w > orgWidth && h >= orgHeight)
            {
                int left = (w - orgWidth) / 2;
                int top = (h - orgHeight) / 2;
                rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth));
            }
            else
            {
                rgb = org;
            }
        }
    }

    on_image(rgb);
}

void NdkCamera::onSessionReady(ACameraCaptureSession* session)
{
    return;

    camera_status_t status = ACAMERA_OK;
    ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks;
    camera_capture_session_capture_callbacks.context = this;
    camera_capture_session_capture_callbacks.onCaptureStarted = 0;
    camera_capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed;
    camera_capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted;
    camera_capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed;
    camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
    camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
    camera_capture_session_capture_callbacks.onCaptureBufferLost = 0;

    if (m_imagesCaptured != 0)
    {
        status = ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId);
    }
    else
    {
        status = ACameraCaptureSession_capture(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, &captureSequenceId);
    }
}

void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
}
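// onCaptureCompleted() is the 3A state machine: each completed frame updates
// mResult with the current AE/AWB/AF states, re-triggers the AF lock and the AE
// precapture sequence when needed, and, once the configured convergence/lock
// conditions (or focusTimeout) are satisfied, sets m_imagesCaptured = 0 so that
// onImageAvailable() starts counting frames toward EXPECTED_CAPTURE_IDX.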
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    // CALL_REQUEST(setEntry_i64(requests_[PREVIEW_REQUEST_IDX].request_,
    //                           ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime_));
    // ACameraMetadata_getConstEntry(result, )
    ACameraMetadata_const_entry val = { 0 };
    camera_status_t status = ACAMERA_ERROR_BASE;

    mResult.afState = ACAMERA_CONTROL_AF_STATE_INACTIVE;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
    mResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
    mResult.awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
    mResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_MODE, &val);
    mResult.awbMode = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AWB_MODE_OFF;

    if (afSupported && (m_params.autoFocus != 0))
    {
        if (mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED)
        {
            // Will lock it
            if (mResult.afLockSetted == 0)
            {
                uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
                status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
                mResult.afLockSetted = 1;
                XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState);
            }
        }
    }

    unsigned long long ts = GetMicroTimeStamp();
#if 0
    XYLOG(XYLOG_SEVERITY_DEBUG, "3ASTATE: AES=%u AWBS=%u AFS=%u", (uint32_t)mResult.aeState, (uint32_t)mResult.awbState, (uint32_t)mResult.afState);
#endif

    if (m_params.autoExposure != 0)
    {
        if (mResult.aeState == ACAMERA_CONTROL_AE_STATE_SEARCHING)
        {
            numberOfPrecaptures++;
            if (numberOfPrecaptures > 8 && ((ts - m_precaptureStartTime) > 2000))
            {
                uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
                // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
                // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                // XYLOG(XYLOG_SEVERITY_WARNING, "Retrigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
                numberOfPrecaptures = 0;
                m_precaptureStartTime = ts;
            }
        }
        else
        {
            numberOfPrecaptures = 0;
            m_precaptureStartTime = ts;
        }

        if (mResult.aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE)
        {
            uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
            aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
            status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
            XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
            numberOfPrecaptures = 0;
            m_precaptureStartTime = ts;
        }
    }

    if (!lightDetected)
    {
        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
        int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
        mResult.exposureTime = exTime;

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
        mResult.sensitivity = (status == ACAMERA_OK) ? (*(val.data.i32)) : 0;
    }
    if (m_imagesCaptured == ~0)
    {
        if (ts - m_startTime >= m_params.focusTimeout * 2)
        {
            XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u",
                  (unsigned int)mResult.afState, (unsigned int)mResult.aeState, (unsigned int)mResult.awbState, (unsigned int)(ts - m_startTime));
            m_imagesCaptured = 0;
        }
        else
        {
            if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO)
            {
                if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
                {
                    if (mResult.awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        return;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked");
#endif
                    }
                }
                else
                {
                    if (mResult.awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED && mResult.awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        return;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked");
#endif
                    }
                }
            }

            if (m_params.autoExposure != 0)
            {
                if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
                {
                    if (mResult.aeState != ACAMERA_CONTROL_AE_STATE_LOCKED)
                    {
                        return;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked");
#endif
                    }
                }
                else
                {
                    if (mResult.aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED && mResult.aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED && mResult.aeState != ACAMERA_CONTROL_AE_STATE_LOCKED)
                    {
                        return;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked");
#endif
                    }
                }
            }

            if (afSupported && (m_params.autoFocus != 0))
            {
                // if (mResult.afState == ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED || mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
                if (mResult.afState == ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
                {
                    // uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
                    // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    // aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
                    // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    // XYLOG(XYLOG_SEVERITY_INFO, "onCaptureCompleted New Focus Trigger AFS=%u AES=%u Time=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState);
                    return;
                }

                ALOGD("onCaptureCompleted 1 AFS=%u AES=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState);
                if (mResult.afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED || mResult.afState == ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED)
                // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE)
                {
                    XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused");
                    m_imagesCaptured = 0;
                }
            }
            else
            {
                XYLOG(XYLOG_SEVERITY_DEBUG, "AF Not Supported Or AF Not Enabled");
                m_imagesCaptured = 0;
            }
        }
    }

    if (m_imagesCaptured != 0 || camera_facing == 2)
    {
        // return;
    }
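    // From here on the handler only records the per-frame result metadata
    // (exposure time, AF/AE mode, frame duration, focus distance, zoom ratio,
    // sensitivity, scene mode, compensation) into mResult; mFinalResult is frozen
    // from the frame whose index matches EXPECTED_CAPTURE_IDX.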
    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
    int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
    mResult.exposureTime = exTime;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val);
    mResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val);
    uint8_t aeMode = (status == ACAMERA_OK) ? val.data.u8[0] : 0;
    mResult.autoExposure = aeMode;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_FRAME_DURATION, &val);
    int64_t frameDuration = (status == ACAMERA_OK) ? val.data.i64[0] : 0;
    mResult.frameDuration = frameDuration;

    val = { 0 };
    float focusDistance = NAN;
    if (afSupported && (m_params.autoFocus != 0))
    {
        status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val);
        if (status == ACAMERA_OK)
        {
            focusDistance = *val.data.f;
        }
    }
    mResult.FocusDistance = focusDistance;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &val);
    if (status == ACAMERA_OK)
    {
        mResult.zoomRatio = *val.data.f;
    }

    /*
    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
    mResult.afState = *(val.data.u8);
    */

#if 0
    ALOGD("onCaptureCompleted 2 AFS=%u AES=%u", (unsigned int)mResult.afState, (unsigned int)mResult.aeState);
#endif

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
    mResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val);
    mResult.sceneMode = (status == ACAMERA_OK) ? *(val.data.u8) : 0;

    val = { 0 };
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val);
    mResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0;

    if (m_imagesCaptured == EXPECTED_CAPTURE_IDX)
    {
        mFinalResult = mResult;
    }

    if (m_imagesCaptured != ~0)
    {
        XYLOG(XYLOG_SEVERITY_DEBUG, "onCaptureCompleted 3ASTATE: AES=%u AWBS=%u AFS=%u EXPO=%lld m_imagesCaptured=%u",
              (uint32_t)mResult.aeState, (uint32_t)mResult.awbState, (uint32_t)mResult.afState, mResult.exposureTime, m_imagesCaptured);
    }

    // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureCompleted %p %p %p", session, request, result);
}

void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
    XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);

    char msg[64] = { 0 };
    snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
    if (!m_photoTaken)
    {
        on_error(msg);
    }
}

void NdkCamera::onError(ACameraDevice* device, int error)
{
    if (ACAMERA_ERROR_CAMERA_DEVICE == error)
    {
    }

    XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d PhotoTaken=%d", ACameraDevice_getId(device), error, m_photoTaken ? 1 : 0);
    if (!m_photoTaken)
    {
        std::string msg = "NdkCamera error code=" + std::to_string(error);
        on_error(msg);
    }
}

void NdkCamera::onAvailabilityCallback(const char* cameraId)
{
    std::string s(cameraId);
    m_locker.lock();
    m_availableCameras.insert(s);
    m_locker.unlock();
}

void NdkCamera::onUnavailabilityCallback(const char* cameraId)
{
    std::string s(cameraId);
    m_locker.lock();
    m_availableCameras.erase(s);
    m_locker.unlock();
}

bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
{
    bool existed = false;
    m_locker.lock();
    existed = (m_availableCameras.find(cameraId) != m_availableCameras.cend());
    m_locker.unlock();
    return existed;
}

int32_t NdkCamera::getOutputFormat() const
{
    return AIMAGE_FORMAT_YUV_420_888;
}