// Tencent is pleased to support the open source community by making ncnn available. // // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // https://opensource.org/licenses/BSD-3-Clause // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. #include "ndkcamera.h" #include #include #include #include #include #include #include #include "mat.h" #include "gpu.h" #include "Camera2Helper.h" #include #include #include "DngCreator.h" static void onAvailabilityCallback(void* context, const char* cameraId) { ((NdkCamera*)context)->onAvailabilityCallback(cameraId); // ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId); XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId); } static void onUnavailabilityCallback(void* context, const char* cameraId) { ((NdkCamera*)context)->onUnavailabilityCallback(cameraId); XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId); } static void onDisconnected(void* context, ACameraDevice* device) { ((NdkCamera*)context)->onDisconnected(device); XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device)); } static void onError(void* context, ACameraDevice* device, int error) { ((NdkCamera*)context)->onError(device, error); } static void onImageAvailable(void* context, AImageReader* reader) { NdkCamera* pThis = reinterpret_cast(context); pThis->onImageAvailable(reader); } static void onSessionActive(void* context, ACameraCaptureSession *session) { ALOGD("onSessionActive %p", 
session); } static void onSessionReady(void* context, ACameraCaptureSession *session) { ALOGD("onSessionReady %p", session); ((NdkCamera*)context)->onSessionReady(session); } static void onSessionClosed(void* context, ACameraCaptureSession *session) { XYLOG(XYLOG_SEVERITY_INFO, "onSessionClosed %p", session); } void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure) { // XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason); ((NdkCamera*)context)->onCaptureFailed(session, request, failure); } void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber) { ALOGD("onCaptureSequenceCompleted %p sequenceId=%d frameNumber=%ld", session, sequenceId, frameNumber); } void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId) { ALOGD("onCaptureSequenceAborted %p sequenceId=%d", session, sequenceId); } void onCaptureProgressed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) { ((NdkCamera*)context)->onCaptureProgressed(session, request, result); } void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) { ((NdkCamera*)context)->onCaptureCompleted(session, request, result); } NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures) : mBurstCaptures(burstCaptures) { camera_facing = 0; camera_orientation = 0; m_params = params; m_firstFrame = true; m_photoTaken = false; mWidth = width; mHeight = height; mFrameNumber.store(1); mCaptureTriggered = false; maxFrameDuration = 0; afSupported = false; awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO; aeLockAvailable = false; awbLockAvailable = false; sceneModeSupported = false; numberOfPrecaptures = 0; m_precaptureStartTime = 0; activeArraySize[0] = 
0; activeArraySize[1] = 0; maxRegions[0] = 0; maxRegions[1] = 0; maxRegions[2] = 0; camera_manager_cb.context = this; camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback; camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback; mPreviewImageReader = NULL; mPreviewImageWindow = NULL; mPreviewOutputTarget = NULL; mImageReader = NULL; mImageWindow = NULL; mOutputTarget = NULL; camera_device = 0; capture_session_output_container = 0; capture_session = 0; lightDetected = false; mResult = { 0 }; mResult.avgY = ~0; } NdkCamera::~NdkCamera() { close(); } int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY) { camera_manager.Create(); // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); // find camera bool foundIt = false; // DisplayDimension disp(mWidth, mHeight); // DisplayDimension foundRes = disp; camera_status_t status = ACAMERA_OK; ACameraIdList* cameraIdList = NULL; status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList); if (status != ACAMERA_OK) { return 1; } for (int i = 0; i < cameraIdList->numCameras; ++i) { const char *id = cameraIdList->cameraIds[i]; if (cameraId.compare(id) == 0) { foundIt = true; break; } } ACameraManager_deleteCameraIdList(cameraIdList); if (!foundIt) { return 2; } ACameraMetadata * camera_metadata = 0; status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); if (status != ACAMERA_OK) { return 3; } { ACameraMetadata_const_entry e = { 0 }; camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); // format of the data: format, width, height, input?, type int32 maxResolutionX = 0; maxResolutionY = 0; for (int i = 0; i < e.count; i += 4) { int32_t input = e.data.i32[i + 3]; int32_t format = e.data.i32[i + 0]; if (input) continue; if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) { if 
(e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY)) { maxResolutionX = e.data.i32[i + 1]; maxResolutionY = e.data.i32[i + 2]; } } } } return 0; } int NdkCamera::open(const std::string& cameraId) { XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str()); // camera_facing = _camera_facing; camera_manager.Create(); // ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb); // find camera bool foundIt = false; DisplayDimension disp(mWidth, mHeight); DisplayDimension foundRes = disp; camera_status_t status = ACAMERA_OK; ALOGD("Start ACameraManager_getCameraIdList"); { ACameraIdList *camera_id_list = 0; for (int retry = 0; retry < 100; retry++) { status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list); AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status); for (int i = 0; i < camera_id_list->numCameras; ++i) { const char *id = camera_id_list->cameraIds[i]; if (cameraId.compare(id) == 0) { foundIt = true; break; } } ACameraManager_deleteCameraIdList(camera_id_list); if (foundIt) { break; } std::this_thread::sleep_for(std::chrono::milliseconds(16)); } ALOGD("End ACameraManager_getCameraIdList"); // ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); if (!foundIt) { XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not Found on ID: %s", cameraId.c_str()); return 1; } mCameraId = cameraId; ACameraMetadata * camera_metadata = 0; status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata); AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status); mCharacteristics = std::shared_ptr(camera_metadata, ACameraMetadata_free); { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e); // format of the data: format, width, height, input?, type int32 // DisplayDimension foundRes(4000, 
4000); // DisplayDimension maxJPG(0, 0); foundIt = false; DisplayDimension temp; for (int i = 0; i < e.count; i += 4) { int32_t input = e.data.i32[i + 3]; if (input) continue; int32_t format = e.data.i32[i + 0]; if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/) { DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]); // XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height()); if (!disp.IsSameRatio(res)) { if (res.width() >= mWidth && res.height() >= mHeight) { temp = res; } continue; } if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp) { foundIt = true; foundRes = res; } } } if (!foundIt) { foundRes = temp; foundIt = true; } } if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0) { // ACameraMetadata_free(camera_metadata); XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str()); return 1; } // foundRes.Flip(); // query faceing acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e); AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status); if (status == ACAMERA_OK) { facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; } } camera_facing = facing; // query orientation int orientation = 0; { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e); AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status); if (status == ACAMERA_OK) { orientation = (int)e.data.i32[0]; } } camera_orientation = orientation; { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e); } { ACameraMetadata_const_entry e = { 0 }; 
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e); // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); #ifdef _DEBUG std::string afModes; for (int idx = 0; idx < e.count; idx++) { afModes += std::to_string(e.data.u8[idx]) + " "; } XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: ", afModes.c_str()); #endif afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF)); } { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e); // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); if (status == ACAMERA_OK) { for (int idx = 0; idx < e.count; idx++) { if (m_params.awbMode == e.data.u8[idx]) { awbMode = m_params.awbMode; break; } // unsigned int m = e.data.u8[idx]; // XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m); } } // awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF)); } if (!afSupported) { XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported"); } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val); // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status); if (status == ACAMERA_OK) { exposureRange.min_ = val.data.i64[0]; if (exposureRange.min_ < kMinExposureTime) { exposureRange.min_ = kMinExposureTime; } exposureRange.max_ = val.data.i64[1]; if (exposureRange.max_ > kMaxExposureTime) { exposureRange.max_ = kMaxExposureTime; } // exposureTime = exposureRange.value(2); } else { ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE"); exposureRange.min_ = exposureRange.max_ = 0l; // 
exposureTime_ = 0l; } } { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e); // AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status); aeLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false; } { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e); awbLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false; } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val); if (status == ACAMERA_OK) { float zoomRatioMin = val.data.f[0]; float zoomRatioMax = val.data.f[1]; ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax); } } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val); if (status == ACAMERA_OK) { aeCompensationRange.min_ = val.data.i32[0]; aeCompensationRange.max_ = val.data.i32[1]; XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_); } else { ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE"); aeCompensationRange.min_ = aeCompensationRange.max_ = 0l; } } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val); if (status == ACAMERA_OK) { aeCompensationStep = val.data.r[0]; XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator); } } { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e); maxFrameDuration = (status == ACAMERA_OK) ? 
*e.data.i64 : 0; } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val); if (status == ACAMERA_OK) { sensitivityRange.min_ = val.data.i32[0]; sensitivityRange.max_ = val.data.i32[1]; } else { ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE"); sensitivityRange.min_ = sensitivityRange.max_ = 0; } } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val); if (status == ACAMERA_OK) { activeArraySize[0] = val.data.i32[2]; activeArraySize[1] = val.data.i32[3]; } } { ACameraMetadata_const_entry val = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val); if (status == ACAMERA_OK) { maxRegions[0] = val.data.i32[0]; maxRegions[1] = val.data.i32[1]; maxRegions[2] = val.data.i32[2]; } } { ACameraMetadata_const_entry e = { 0 }; status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e); if (status == ACAMERA_OK) { for (int i = 0; i < e.count; i++) { if (m_params.sceneMode == e.data.u8[i]) { sceneModeSupported = true; break; } } } } // ACameraMetadata_free(camera_metadata); } // open camera { ACameraDevice_StateCallbacks camera_device_state_callbacks; camera_device_state_callbacks.context = this; camera_device_state_callbacks.onDisconnected = ::onDisconnected; camera_device_state_callbacks.onError = ::onError; status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device); if (status != ACAMERA_OK) { XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status); return 1; } } XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height()); status = ACaptureSessionOutputContainer_create(&capture_session_output_container); // setup 
imagereader and its surface media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 5, &mPreviewImageReader); if (mstatus == AMEDIA_OK) { AImageReader_ImageListener listener; listener.context = this; listener.onImageAvailable = ::onImageAvailable; mstatus = AImageReader_setImageListener(mPreviewImageReader, &listener); mstatus = AImageReader_getWindow(mPreviewImageReader, &mPreviewImageWindow); ANativeWindow_acquire(mPreviewImageWindow); } mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), mBurstCaptures, &mImageReader); if (mstatus == AMEDIA_OK) { AImageReader_ImageListener listener; listener.context = this; listener.onImageAvailable = ::onImageAvailable; mstatus = AImageReader_setImageListener(mImageReader, &listener); mstatus = AImageReader_getWindow(mImageReader, &mImageWindow); ANativeWindow_acquire(mImageWindow); } status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget); status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget); for (int idx = 0; idx <= mBurstCaptures; idx++) { CaptureRequest *request = new CaptureRequest(); std::memset(request, 0, sizeof(CaptureRequest)); request->pThis = this; request->imageReader = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageReader : mImageReader; request->imageWindow = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageWindow : mImageWindow; request->imageTarget = (idx == PREVIEW_REQUEST_IDX) ? mPreviewOutputTarget : mOutputTarget; request->templateId = (idx == PREVIEW_REQUEST_IDX) ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; mCaptureRequests.push_back(request); // capture request status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request); ACaptureRequest_setUserContext(request->request, request); // uint8_t ctrlMode = sceneModeSupported ? 
ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO; uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode); uint8_t captureIntent = request->templateId == TEMPLATE_PREVIEW ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent); uint8_t flashMode = ACAMERA_FLASH_MODE_OFF; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode); uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode); uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST; // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode); if (afSupported && m_params.autoFocus) { if (!m_params.zoom) { if (maxRegions[2] > 0) { int32_t centerX = activeArraySize[0] >> 1; int32_t centerY = activeArraySize[1] >> 1; int32_t sizeX = activeArraySize[0] >> 4; int32_t sizeY = activeArraySize[1] >> 4; int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 }; // status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions); if (status == ACAMERA_OK) { #ifdef _DEBUG int aa = 0; #endif } } // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO; uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE; // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode); // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL; // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); // trig = ACAMERA_CONTROL_AF_TRIGGER_START; // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); } } else { uint8_t trig = 
ACAMERA_CONTROL_AF_TRIGGER_START; // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); } if (m_params.sceneMode != 0) { uint8_t sceneMode = m_params.sceneMode; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode); } if (m_params.autoExposure) { uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_); if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) { int32_t compensation = m_params.compensation; if (compensation < aeCompensationRange.min_) { compensation = aeCompensationRange.min_; } if (compensation > aeCompensationRange.max_) { compensation = aeCompensationRange.max_; } // int32_t aeCompensation = aeCompensationRange.max_; status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); if (status != ACAMERA_OK) { int aa = 0; } } if (maxRegions[0] > 0) { int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 }; // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions); if (status == ACAMERA_OK) { #ifdef _DEBUG int aa = 0; #endif } } if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) { uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); mResult.aeLockSetted = 1; } else { uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); } uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; status = ACaptureRequest_setEntry_u8(request->request, 
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); m_precaptureStartTime = m_startTime; // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); } else { uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); if (m_params.sensitivity > 0) { int32_t sensitivity = m_params.sensitivity; status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); } if (m_params.exposureTime > 0) { int64_t exposureTime = m_params.exposureTime; status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); } int64_t frameDuration = maxFrameDuration / 2; // status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration); } status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode); if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) { uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock); mResult.awbLockSetted = 1; XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState); } #if 0 uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode); uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker); #endif if (m_params.zoom) { float zoomRatio = m_params.zoomRatio; // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO; status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio); if (status != ACAMERA_OK) { 
} } status = ACaptureRequest_addTarget(request->request, request->imageTarget); status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput); status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput); } // capture session ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks; camera_capture_session_state_callbacks.context = this; camera_capture_session_state_callbacks.onActive = onSessionActive; camera_capture_session_state_callbacks.onReady = ::onSessionReady; camera_capture_session_state_callbacks.onClosed = onSessionClosed; status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks; capture_session_capture_callbacks.context = this; capture_session_capture_callbacks.onCaptureStarted = 0; capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed; capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted; capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed; capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted; capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted; capture_session_capture_callbacks.onCaptureBufferLost = 0; status = ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId)); ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId); m_startTime = GetMicroTimeStamp(); m_precaptureStartTime = m_startTime; return status == ACAMERA_OK ? 
0 : 1; } void NdkCamera::close() { XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str()); camera_status_t res = ACAMERA_OK; if ((ACameraManager *)camera_manager != NULL) { // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb); } if (capture_session) { // res = ACameraCaptureSession_stopRepeating(capture_session); ACameraCaptureSession_close(capture_session); capture_session = 0; } for (auto it = mCaptureRequests.begin(); it != mCaptureRequests.end(); ++it) { CaptureRequest* request = *it; if (request->request) { res = ACaptureRequest_removeTarget(request->request, request->imageTarget); ACaptureRequest_free(request->request); request->request = 0; } /* if (request->imageTarget) { ACameraOutputTarget_free(request->imageTarget); request->imageTarget = 0; } */ if (request->sessionOutput) { if (capture_session_output_container) { ACaptureSessionOutputContainer_remove(capture_session_output_container, request->sessionOutput); } ACaptureSessionOutput_free(request->sessionOutput); request->sessionOutput = 0; } delete request; } mCaptureRequests.clear(); if (mPreviewOutputTarget != NULL) { ACameraOutputTarget_free(mPreviewOutputTarget); mPreviewOutputTarget = 0; } if (mPreviewImageWindow != NULL) { ANativeWindow_release(mPreviewImageWindow); mPreviewImageWindow = 0; } if (mPreviewImageReader != NULL) { // AImageReader_setImageListener(image_reader, NULL); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); AImageReader_delete(mPreviewImageReader); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); mPreviewImageReader = 0; } if (mOutputTarget != NULL) { ACameraOutputTarget_free(mOutputTarget); mOutputTarget = 0; } if (mImageWindow != NULL) { ANativeWindow_release(mImageWindow); mImageWindow = 0; } if (mImageReader != NULL) { // AImageReader_setImageListener(image_reader, NULL); //XYLOG(XYLOG_SEVERITY_DEBUG, 
"CameraStatus::AImageReader_delete %s", mCameraId.c_str()); AImageReader_delete(mImageReader); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); mImageReader = 0; } if (capture_session_output_container) { ACaptureSessionOutputContainer_free(capture_session_output_container); capture_session_output_container = 0; } if (camera_device) { //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::close device %s, %p", mCameraId.c_str(), camera_device); ACameraDevice_close(camera_device); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed device %s, %p", mCameraId.c_str(), camera_device); camera_device = 0; } //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed %s", mCameraId.c_str()); } void NdkCamera::onImageAvailable(AImageReader* reader) { AImage* image = 0; media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image); if (mstatus != AMEDIA_OK) { // error // https://stackoverflow.com/questions/67063562 if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) { XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); } return; } if (reader == mPreviewImageReader) { if (mResult.avgY == ~0) { uint8_t* y_data = 0; int y_len = 0; AImage_getPlaneData(image, 0, &y_data, &y_len); #if __cplusplus >= 201703L uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); #else uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); #endif avgY = avgY / (uint64_t)y_len; mResult.avgY = avgY; mFinalResult.avgY = avgY; } AImage_delete(image); return; } #if 0 if (!lightDetected) { AImage_getPlaneData(image, 0, &y_data, &y_len); lightDetected = true; #if 1 if (avgY < 50) { if (m_params.autoExposure) { uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); int32_t sensitivity = (avgY < 5) ? 
2000 : (mResult.sensitivity * 60.0 / avgY); status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY); status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY, mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity); } AImage_delete(image); return; } #endif } #endif m_photoTaken = true; XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState); mFinalResult.duration = GetMicroTimeStamp() - m_startTime; mCaptureFrames.push_back(std::shared_ptr(image, AImage_delete)); } void NdkCamera::on_error(const std::string& msg) { } void NdkCamera::onDisconnected(ACameraDevice* device) { } bool NdkCamera::on_image(cv::Mat& rgb) { return false; } bool NdkCamera::onBurstCapture(std::shared_ptr characteristics, const std::vector >& results, const std::vector >& frames) { return false; } void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height) { // ALOGW("nv21 size: %d x %d", nv21_width, nv21_height); // rotate nv21 int w = 0; int h = 0; int rotate_type = 0; cv::Mat nv21_rotated; const unsigned char* yuv420data = nv21; // TODO !!!??? /* if (camera_->GetSensorOrientation(&facing, &angle)) { if (facing == ACAMERA_LENS_FACING_FRONT) { imageRotation = (angle + rotation_) % 360; imageRotation = (360 - imageRotation) % 360; } else { imageRotation = (angle - rotation_ + 360) % 360; } } */ int orgWidth = mWidth; int orgHeight = mHeight; // int co = camera_orientation > 0 ? 
camera_orientation + 90 : camera_orientation; if (m_params.orientation != 0) { int co = 0; if (camera_facing == ACAMERA_LENS_FACING_FRONT) { co = (camera_orientation + (m_params.orientation - 1) * 90) % 360; co = (360 - co) % 360; } else { co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360; } XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing); // int co = 0; if (co == 0) { w = nv21_width; h = nv21_height; rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1; } else if (co == 90) { w = nv21_height; h = nv21_width; orgWidth = mHeight; orgHeight = mWidth; rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6; } else if (co == 180) { w = nv21_width; h = nv21_height; rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3; } else if (co == 270) { w = nv21_height; h = nv21_width; orgWidth = mHeight; orgHeight = mWidth; rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 7 : 8; } nv21_rotated.create(h + h / 2, w, CV_8UC1); ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); yuv420data = nv21_rotated.data; } else { w = nv21_width; h = nv21_height; XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing); } // nv21_rotated to rgb cv::Mat rgb; if (w == orgWidth && h == orgHeight) { rgb.create(h, w, CV_8UC3); // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data); } else { cv::Mat org(h, w, CV_8UC3); ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data); if (w * orgHeight == h * orgWidth) // Same Ratio { cv::resize(org, rgb, cv::Size(orgWidth, orgHeight)); } else { // Crop image if (w > orgWidth && h >= orgHeight) { int left = (w - orgWidth) / 2; int top = (h - orgHeight) / 2; rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth)); } else { rgb = org; } } } on_image(rgb); } void NdkCamera::onSessionReady(ACameraCaptureSession *session) { if (m_photoTaken) { 
// NOTE(review): the lines below are the tail of NdkCamera::onSessionReady,
// whose opening lines precede this span; code kept verbatim.
// Sanity check: every captured frame must have a matching capture result.
AASSERT(mCaptureFrames.size() == mCaptureResults.size(), "Frame size %u doesn't equal to result size %u", (uint32_t)mCaptureFrames.size(), (uint32_t)mCaptureResults.size());
#ifndef NDEBUG
// Debug builds: dump every non-YUV frame (JPEG or RAW) to a temp file for inspection.
for (int idx = 0; idx < mCaptureFrames.size(); idx++)
{
    std::shared_ptr spImage = mCaptureFrames[idx];
    int32_t format;
    AImage_getFormat(spImage.get(), &format);
    if (format == AIMAGE_FORMAT_YUV_420_888)
    {
    }
    else
    {
        ALOGW("Capture Available TID=%lld", (long long)getThreadIdOfULL());
        uint32_t frameNumber = mFrameNumber.fetch_add(1);
        std::string path = "/sdcard/com.xypower.mpapp/tmp/" + std::to_string(frameNumber);
        if (format == AIMAGE_FORMAT_JPEG)
        {
            path += ".jpg";
            writeJpegFile(spImage.get(), path.c_str());
        }
        else
        {
            // Anything that is neither YUV nor JPEG is treated as RAW and
            // wrapped into a DNG with the matching capture metadata.
            path += ".dng";
            writeRawFile(spImage.get(), mCharacteristics.get(), mCaptureResults[idx].get(), path.c_str());
        }
    }
}
#endif // NDEBUG
}
}

// Per-frame partial-result callback; currently unused.
void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
}

// Completed-capture callback. For the repeating preview request this drives
// the 3A (AF/AE/AWB) convergence state machine and, once every enabled 3A
// block reports converged/locked (or the focus timeout elapses), stops the
// preview and fires the queued burst of still-capture requests exactly once.
// For the still-capture requests it archives a copy of the result metadata.
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    void* context = NULL;
    ACaptureRequest_getUserContext(request, &context);
    // NOTE(review): template argument of reinterpret_cast appears stripped in
    // this copy of the file (should name CaptureRequest*); confirm upstream.
    CaptureRequest* pCaptureRequest = reinterpret_cast(context);

    if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request)
    {
        // Burst already fired: ignore any further preview results.
        if (mCaptureTriggered)
        {
            return;
        }

        bool readyForCapture = true;
        camera_status_t status = ACAMERA_ERROR_BASE;
        unsigned long long ts = GetMicroTimeStamp();
        uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE;
        uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE;
        uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE;
        ACameraMetadata_const_entry val = { 0 };

        // Read the current AF/AE/AWB states from the result metadata; a failed
        // read falls back to INACTIVE, which counts as converged below.
        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
        aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
        awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
        afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;

        // ALOGW("Preview State AFS=%u AES=%u AWBS=%u Time=%u",
        //       (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));

        // Check if timeout
        // Within the focus timeout: keep waiting until all enabled 3A blocks
        // report a converged/locked state. After the timeout: capture anyway.
        if (ts - m_startTime < m_params.focusTimeout)
        {
            if (afSupported && (m_params.autoFocus != 0))
            {
                /*
                if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED)
                {
                    // Will lock it
                    if (mResult.afLockSetted == 0)
                    {
                        uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
                        status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
                        mResult.afLockSetted = 1;
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState);
                        readyForCapture = false;
                    }
                }
                */
                // Not yet focused (neither passively focused nor locked):
                // hold off the capture.
                if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                    afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                    afState != ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
                // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE)
                {
                    //XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused");
                    readyForCapture = false;
                }
            }

            if (m_params.autoExposure != 0)
            {
                if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE)
                {
                    // AE is stuck in PRECAPTURE: cancel and restart the
                    // precapture sequence on the repeating request.
                    uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
                    status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
                    status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
                    readyForCapture = false;
                    numberOfPrecaptures = 0;
                    m_precaptureStartTime = ts;
                }

                // Either require a hard AE lock (when available and requested)
                // or accept CONVERGED / FLASH_REQUIRED / LOCKED.
                if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
                {
                    if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked");
#endif
                    }
                }
                else
                {
                    if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED &&
                        aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED &&
                        aeState != ACAMERA_CONTROL_AE_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked");
#endif
                    }
                }
            }

            if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO)
            {
                // Same pattern for AWB: hard lock if requested and available,
                // otherwise accept CONVERGED or LOCKED.
                if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
                {
                    if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked");
#endif
                    }
                }
                else
                {
                    if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED &&
                        awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked");
#endif
                    }
                }
            }
        }
        else
        {
#if 0
            XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u",
                  (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
#endif
        }

        if (readyForCapture && mCaptureRequests.size() > 1)
        {
            ALOGW("Ready for Capture AFS=%u AES=%u AWBS=%u Time=%u",
                  (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));

            // Collect every request after the preview slot (index 0) into the burst.
            // NOTE(review): vector template arguments appear stripped in this
            // copy of the file (ACaptureRequest* / int) — confirm upstream.
            std::vector requests;
            std::vector sequenceIds;
            requests.reserve(mCaptureRequests.size() - 1);
            sequenceIds.resize(mCaptureRequests.size() - 1, 0);
            for (int idx = 1; idx < mCaptureRequests.size(); idx++)
            {
                requests.push_back(mCaptureRequests[idx]->request);
            }
            ALOGW("Will Stop Repeating Request");
            status = ACameraCaptureSession_stopRepeating(capture_session);
            ALOGW("Finished Repeating Request");

            ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
            capture_session_capture_cb.context = this;
            capture_session_capture_cb.onCaptureStarted = 0;
            capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed;
            capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted;
            capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed;
            capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
            capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted;
            capture_session_capture_cb.onCaptureBufferLost = 0;

            int numberOfRequests = requests.size();
            status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb, numberOfRequests, &requests[0], &sequenceIds[0]);
            ALOGW("Capture num = %d ", numberOfRequests);
            // Remember each burst request's sequence id so its results can be
            // matched back later.
            for (int idx = 0; idx < sequenceIds.size(); idx++)
            {
                // mCaptureRequests[idx + 1]->request = requests[idx];
                mCaptureRequests[idx + 1]->sessionSequenceId = sequenceIds[idx];
                ALOGW("Capture Request idx=%d sequenceId=%d", idx, sequenceIds[idx]);
            }
            mCaptureTriggered = true;
        }
    }
    else
    {
        // Result belongs to one of the burst still-capture requests: keep a
        // deep copy of the metadata (paired positionally with mCaptureFrames).
        uint64_t tid = getThreadIdOfULL();
        ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
        ACameraMetadata* pCopy = ACameraMetadata_copy(result);
        mCaptureResults.push_back(std::shared_ptr(pCopy, ACameraMetadata_free));
    }
}

// Capture-failure callback: logs the failure and reports it through on_error,
// but only before any photo has been taken (late failures are ignored).
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
    XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);

    // NOTE(review): 32 bytes may truncate the formatted message for large
    // reason codes; snprintf guarantees NUL-termination either way.
    char msg[32] = { 0 };
    snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
    if (!m_photoTaken)
    {
        on_error(msg);
    }
}

// Device-error callback: logs the error and (if no photo has been taken yet)
// forwards it to on_error. (Body continues past this chunk boundary.)
void NdkCamera::onError(ACameraDevice* device, int error)
{
    if (ACAMERA_ERROR_CAMERA_DEVICE == error)
    {
    }
    XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d PhotoTaken=%d", ACameraDevice_getId(device), error, m_photoTaken ?
1 : 0);

// NOTE(review): the lines above/below complete NdkCamera::onError, whose
// opening lines precede this span; code kept verbatim.
if (!m_photoTaken)
{
    std::string msg = "NdkCamera error code=" + std::to_string(error);
    on_error(msg);
}
}

// Camera-manager availability callback: records the camera id as usable.
// m_locker guards m_availableCameras, which is mutated from the camera
// service callback thread.
void NdkCamera::onAvailabilityCallback(const char* cameraId)
{
    std::string s(cameraId);
    m_locker.lock();
    m_availableCameras.insert(s);
    m_locker.unlock();
}

// Camera-manager unavailability callback: removes the camera id from the set.
void NdkCamera::onUnavailabilityCallback(const char* cameraId)
{
    std::string s(cameraId);
    m_locker.lock();
    m_availableCameras.erase(s);
    m_locker.unlock();
}

// Returns true if the given camera id is currently in the available set.
// Thread-safe with respect to the availability callbacks above.
bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
{
    bool existed = false;
    m_locker.lock();
    existed = (m_availableCameras.find(cameraId) != m_availableCameras.cend());
    m_locker.unlock();
    return existed;
}

// Image format requested from the AImageReader; overridable by subclasses.
int32_t NdkCamera::getOutputFormat() const
{
    return AIMAGE_FORMAT_YUV_420_888;
}

// Session setup entry point. NOTE(review): the entire implementation below is
// commented out — as written this overload only declares an unused local and
// returns. Confirm whether session creation now happens elsewhere before
// removing the dead code.
void NdkCamera::CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manualPreview, int32_t imageRotation, int32_t width, int32_t height)
{
    media_status_t status;

    /*
    // Create output from this app's ANativeWindow, and add into output container
    requests[PREVIEW_REQUEST_IDX].outputNativeWindow = previewWindow;
    requests[PREVIEW_REQUEST_IDX].templateId = TEMPLATE_PREVIEW;
    //requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
    //requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;

    ACaptureSessionOutputContainer_create(&capture_session_output_container);
    for (auto& req : requests)
    {
        if (!req.outputNativeWindow) continue;

        ANativeWindow_acquire(req.outputNativeWindow);
        ACaptureSessionOutput_create(req.outputNativeWindow, &req.sessionOutput);
        ACaptureSessionOutputContainer_add(capture_session_output_container, req.sessionOutput);
        ACameraOutputTarget_create(req.outputNativeWindow, &req.target);
        ACameraDevice_createCaptureRequest(camera_device, req.templateId, &req.request);
        ACaptureRequest_addTarget(req.request, req.target);

        // To capture images
        media_status_t mstatus = AImageReader_new(width, height, getOutputFormat(), 1, &req.imageReader);
        if (mstatus == AMEDIA_OK)
        {
            AImageReader_ImageListener listener;
            listener.context = this;
            listener.onImageAvailable = ::onImageAvailable;
            mstatus = AImageReader_setImageListener(req.imageReader, &listener);
        }
        // req.imageReader = createJpegReader();
        status = AImageReader_getWindow(req.imageReader, &req.imageWindow);
        ANativeWindow_acquire(req.outputNativeWindow);
        ACameraOutputTarget_create(req.imageWindow, &req.imageTarget);
        ACaptureRequest_addTarget(req.request, req.imageTarget);
        ACaptureSessionOutput_create(req.imageWindow, &req.imageOutput);
        ACaptureSessionOutputContainer_add(capture_session_output_container, req.imageOutput);

        //ACameraOutputTarget_create(imageWindow, &imageTarget);
        //ACaptureRequest_addTarget(req.request_, imageTarget);
        //ACaptureSessionOutput_create(imageWindow, &imageOutput);
        //ACaptureSessionOutputContainer_add(outputContainer_, imageOutput);
    }

    // Create a capture session for the given preview request
    ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
    camera_capture_session_state_callbacks.context = this;
    camera_capture_session_state_callbacks.onActive = onSessionActive;
    camera_capture_session_state_callbacks.onReady = ::onSessionReady;
    camera_capture_session_state_callbacks.onClosed = onSessionClosed;
    ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);

    if (jpgWindow)
    {
        ACaptureRequest_setEntry_i32(requests[JPG_CAPTURE_REQUEST_IDX].request, ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
    }

    if (!manualPreview)
    {
        return;
    }
    //
    // Only preview request is in manual mode, JPG is always in Auto mode
    // JPG capture mode could also be switch into manual mode and control
    // the capture parameters, this sample leaves JPG capture to be auto mode
    // (auto control has better effect than author's manual control)
    //uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
    //ACaptureRequest_setEntry_u8(requests[PREVIEW_REQUEST_IDX].request,
    //                            ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
    //ACaptureRequest_setEntry_i32(requests[PREVIEW_REQUEST_IDX].request,
    //                             ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity));
    //ACaptureRequest_setEntry_i64(requests[PREVIEW_REQUEST_IDX].request,
    //                             ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime));
    */
}

// Convenience overload: preview-only session at a default 1920x1080.
void NdkCamera::CreateSession(ANativeWindow* previewWindow)
{
    CreateSession(previewWindow, NULL, false, 0, 1920, 1080);
}

// Session teardown. NOTE(review): the implementation is commented out — as
// written this releases nothing; confirm teardown happens elsewhere.
void NdkCamera::DestroySession()
{
    /*
    for (auto& req : requests)
    {
        if (!req.outputNativeWindow) continue;

        ACaptureRequest_removeTarget(req.request, req.target);
        ACaptureRequest_free(req.request);
        ACameraOutputTarget_free(req.target);

        ACaptureSessionOutputContainer_remove(capture_session_output_container, req.sessionOutput);
        ACaptureSessionOutput_free(req.sessionOutput);

        ANativeWindow_release(req.outputNativeWindow);

        AImageReader_delete(req.imageReader);
        req.imageReader = nullptr;
    }
    */
}

// Writes the single JPEG plane of an AImage verbatim to `path`.
// Silently does nothing if the file cannot be opened or the plane is empty.
void NdkCamera::writeJpegFile(AImage *image, const char* path)
{
    int planeCount;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
    // ASSERT(status == AMEDIA_OK && planeCount == 1,
    //        "Error: getNumberOfPlanes() planeCount = %d", planeCount);
    uint8_t *data = nullptr;
    int len = 0;
    AImage_getPlaneData(image, 0, &data, &len);

    FILE *file = fopen(path, "wb");
    if (file)
    {
        if (data && len)
        {
            fwrite(data, 1, len, file);
        }
        fclose(file);
    }
}

// Wraps a RAW sensor frame into a DNG (using the camera characteristics and
// the per-capture result metadata) and writes it to `path`.
// (Body continues past this chunk boundary.)
void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path)
{
    // dngCreator.
int32_t width; int32_t height; AImage_getWidth(image, &width); AImage_getHeight(image, &height); int planeCount; media_status_t status = AImage_getNumberOfPlanes(image, &planeCount); // ASSERT(status == AMEDIA_OK && planeCount == 1, // "Error: getNumberOfPlanes() planeCount = %d", planeCount); uint8_t *data = nullptr; int len = 0; AImage_getPlaneData(image, 0, &data, &len); DngCreator dngCreator(characteristics, result); std::vector dngFile; // std::vector& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset); dngCreator.writeInputBuffer(dngFile, data, len, width, height, 0); if (dngFile.empty()) { return; } FILE *file = fopen(path, "wb"); if (file) { if (data && len) { fwrite(&dngFile[0], 1, dngFile.size(), file); } fclose(file); } } bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height) { media_status_t status; status = AImage_getWidth(image, &width); status = AImage_getHeight(image, &height); int32_t y_pixelStride = 0; int32_t u_pixelStride = 0; int32_t v_pixelStride = 0; AImage_getPlanePixelStride(image, 0, &y_pixelStride); AImage_getPlanePixelStride(image, 1, &u_pixelStride); AImage_getPlanePixelStride(image, 2, &v_pixelStride); int32_t y_rowStride = 0; int32_t u_rowStride = 0; int32_t v_rowStride = 0; AImage_getPlaneRowStride(image, 0, &y_rowStride); AImage_getPlaneRowStride(image, 1, &u_rowStride); AImage_getPlaneRowStride(image, 2, &v_rowStride); uint8_t* y_data = 0; uint8_t* u_data = 0; uint8_t* v_data = 0; int y_len = 0; int u_len = 0; int v_len = 0; AImage_getPlaneData(image, 0, &y_data, &y_len); AImage_getPlaneData(image, 1, &u_data, &u_len); AImage_getPlaneData(image, 2, &v_data, &v_len); if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) { // already nv21 :) // on_image((unsigned char*)y_data, (int)width, (int)height); 
} else { // construct nv21 unsigned char* nv21 = new unsigned char[width * height + width * height / 2]; { // Y unsigned char* yptr = nv21; for (int y = 0; y < height; y++) { const unsigned char* y_data_ptr = y_data + y_rowStride * y; for (int x = 0; x < width; x++) { yptr[0] = y_data_ptr[0]; yptr++; y_data_ptr += y_pixelStride; } } // UV unsigned char* uvptr = nv21 + width * height; for (int y = 0; y < height / 2; y++) { const unsigned char* v_data_ptr = v_data + v_rowStride * y; const unsigned char* u_data_ptr = u_data + u_rowStride * y; for (int x = 0; x < width / 2; x++) { uvptr[0] = v_data_ptr[0]; uvptr[1] = u_data_ptr[0]; uvptr += 2; v_data_ptr += v_pixelStride; u_data_ptr += u_pixelStride; } } } // on_image((unsigned char*)nv21, (int)width, (int)height); delete[] nv21; } }