Optimize photo capture

Branch: TempBranch
Author: Matthew, 8 months ago
parent 358505aeb0
commit cf0f3f52d3

@@ -582,6 +582,10 @@ int NdkCamera::open(const std::string& cameraId) {
         mstatus = AImageReader_getWindow(mPreviewImageReader, &mPreviewImageWindow);
         ANativeWindow_acquire(mPreviewImageWindow);
     }
+    status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget);
+    status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
+    status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
+
     mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 2, &mImageReader);
     if (mstatus == AMEDIA_OK)
@@ -593,10 +597,15 @@ int NdkCamera::open(const std::string& cameraId) {
         mstatus = AImageReader_getWindow(mImageReader, &mImageWindow);
         ANativeWindow_acquire(mImageWindow);
     }
-    status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget);
     status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
+    status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
+    status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
+
+    CaptureRequest *request = CreateRequest(true);
+    mCaptureRequests.push_back(request);
+
+#if 0
     for (int idx = 0; idx <= burstCaptures; idx++)
     {
         CaptureRequest *request = new CaptureRequest();
@@ -610,8 +619,6 @@ int NdkCamera::open(const std::string& cameraId) {
         request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
         request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
-        mCaptureRequests.push_back(request);
-
         // capture request
         status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
         ACaptureRequest_setUserContext(request->request, request);
@@ -792,6 +799,7 @@ int NdkCamera::open(const std::string& cameraId) {
         status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
         status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
     }
+#endif
 
     // capture session
     ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
@@ -822,6 +830,205 @@ int NdkCamera::open(const std::string& cameraId) {
     return status == ACAMERA_OK ? 0 : 1;
 }
 
+NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
+{
+    camera_status_t status = ACAMERA_OK;
+
+    CaptureRequest *request = new CaptureRequest();
+    std::memset(request, 0, sizeof(CaptureRequest));
+
+    request->pThis = this;
+    request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader;
+    request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow;
+    request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
+    request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput;
+    request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
+    // mCaptureRequests.push_back(request);
+
+    // capture request
+    status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
+    ACaptureRequest_setUserContext(request->request, request);
+
+    // uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
+    uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
+
+    uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
+
+    uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode);
+
+    uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
+
+    uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST;
+    // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode);
+
+    if (afSupported && m_params.autoFocus)
+    {
+        if (!m_params.zoom)
+        {
+            if (maxRegions[2] > 0)
+            {
+                int32_t centerX = activeArraySize[0] >> 1;
+                int32_t centerY = activeArraySize[1] >> 1;
+                int32_t sizeX = activeArraySize[0] >> 4;
+                int32_t sizeY = activeArraySize[1] >> 4;
+                int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 };
+                // status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
+                if (status == ACAMERA_OK)
+                {
+#ifdef _DEBUG
+                    int aa = 0;
+#endif
+                }
+            }
+
+            // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+            uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+            // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
+            status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
+
+            // uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
+            // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
+            // trig = ACAMERA_CONTROL_AF_TRIGGER_START;
+            // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
+        }
+    }
+    else
+    {
+        uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
+        // status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
+    }
+
+    if (m_params.sceneMode != 0)
+    {
+        uint8_t sceneMode = m_params.sceneMode;
+        status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
+    }
+
+    if (m_params.autoExposure)
+    {
+        uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
+        status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+        // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);
+
+        if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
+        {
+            int32_t compensation = m_params.compensation;
+            if (compensation < aeCompensationRange.min_)
+            {
+                compensation = aeCompensationRange.min_;
+            }
+            if (compensation > aeCompensationRange.max_)
+            {
+                compensation = aeCompensationRange.max_;
+            }
+            // int32_t aeCompensation = aeCompensationRange.max_;
+            status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
+            if (status != ACAMERA_OK)
+            {
+                int aa = 0;
+            }
+        }
+
+        if (maxRegions[0] > 0)
+        {
+            int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 };
+            // status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
+            if (status == ACAMERA_OK)
+            {
+#ifdef _DEBUG
+                int aa = 0;
+#endif
+            }
+        }
+
+        if (isPreviewRequest)
+        {
+            if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
+            {
+                uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
+                status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
+                XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
+                mResult.aeLockSetted = 1;
+            }
+            else
+            {
+                uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
+                status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
+                XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
+            }
+
+            uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
+            status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
+            XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
+            m_precaptureStartTime = m_startTime;
+
+            // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
+        }
+    }
+    else
+    {
+        uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
+        status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+
+        if (m_params.sensitivity > 0)
+        {
+            int32_t sensitivity = m_params.sensitivity;
+            status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+        }
+        if (m_params.exposureTime > 0)
+        {
+            int64_t exposureTime = m_params.exposureTime;
+            status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
+        }
+
+        int64_t frameDuration = maxFrameDuration / 2;
+        // status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration);
+    }
+
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
+    if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
+    {
+        uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
+        status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
+        mResult.awbLockSetted = 1;
+        XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState);
+    }
+
+#if 0
+    uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);
+    uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
+    status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
+#endif
+
+    if (m_params.zoom)
+    {
+        float zoomRatio = m_params.zoomRatio;
+        // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
+        status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio);
+        if (status != ACAMERA_OK)
+        {
+        }
+    }
+
+    status = ACaptureRequest_addTarget(request->request, request->imageTarget);
+    // status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
+    // status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
+
+    return request;
+}
 
 void NdkCamera::close()
 {
     XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
@@ -860,17 +1067,6 @@ void NdkCamera::close()
         }
         */
-        if (request->sessionOutput)
-        {
-            if (capture_session_output_container)
-            {
-                ACaptureSessionOutputContainer_remove(capture_session_output_container, request->sessionOutput);
-            }
-            ACaptureSessionOutput_free(request->sessionOutput);
-            request->sessionOutput = 0;
-        }
-
         delete request;
     }
     mCaptureRequests.clear();
@@ -919,6 +1115,26 @@ void NdkCamera::close()
         mImageReader = 0;
     }
 
+    if (mPreviewSessionOutput != NULL)
+    {
+        if (capture_session_output_container)
+        {
+            ACaptureSessionOutputContainer_remove(capture_session_output_container, mPreviewSessionOutput);
+        }
+        ACaptureSessionOutput_free(mPreviewSessionOutput);
+        mPreviewSessionOutput = 0;
+    }
+
+    if (mSessionOutput != NULL)
+    {
+        if (capture_session_output_container)
+        {
+            ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput);
+        }
+        ACaptureSessionOutput_free(mSessionOutput);
+        mSessionOutput = 0;
+    }
+
     if (capture_session_output_container)
     {
         ACaptureSessionOutputContainer_free(capture_session_output_container);
@@ -1304,20 +1520,27 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
 #endif
     }
 
-    if (readyForCapture && mCaptureRequests.size() > 1)
+    if (readyForCapture/* && mCaptureRequests.size() > 1*/)
     {
         ALOGW("Ready for Capture AFS=%u AES=%u AWBS=%u Time=%u",
             (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
 
+        uint32_t burstCaptures = getBurstCaptures();
+        if (burstCaptures == 0)
+        {
+            burstCaptures = 1;
+        }
+
         std::vector<ACaptureRequest*> requests;
         int sequenceId = 0;
-        requests.reserve(mCaptureRequests.size() - 1);
-        for (int idx = 1; idx < mCaptureRequests.size(); idx++)
+        requests.reserve(burstCaptures);
+        for (int idx = 0; idx < burstCaptures; idx++)
        {
+            CaptureRequest* request = CreateRequest(false);
+            mCaptureRequests.push_back(request);
+
             // CopyPreviewRequest(mCaptureRequests[idx]->request, result);
-            requests.push_back(mCaptureRequests[idx]->request);
+            requests.push_back(request->request);
         }
 
         // ALOGW("Will Stop Repeating Request");

@@ -123,6 +123,7 @@ public:
         AImageReader* imageReader;
         ANativeWindow* imageWindow;
         ACameraOutputTarget* imageTarget;
         ACaptureSessionOutput* sessionOutput;
         ACaptureRequest* request;
@@ -157,6 +158,8 @@ public:
     void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
     void CreateSession(ANativeWindow* previewWindow);
 
+    CaptureRequest* CreateRequest(bool isPreviewRequest);
+
     void DestroySession();
 
     virtual bool on_image(cv::Mat& rgb);
@@ -188,6 +191,7 @@ protected:
     std::set<std::string> m_availableCameras;
 
     CAMERA_PARAMS m_params;
+    DisplayDimension foundRes;
     int camera_facing;
     int camera_orientation;
     bool m_firstFrame;
@@ -233,10 +237,12 @@ protected:
     AImageReader* mPreviewImageReader;
     ANativeWindow* mPreviewImageWindow;
     ACameraOutputTarget* mPreviewOutputTarget;
+    ACaptureSessionOutput* mPreviewSessionOutput;
 
     AImageReader* mImageReader;
     ANativeWindow* mImageWindow;
     ACameraOutputTarget* mOutputTarget;
+    ACaptureSessionOutput* mSessionOutput;
 
     std::shared_ptr<ACameraMetadata> mCharacteristics;
     std::vector<CaptureRequest*> mCaptureRequests;
