|
|
|
@ -216,7 +216,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
|
|
|
|
|
mResult = { 0 };
|
|
|
|
|
mLdr = ~0;
|
|
|
|
|
mFinalLdr = 0;
|
|
|
|
|
mFinalBurstCaptures = m_params.burstRawCapture == 0 ? 1 : m_params.burstCaptures;
|
|
|
|
|
mFinalBurstCaptures = m_params.burstRawCapture == 0 ? m_params.burstCaptures : m_params.burstCaptures;
|
|
|
|
|
if (mFinalBurstCaptures == 0)
|
|
|
|
|
{
|
|
|
|
|
mFinalBurstCaptures = 1;
|
|
|
|
@ -1004,6 +1004,7 @@ void NdkCamera::close()
|
|
|
|
|
mPreviewResults.reset();
|
|
|
|
|
mCaptureResults.clear();
|
|
|
|
|
mCaptureFrames.clear();
|
|
|
|
|
mCaptureResultMap.clear();
|
|
|
|
|
|
|
|
|
|
if ((ACameraManager *)camera_manager != NULL)
|
|
|
|
|
{
|
|
|
|
@ -1216,134 +1217,157 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
uint32_t burstCaptures = getBurstCaptures();
|
|
|
|
|
uint64_t ts = GetMicroTimeStamp();
|
|
|
|
|
size_t expectedTimes = mCaptureRequests.size() - 1;
|
|
|
|
|
if (burstCaptures == 0)
|
|
|
|
|
{
|
|
|
|
|
burstCaptures = 1;
|
|
|
|
|
}
|
|
|
|
|
if (burstCaptures == 1)
|
|
|
|
|
if (m_params.burstRawCapture == 0)
|
|
|
|
|
{
|
|
|
|
|
mstatus = AImageReader_acquireNextImage(reader, &image);
|
|
|
|
|
if (mstatus != AMEDIA_OK)
|
|
|
|
|
while (1)
|
|
|
|
|
{
|
|
|
|
|
// https://stackoverflow.com/questions/67063562
|
|
|
|
|
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
|
|
|
|
|
mstatus = AImageReader_acquireNextImage(reader, &image);
|
|
|
|
|
if (mstatus != AMEDIA_OK)
|
|
|
|
|
{
|
|
|
|
|
if (mCaptureFrames.size() < burstCaptures)
|
|
|
|
|
// https://stackoverflow.com/questions/67063562
|
|
|
|
|
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus);
|
|
|
|
|
if (mCaptureFrames.size() < burstCaptures)
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
unsigned long long ts = GetMicroTimeStamp();
|
|
|
|
|
int32_t format;
|
|
|
|
|
mstatus = AImage_getFormat(image, &format);
|
|
|
|
|
|
|
|
|
|
int32_t format;
|
|
|
|
|
mstatus = AImage_getFormat(image, &format);
|
|
|
|
|
|
|
|
|
|
if (format == AIMAGE_FORMAT_YUV_420_888)
|
|
|
|
|
{
|
|
|
|
|
int32_t width;
|
|
|
|
|
int32_t height;
|
|
|
|
|
mstatus = AImage_getWidth(image, &width);
|
|
|
|
|
mstatus = AImage_getHeight(image, &height);
|
|
|
|
|
|
|
|
|
|
int32_t y_pixelStride = 0;
|
|
|
|
|
int32_t u_pixelStride = 0;
|
|
|
|
|
int32_t v_pixelStride = 0;
|
|
|
|
|
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
|
|
|
|
|
|
|
|
|
|
int32_t y_rowStride = 0;
|
|
|
|
|
int32_t u_rowStride = 0;
|
|
|
|
|
int32_t v_rowStride = 0;
|
|
|
|
|
AImage_getPlaneRowStride(image, 0, &y_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(image, 1, &u_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(image, 2, &v_rowStride);
|
|
|
|
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
|
|
uint8_t* u_data = 0;
|
|
|
|
|
uint8_t* v_data = 0;
|
|
|
|
|
int y_len = 0;
|
|
|
|
|
int u_len = 0;
|
|
|
|
|
int v_len = 0;
|
|
|
|
|
AImage_getPlaneData(image, 0, &y_data, &y_len);
|
|
|
|
|
AImage_getPlaneData(image, 1, &u_data, &u_len);
|
|
|
|
|
AImage_getPlaneData(image, 2, &v_data, &v_len);
|
|
|
|
|
|
|
|
|
|
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
|
|
|
|
|
{
|
|
|
|
|
// already nv21
|
|
|
|
|
ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
|
|
|
|
|
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
cv::Mat frame;
|
|
|
|
|
if (format == AIMAGE_FORMAT_YUV_420_888)
|
|
|
|
|
{
|
|
|
|
|
// construct nv21
|
|
|
|
|
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
|
|
|
|
|
int32_t width;
|
|
|
|
|
int32_t height;
|
|
|
|
|
mstatus = AImage_getWidth(image, &width);
|
|
|
|
|
mstatus = AImage_getHeight(image, &height);
|
|
|
|
|
|
|
|
|
|
int32_t y_pixelStride = 0;
|
|
|
|
|
int32_t u_pixelStride = 0;
|
|
|
|
|
int32_t v_pixelStride = 0;
|
|
|
|
|
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
|
|
|
|
|
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
|
|
|
|
|
|
|
|
|
|
int32_t y_rowStride = 0;
|
|
|
|
|
int32_t u_rowStride = 0;
|
|
|
|
|
int32_t v_rowStride = 0;
|
|
|
|
|
AImage_getPlaneRowStride(image, 0, &y_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(image, 1, &u_rowStride);
|
|
|
|
|
AImage_getPlaneRowStride(image, 2, &v_rowStride);
|
|
|
|
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
|
|
uint8_t* u_data = 0;
|
|
|
|
|
uint8_t* v_data = 0;
|
|
|
|
|
int y_len = 0;
|
|
|
|
|
int u_len = 0;
|
|
|
|
|
int v_len = 0;
|
|
|
|
|
AImage_getPlaneData(image, 0, &y_data, &y_len);
|
|
|
|
|
AImage_getPlaneData(image, 1, &u_data, &u_len);
|
|
|
|
|
AImage_getPlaneData(image, 2, &v_data, &v_len);
|
|
|
|
|
|
|
|
|
|
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
|
|
|
|
|
{
|
|
|
|
|
// Y
|
|
|
|
|
uint8_t* yptr = nv21;
|
|
|
|
|
for (int y = 0; y < height; y++)
|
|
|
|
|
// already nv21
|
|
|
|
|
ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
|
|
|
|
|
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame);
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
// construct nv21
|
|
|
|
|
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
|
|
|
|
|
{
|
|
|
|
|
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width; x++)
|
|
|
|
|
// Y
|
|
|
|
|
uint8_t* yptr = nv21;
|
|
|
|
|
for (int y = 0; y < height; y++)
|
|
|
|
|
{
|
|
|
|
|
yptr[0] = y_data_ptr[0];
|
|
|
|
|
yptr++;
|
|
|
|
|
y_data_ptr += y_pixelStride;
|
|
|
|
|
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width; x++)
|
|
|
|
|
{
|
|
|
|
|
yptr[0] = y_data_ptr[0];
|
|
|
|
|
yptr++;
|
|
|
|
|
y_data_ptr += y_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// UV
|
|
|
|
|
uint8_t* uvptr = nv21 + width * height;
|
|
|
|
|
for (int y = 0; y < height / 2; y++)
|
|
|
|
|
{
|
|
|
|
|
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
|
|
|
|
|
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width / 2; x++)
|
|
|
|
|
// UV
|
|
|
|
|
uint8_t* uvptr = nv21 + width * height;
|
|
|
|
|
for (int y = 0; y < height / 2; y++)
|
|
|
|
|
{
|
|
|
|
|
uvptr[0] = v_data_ptr[0];
|
|
|
|
|
uvptr[1] = u_data_ptr[0];
|
|
|
|
|
uvptr += 2;
|
|
|
|
|
v_data_ptr += v_pixelStride;
|
|
|
|
|
u_data_ptr += u_pixelStride;
|
|
|
|
|
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
|
|
|
|
|
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
|
|
|
|
|
for (int x = 0; x < width / 2; x++)
|
|
|
|
|
{
|
|
|
|
|
uvptr[0] = v_data_ptr[0];
|
|
|
|
|
uvptr[1] = u_data_ptr[0];
|
|
|
|
|
uvptr += 2;
|
|
|
|
|
v_data_ptr += v_pixelStride;
|
|
|
|
|
u_data_ptr += u_pixelStride;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation,
|
|
|
|
|
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
|
|
|
|
|
ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation,
|
|
|
|
|
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame);
|
|
|
|
|
|
|
|
|
|
delete[] nv21;
|
|
|
|
|
delete[] nv21;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
m_photoTaken = true;
|
|
|
|
|
m_photoTaken = true;
|
|
|
|
|
|
|
|
|
|
AImage_delete(image);
|
|
|
|
|
int64_t frameTs = 0;
|
|
|
|
|
mstatus = AImage_getTimestamp(image, &frameTs);
|
|
|
|
|
AImage_delete(image);
|
|
|
|
|
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result;
|
|
|
|
|
bool captureCompleted = false;
|
|
|
|
|
bool captureDispatchable = false;
|
|
|
|
|
m_locker.lock();
|
|
|
|
|
if (!mCaptureResults.empty())
|
|
|
|
|
{
|
|
|
|
|
captureCompleted = true;
|
|
|
|
|
result = mCaptureResults[0];
|
|
|
|
|
}
|
|
|
|
|
if (captureCompleted && !mCaptureDispatched)
|
|
|
|
|
{
|
|
|
|
|
mCaptureDispatched = true;
|
|
|
|
|
captureDispatchable = true;
|
|
|
|
|
}
|
|
|
|
|
m_locker.unlock();
|
|
|
|
|
bool captureCompleted = false;
|
|
|
|
|
bool captureDispatchable = false;
|
|
|
|
|
m_locker.lock();
|
|
|
|
|
if (!frame.empty())
|
|
|
|
|
{
|
|
|
|
|
mOneFrame.push_back(std::make_pair<>(frameTs, frame));
|
|
|
|
|
}
|
|
|
|
|
if (mOneFrame.size() >= expectedTimes)
|
|
|
|
|
{
|
|
|
|
|
bool allExisted = true;
|
|
|
|
|
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
|
|
|
|
|
{
|
|
|
|
|
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
|
|
|
|
|
{
|
|
|
|
|
allExisted = false;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (allExisted)
|
|
|
|
|
{
|
|
|
|
|
captureCompleted = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (captureCompleted && captureDispatchable)
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
|
|
|
|
|
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
|
|
|
|
|
onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
|
|
|
|
|
if (captureCompleted && !mCaptureDispatched)
|
|
|
|
|
{
|
|
|
|
|
mCaptureDispatched = true;
|
|
|
|
|
captureDispatchable = true;
|
|
|
|
|
}
|
|
|
|
|
m_locker.unlock();
|
|
|
|
|
|
|
|
|
|
if (captureCompleted && captureDispatchable)
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
|
|
|
|
|
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
|
|
|
|
|
FireOneCapture(ts);
|
|
|
|
|
// onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
@ -1375,7 +1399,6 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
|
|
|
|
|
bool captureCompleted = false;
|
|
|
|
|
bool captureDispatchable = false;
|
|
|
|
|
|
|
|
|
|
size_t expectedTimes = mCaptureRequests.size() - 1;
|
|
|
|
|
m_locker.lock();
|
|
|
|
|
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
|
|
|
|
|
if (captureCompleted && !mCaptureDispatched)
|
|
|
|
@ -1807,12 +1830,32 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
|
|
|
|
|
bool captureDispatchable = false;
|
|
|
|
|
size_t expectedTimes = mCaptureRequests.size() - 1;
|
|
|
|
|
|
|
|
|
|
int64_t resultTimestamp = GetTimestamp(result);
|
|
|
|
|
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
|
|
|
|
|
if (expectedTimes == 1)
|
|
|
|
|
|
|
|
|
|
if (m_params.burstRawCapture == 0)
|
|
|
|
|
{
|
|
|
|
|
m_locker.lock();
|
|
|
|
|
mCaptureResults.push_back(captureResult);
|
|
|
|
|
captureCompleted = !mOneFrame.empty();
|
|
|
|
|
mCaptureResultMap[resultTimestamp] = captureResult;
|
|
|
|
|
|
|
|
|
|
if (mOneFrame.size() >= expectedTimes)
|
|
|
|
|
{
|
|
|
|
|
bool allExisted = true;
|
|
|
|
|
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
|
|
|
|
|
{
|
|
|
|
|
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
|
|
|
|
|
{
|
|
|
|
|
allExisted = false;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (allExisted)
|
|
|
|
|
{
|
|
|
|
|
captureCompleted = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (captureCompleted && !mCaptureDispatched)
|
|
|
|
|
{
|
|
|
|
|
mCaptureDispatched = true;
|
|
|
|
@ -1824,7 +1867,8 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
|
|
|
|
|
{
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted");
|
|
|
|
|
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
|
|
|
|
|
onOneCapture(mCharacteristics, captureResult, mFinalLdr, ts - m_startTime, mOneFrame);
|
|
|
|
|
|
|
|
|
|
FireOneCapture(ts);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
@ -1848,6 +1892,44 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int64_t NdkCamera::GetTimestamp(const ACameraMetadata* result)
{
    // Extract the sensor timestamp (ACAMERA_SENSOR_TIMESTAMP) from a capture
    // result. Returns 0 when the tag is missing or the query fails.
    ACameraMetadata_const_entry entry;
    const camera_status_t status =
        ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_TIMESTAMP, &entry);

    const bool hasTimestamp = (status == ACAMERA_OK) && (entry.count > 0);
    return hasTimestamp ? entry.data.i64[0] : 0;
}
|
|
|
|
|
|
|
|
|
|
void NdkCamera::FireOneCapture(uint64_t ts)
|
|
|
|
|
{
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
|
|
if (mWidth == 1920 && mOneFrame.size() > 1)
|
|
|
|
|
{
|
|
|
|
|
std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", ts / 1000);
|
|
|
|
|
std::vector<int> params;
|
|
|
|
|
params.push_back(cv::IMWRITE_JPEG_QUALITY);
|
|
|
|
|
params.push_back(50);
|
|
|
|
|
|
|
|
|
|
for (auto it = mOneFrame.cbegin(); it != mOneFrame.cend(); ++it)
|
|
|
|
|
{
|
|
|
|
|
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
|
|
|
|
|
size_t idx = std::distance(mOneFrame.cbegin(), it);
|
|
|
|
|
std::shared_ptr<ACameraMetadata> result = mCaptureResults[idx];
|
|
|
|
|
CAPTURE_RESULT captureResult = { 0 };
|
|
|
|
|
EnumCameraResult(result.get(), captureResult);
|
|
|
|
|
|
|
|
|
|
fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg";
|
|
|
|
|
cv::imwrite(fileName, it->second, params);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
onOneCapture(mCharacteristics, mCaptureResults.back(), mFinalLdr, ts - m_startTime, mOneFrame.back().second);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void NdkCamera::FireBurstCapture()
|
|
|
|
|
{
|
|
|
|
|
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
|
|
|
|
|