Implement photo capture

serial
Matthew 2 years ago
parent 2d8cb9f1e9
commit 09503b82d0

@ -29,6 +29,8 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.DEVICE_POWER" tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-feature android:name="android.hardware.camera" />
<application

@ -0,0 +1,88 @@
/*
* Copyright 2015 Rockchip Electronics Co. LTD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CAMERA2_HELPER_H__
#define __CAMERA2_HELPER_H__
struct ImageFormat {
int32_t width;
int32_t height;
int32_t format; // Throughout this demo, the format is fixed to
// YUV_420 format
};
/**
* A helper class to assist image size comparison by comparing the absolute
* size, regardless of portrait or landscape mode.
*/
class DisplayDimension {
public:
DisplayDimension(int32_t w, int32_t h) : w_(w), h_(h), portrait_(false) {
if (h > w) {
// make it landscape
w_ = h;
h_ = w;
portrait_ = true;
}
}
DisplayDimension(const DisplayDimension& other) {
w_ = other.w_;
h_ = other.h_;
portrait_ = other.portrait_;
}
DisplayDimension(void) {
w_ = 0;
h_ = 0;
portrait_ = false;
}
DisplayDimension& operator=(const DisplayDimension& other) {
w_ = other.w_;
h_ = other.h_;
portrait_ = other.portrait_;
return (*this);
}
bool IsSameRatio(DisplayDimension& other) {
return (w_ * other.h_ == h_ * other.w_);
}
bool operator>(DisplayDimension& other) {
return (w_ >= other.w_ && h_ >= other.h_);
}
bool operator==(DisplayDimension& other) {
return (w_ == other.w_ && h_ == other.h_ && portrait_ == other.portrait_);
}
DisplayDimension operator-(DisplayDimension& other) {
DisplayDimension delta(w_ - other.w_, h_ - other.h_);
return delta;
}
void Flip(void) { portrait_ = !portrait_; }
bool IsPortrait(void) { return portrait_; }
int32_t width(void) { return w_; }
int32_t height(void) { return h_; }
int32_t org_width(void) { return (portrait_ ? h_ : w_); }
int32_t org_height(void) { return (portrait_ ? w_ : h_); }
private:
int32_t w_, h_;
bool portrait_;
};
#endif /* __CAMERA2_HELPER_H__ */
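A minimal usage sketch, not part of this commit, showing how DisplayDimension normalizes both sizes to landscape so that a portrait request and a landscape sensor mode compare by the same aspect ratio; the sizes below are hypothetical.

#include "Camera2Helper.h"
#include <cstdio>

int main() {
    DisplayDimension requested(1080, 1920);   // portrait request
    DisplayDimension sensorMode(1920, 1080);  // landscape sensor mode
    std::printf("same ratio: %d\n", requested.IsSameRatio(sensorMode));  // 1
    std::printf("covers request: %d\n", sensorMode > requested);         // 1
    // org_width()/org_height() restore the caller's original orientation
    std::printf("%d x %d\n", requested.org_width(), requested.org_height()); // 1080 x 1920
    return 0;
}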

@ -32,6 +32,7 @@ bool GetJniEnv(JavaVM *vm, JNIEnv **env)
{
// Unsupported JNI version. Throw an exception if you want to.
}
return did_attach_thread;
}
@ -118,26 +119,21 @@ Java_com_xinyingpower_microphoto_MicroPhotoService_init(
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xinyingpower_microphoto_MicroPhotoService_takePhoto(
Java_com_xinyingpower_microphoto_MicroPhotoService_notifyToTakePhoto(
JNIEnv* env,
jobject pThis, jint channel, jint preset, jstring path, jstring fileName, jboolean sendToCma) {
jobject pThis, jint channel, jint preset, jlong scheduleTime, jstring path, jstring fileName, jboolean sendToCma) {
CTerminal* pTerminal = CTermClient::GetService().GetTerminal();
if (pTerminal == NULL)
if (channel < 1 || channel > 0xFF)
{
return JNI_FALSE;
}
if (channel < 1 || channel > 0xFF)
CTerminal* pTerminal = CTermClient::GetService().GetTerminal();
if (pTerminal == NULL)
{
return JNI_FALSE;
}
unsigned char id = (unsigned char)channel - 1;
// env->ReleaseStringUTFChars(fileName, fileNameStr);
// env->ReleaseStringUTFChars(path, pathStr);
pTerminal->TakeAndSendPhoto(channel, preset, 0);
pTerminal->NotifyToTakePhoto((unsigned int)channel, (unsigned int)preset, 0, (unsigned long)scheduleTime);
return JNI_TRUE;
}

@ -87,6 +87,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service)
mRegisterTimerMid = env->GetMethodID(classService, "registerTimer", "(JI)Z");
mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(I)V");
mUnregisterTimerMid = env->GetMethodID(classService, "unregisterTimer", "(J)Z");
mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
env->DeleteLocalRef(classService);
@ -116,6 +117,21 @@ void CPhoneDevice::SetListener(IListener* listener)
m_listener = listener;
}
bool CPhoneDevice::UpdateTime(time_t ts)
{
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool attached = GetJniEnv(m_vm, &env);
if (attached)
{
jlong timeInMillis = ((jlong)ts) * 1000;
ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis);
m_vm->DetachCurrentThread();
}
return (ret == JNI_TRUE);
}
bool CPhoneDevice::Reboot()
{
return false;
@ -214,6 +230,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const string&
mPhotoInfo = photoInfo;
mPath = path;
mDisplayDimension = DisplayDimension(photoInfo.width, photoInfo.height);
LOGE("Image Buffer Size: %d", photoInfo.width * photoInfo.height * 4);
imageBuffer_ = (uint8_t*)malloc(photoInfo.width * photoInfo.height * 4);
ASSERT(imageBuffer_ != nullptr, "Failed to allocate imageBuffer_");
@ -258,6 +276,27 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const string&
LOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
}
ACameraMetadata_const_entry face, orientation;
camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_LENS_FACING, &face);
uint32_t cameraFacing_ = static_cast<uint32_t>(face.data.u8[0]);
if (cameraFacing_ == ACAMERA_LENS_FACING_FRONT)
{
int aa = 0;
}
camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_SENSOR_ORIENTATION, &orientation);
LOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
uint32_t cameraOrientation_ = orientation.data.i32[0];
if (cameraOrientation_ == 90 || cameraOrientation_ == 270)
{
mDisplayDimension.Flip();
}
ImageFormat resCap = {(int32_t)photoInfo.width, (int32_t)photoInfo.height, AIMAGE_FORMAT_YUV_420_888};
MatchCaptureSizeRequest(cameraManager, selectedCameraId, photoInfo.width, photoInfo.height, cameraOrientation_, &resCap);
deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
deviceStateCallbacks.onError = camera_device_on_error;
@ -287,7 +326,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const string&
media_status_t status;
// status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_YUV_420_888, 5, &mAImageReader);
status = AImageReader_new(photoInfo.width, photoInfo.height, AIMAGE_FORMAT_YUV_420_888/*AIMAGE_FORMAT_JPEG*/, 5, &mAImageReader);
status = AImageReader_new(resCap.width, resCap.height, resCap.format, 5, &mAImageReader);
if (status != AMEDIA_OK)
{
LOGI("AImageReader_new error\n");
@ -311,6 +350,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const string&
}
LOGI("Surface is prepared in %p.\n", theNativeWindow);
// theNativeWindow
ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);
@ -347,11 +387,21 @@ void CPhoneDevice::ImageCallback(AImageReader *reader)
media_status_t status = AImageReader_acquireNextImage(reader, &image);
if (status == AMEDIA_OK && image)
{
int32_t srcFormat = -1;
AImage_getFormat(image, &srcFormat);
ASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
int32_t srcPlanes = 0;
AImage_getNumberOfPlanes(image, &srcPlanes);
ASSERT(srcPlanes == 3, "Is not 3 planes");
AImageCropRect srcRect;
AImage_getCropRect(image, &srcRect);
int32_t width = srcRect.right - srcRect.left;
int32_t height = srcRect.bottom - srcRect.top;
// int32_t height = srcRect.right - srcRect.left;
// int32_t width = srcRect.bottom - srcRect.top;
uint8_t *yPixel = nullptr;
uint8_t *uPixel = nullptr;
uint8_t *vPixel = nullptr;
@ -360,8 +410,6 @@ void CPhoneDevice::ImageCallback(AImageReader *reader)
int32_t uLen = 0;
int32_t vLen = 0;
cv::Mat _yuv_rgb_img, _yuv_gray_img;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
AImage_getPlaneData(image, 1, &uPixel, &uLen);
AImage_getPlaneData(image, 2, &vPixel, &vLen);
@ -371,15 +419,20 @@ void CPhoneDevice::ImageCallback(AImageReader *reader)
memcpy(data+yLen, vPixel, vLen);
memcpy(data+yLen+vLen, uPixel, uLen);
cv::Mat mYUV = cv::Mat(height * 1.5, width, CV_8UC1, data);
cv::Mat mYUV = cv::Mat(((height * 3) >> 1), width, CV_8UC1, data);
cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);
// cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);
// cv::Mat mYUV = cv::Mat(height, yStride, CV_8UC4, data);
cv::Mat _yuv_rgb_img(height, width, CV_8UC4), _yuv_gray_img;
cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);
cv::rotate(_yuv_rgb_img, _yuv_rgb_img, cv::ROTATE_90_CLOCKWISE);
cv::rotate(_yuv_rgb_img, _yuv_rgb_img, cv::ROTATE_180);
// cv::Mat rgbMat(height, width, CV_8UC3);
// Convert YUV420 to RGB via cv::cvtColor
// cvtColor(_yuv_rgb_img, rgbMat, cv::COLOR_YUV2RGB_I420);
// cv::Mat mat = cv::Mat(buffer.height, buffer.stride, CV_8UC4, buffer.bits);
@ -389,7 +442,7 @@ void CPhoneDevice::ImageCallback(AImageReader *reader)
vector <int> compression_params;
compression_params.push_back(cv::IMWRITE_JPEG_QUALITY);
compression_params.push_back(75);
compression_params.push_back(80);
res = cv::imwrite(mPath.c_str(), _yuv_rgb_img, compression_params);
@ -400,7 +453,7 @@ void CPhoneDevice::ImageCallback(AImageReader *reader)
int aa = 0;
}
// bool res = WriteFile(image);
// res = WriteFile(image, GetFileName() + ".org.jpg");
AImage_delete(image);
// delete pThis;
@ -417,7 +470,7 @@ void CPhoneDevice::OnImageCallback(void *ctx, AImageReader *reader)
}
bool CPhoneDevice::WriteFile(AImage *image)
bool CPhoneDevice::WriteFile(AImage *image, const string& path)
{
int planeCount = 0;
media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
@ -433,8 +486,6 @@ bool CPhoneDevice::WriteFile(AImage *image)
int len = 0;
AImage_getPlaneData(image, 0, &data, &len);
std::string path = GetFileName();
bool res = false;
FILE *file = fopen(path.c_str(), "wb");
if (file && data && len)
@ -458,13 +509,69 @@ bool CPhoneDevice::WriteFile(AImage *image)
bool CPhoneDevice::WriteFile(CPhoneDevice* pThis, AImage *image)
{
return pThis->WriteFile(image);
return pThis->WriteFile(image, pThis->GetFileName());
}
std::string CPhoneDevice::GetFileName() const
{
return mPath;
}
/*
const char *selectedCameraId = NULL;
ACameraManager *cameraManager = ACameraManager_create();
*/
bool CPhoneDevice::MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
ImageFormat* resCap) {
DisplayDimension disp(resCap->width,resCap->height);
if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
disp.Flip();
}
ACameraMetadata* metadata;
camera_status_t camera_status = ACAMERA_OK;
camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId, &metadata);
ACameraMetadata_const_entry entry;
camera_status = ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
// format of the data: format, width, height, input?, type int32
bool foundIt = false;
DisplayDimension foundRes(16384, 16384);
DisplayDimension maxJPG(0, 0);
for (int i = 0; i < entry.count; i += 4) {
int32_t input = entry.data.i32[i + 3];
int32_t format = entry.data.i32[i + 0];
if (input) continue;
if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
LOGI("Camera Resolution: %d x %d fmt=%d", res.width(), res.height(), format);
if (!disp.IsSameRatio(res)) continue;
// prefer the smallest size that still covers the requested dimensions
if (format == AIMAGE_FORMAT_YUV_420_888 && res > disp && foundRes > res) {
foundIt = true;
foundRes = res;
} else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
maxJPG = res;
}
}
}
if (foundIt) {
// resView->width = foundRes.org_width();
// resView->height = foundRes.org_height();
resCap->width = foundRes.org_width();
resCap->height = foundRes.org_height();
} else {
LOGI("Did not find any compatible camera resolution, taking 640x480");
resCap->width = disp.org_width();
resCap->height = disp.org_height();
// *resCap = *resView;
}
// resView->format = AIMAGE_FORMAT_YUV_420_888;
// resCap->format = AIMAGE_FORMAT_JPEG;
return foundIt;
}
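// A minimal standalone sketch, not part of this commit, of how
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS is decoded: entries are packed
// as (format, width, height, isInput) int32 quadruples, which is what the loop
// in MatchCaptureSizeRequest above walks in steps of four. It assumes the LOGI
// macro and the NdkCameraMetadata headers already used in this file.
static void ListYuvOutputSizes(const ACameraMetadata* metadata)
{
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(metadata,
            ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry) != ACAMERA_OK)
    {
        return;
    }
    for (uint32_t i = 0; i + 3 < entry.count; i += 4)
    {
        if (entry.data.i32[i + 3]) continue;                         // skip input configurations
        if (entry.data.i32[i] != AIMAGE_FORMAT_YUV_420_888) continue;
        LOGI("YUV output size: %d x %d", entry.data.i32[i + 1], entry.data.i32[i + 2]);
    }
}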
/**
* Convert yuv image inside AImage into ANativeWindow_Buffer
@ -533,6 +640,9 @@ void CPhoneDevice::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
AImage_getPlaneData(image, 2, &uPixel, &uLen);
AImage_getPlanePixelStride(image, 1, &uvPixelStride);
int32_t rowStride;
AImage_getPlaneRowStride(image, 0, &rowStride);
int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));

@ -25,6 +25,8 @@
#include <Client/Device.h>
#include <string>
#include "Camera2Helper.h"
class CPhoneDevice : public IDevice
{
public:
@ -32,6 +34,7 @@ public:
virtual ~CPhoneDevice();
virtual void SetListener(IListener* listener);
virtual bool UpdateTime(time_t ts);
virtual bool Reboot();
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout);
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const string& path);
@ -45,7 +48,8 @@ protected:
std::string GetFileName() const;
bool SendBroadcastMessage(std::string action, int value);
bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
ImageFormat* resCap);
bool DisplayImage(ANativeWindow_Buffer* buf, AImage* image);
void PresentImage(ANativeWindow_Buffer* buf, AImage* image);
@ -61,7 +65,7 @@ protected:
void ImageCallback(AImageReader *reader);
static void OnImageCallback(void *ctx, AImageReader *reader);
bool WriteFile(AImage *image);
bool WriteFile(AImage *image, const string& path);
static bool WriteFile(CPhoneDevice* pThis, AImage *image);
inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime)
@ -84,6 +88,7 @@ protected:
jmethodID mRegisterTimerMid;
jmethodID mRegisterHeartbeatMid;
jmethodID mUnregisterTimerMid;
jmethodID mUpdateTimeMid;
std::string mPath;
IDevice::PHOTO_INFO mPhotoInfo;
@ -104,6 +109,7 @@ protected:
ACameraDevice_StateCallbacks deviceStateCallbacks;
ACameraCaptureSession_stateCallbacks captureSessionStateCallbacks;
DisplayDimension mDisplayDimension;
int32_t presentRotation_;
int32_t imageHeight_;

@ -0,0 +1,418 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include "ndkcamera.h"
#include <string>
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include "mat.h"
static void onDisconnected(void* context, ACameraDevice* device)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onDisconnected %p", device);
}
static void onError(void* context, ACameraDevice* device, int error)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onError %p %d", device, error);
}
static void onImageAvailable(void* context, AImageReader* reader)
{
// __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onImageAvailable %p", reader);
AImage* image = 0;
media_status_t status = AImageReader_acquireLatestImage(reader, &image);
if (status != AMEDIA_OK)
{
// error
return;
}
int32_t format;
AImage_getFormat(image, &format);
// assert format == AIMAGE_FORMAT_YUV_420_888
int32_t width = 0;
int32_t height = 0;
AImage_getWidth(image, &width);
AImage_getHeight(image, &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(image, 0, &y_rowStride);
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
uint8_t* y_data = 0;
uint8_t* u_data = 0;
uint8_t* v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
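// NV21 stores the full Y plane followed by interleaved V/U byte pairs. So if
// the V plane starts immediately after Y, the U plane is offset by exactly one
// byte, both chroma planes report a pixel stride of 2 and every row stride
// equals the width, the buffer is already laid out as NV21 and can be handed
// over without repacking.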
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
{
// already nv21 :)
((NdkCamera*)context)->on_image((unsigned char*)y_data, (int)width, (int)height);
}
else
{
// construct nv21
unsigned char* nv21 = new unsigned char[width * height + width * height / 2];
{
// Y
unsigned char* yptr = nv21;
for (int y=0; y<height; y++)
{
const unsigned char* y_data_ptr = y_data + y_rowStride * y;
for (int x=0; x<width; x++)
{
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
unsigned char* uvptr = nv21 + width * height;
for (int y=0; y<height/2; y++)
{
const unsigned char* v_data_ptr = v_data + v_rowStride * y;
const unsigned char* u_data_ptr = u_data + u_rowStride * y;
for (int x=0; x<width/2; x++)
{
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
((NdkCamera*)context)->on_image((unsigned char*)nv21, (int)width, (int)height);
delete[] nv21;
}
AImage_delete(image);
}
static void onSessionActive(void* context, ACameraCaptureSession *session)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionActive %p", session);
}
static void onSessionReady(void* context, ACameraCaptureSession *session)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionReady %p", session);
}
static void onSessionClosed(void* context, ACameraCaptureSession *session)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionClosed %p", session);
}
void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureFailed %p %p %p", session, request, failure);
}
void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureSequenceCompleted %p %d %ld", session, sequenceId, frameNumber);
}
void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureSequenceAborted %p %d", session, sequenceId);
}
void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
// __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureCompleted %p %p %p", session, request, result);
}
NdkCamera::NdkCamera()
{
camera_facing = 0;
camera_orientation = 0;
camera_manager = 0;
camera_device = 0;
image_reader = 0;
image_reader_surface = 0;
image_reader_target = 0;
capture_request = 0;
capture_session_output_container = 0;
capture_session_output = 0;
capture_session = 0;
// setup imagereader and its surface
{
AImageReader_new(640, 480, AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader);
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = onImageAvailable;
AImageReader_setImageListener(image_reader, &listener);
AImageReader_getWindow(image_reader, &image_reader_surface);
ANativeWindow_acquire(image_reader_surface);
}
}
NdkCamera::~NdkCamera()
{
close();
if (image_reader)
{
AImageReader_delete(image_reader);
image_reader = 0;
}
if (image_reader_surface)
{
ANativeWindow_release(image_reader_surface);
image_reader_surface = 0;
}
}
int NdkCamera::open(int _camera_facing)
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "open");
camera_facing = _camera_facing;
camera_manager = ACameraManager_create();
// find front camera
std::string camera_id;
{
ACameraIdList* camera_id_list = 0;
ACameraManager_getCameraIdList(camera_manager, &camera_id_list);
for (int i = 0; i < camera_id_list->numCameras; ++i)
{
const char* id = camera_id_list->cameraIds[i];
ACameraMetadata* camera_metadata = 0;
ACameraManager_getCameraCharacteristics(camera_manager, id, &camera_metadata);
// query facing
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
{
ACameraMetadata_const_entry e = { 0 };
ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e);
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
if (camera_facing == 0 && facing != ACAMERA_LENS_FACING_FRONT)
{
ACameraMetadata_free(camera_metadata);
continue;
}
if (camera_facing == 1 && facing != ACAMERA_LENS_FACING_BACK)
{
ACameraMetadata_free(camera_metadata);
continue;
}
camera_id = id;
// query orientation
int orientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e);
orientation = (int)e.data.i32[0];
}
camera_orientation = orientation;
ACameraMetadata_free(camera_metadata);
break;
}
ACameraManager_deleteCameraIdList(camera_id_list);
}
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "open %s %d", camera_id.c_str(), camera_orientation);
// open camera
{
ACameraDevice_StateCallbacks camera_device_state_callbacks;
camera_device_state_callbacks.context = this;
camera_device_state_callbacks.onDisconnected = onDisconnected;
camera_device_state_callbacks.onError = onError;
ACameraManager_openCamera(camera_manager, camera_id.c_str(), &camera_device_state_callbacks, &camera_device);
}
// capture request
{
ACameraDevice_createCaptureRequest(camera_device, TEMPLATE_PREVIEW, &capture_request);
ACameraOutputTarget_create(image_reader_surface, &image_reader_target);
ACaptureRequest_addTarget(capture_request, image_reader_target);
}
// capture session
{
ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
camera_capture_session_state_callbacks.context = this;
camera_capture_session_state_callbacks.onActive = onSessionActive;
camera_capture_session_state_callbacks.onReady = onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
ACaptureSessionOutputContainer_create(&capture_session_output_container);
ACaptureSessionOutput_create(image_reader_surface, &capture_session_output);
ACaptureSessionOutputContainer_add(capture_session_output_container, capture_session_output);
ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks;
camera_capture_session_capture_callbacks.context = this;
camera_capture_session_capture_callbacks.onCaptureStarted = 0;
camera_capture_session_capture_callbacks.onCaptureProgressed = 0;
camera_capture_session_capture_callbacks.onCaptureCompleted = onCaptureCompleted;
camera_capture_session_capture_callbacks.onCaptureFailed = onCaptureFailed;
camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
camera_capture_session_capture_callbacks.onCaptureBufferLost = 0;
ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, nullptr);
}
return 0;
}
void NdkCamera::close()
{
__android_log_print(ANDROID_LOG_WARN, "NdkCamera", "close");
if (capture_session)
{
ACameraCaptureSession_stopRepeating(capture_session);
ACameraCaptureSession_close(capture_session);
capture_session = 0;
}
if (camera_device)
{
ACameraDevice_close(camera_device);
camera_device = 0;
}
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_free(capture_session_output_container);
capture_session_output_container = 0;
}
if (capture_session_output)
{
ACaptureSessionOutput_free(capture_session_output);
capture_session_output = 0;
}
if (capture_request)
{
ACaptureRequest_free(capture_request);
capture_request = 0;
}
if (image_reader_target)
{
ACameraOutputTarget_free(image_reader_target);
image_reader_target = 0;
}
if (camera_manager)
{
ACameraManager_delete(camera_manager);
camera_manager = 0;
}
}
void NdkCamera::on_image(const cv::Mat& rgb) const
{
}
void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const
{
// rotate nv21
int w = 0;
int h = 0;
int rotate_type = 0;
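// Assumption (based on ncnn's documented kanna_rotate semantics): rotate_type
// follows the EXIF orientation convention, i.e. 1 = none, 2 = mirror,
// 3 = 180°, 4 = vertical flip, 5 = transpose, 6 = 90° CW, 7 = transverse,
// 8 = 90° CCW; front-facing captures get the mirrored variants.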
if (camera_orientation == 0)
{
w = nv21_width;
h = nv21_height;
rotate_type = camera_facing == 0 ? 2 : 1;
}
if (camera_orientation == 90)
{
w = nv21_height;
h = nv21_width;
rotate_type = camera_facing == 0 ? 5 : 6;
}
if (camera_orientation == 180)
{
w = nv21_width;
h = nv21_height;
rotate_type = camera_facing == 0 ? 4 : 3;
}
if (camera_orientation == 270)
{
w = nv21_height;
h = nv21_width;
rotate_type = camera_facing == 0 ? 7 : 8;
}
cv::Mat nv21_rotated(h + h / 2, w, CV_8UC1);
ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
// nv21_rotated to rgb
cv::Mat rgb(h, w, CV_8UC3);
ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
on_image(rgb);
}
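A hypothetical consumer sketch, not part of this commit: NdkCamera delivers upright RGB frames through the virtual on_image(const cv::Mat&) hook, so callers subclass it and override that method.

#include <android/log.h>
#include "ndkcamera.h"

class LoggingCamera : public NdkCamera
{
public:
    void on_image(const cv::Mat& rgb) const override
    {
        // rgb is CV_8UC3 and already rotated according to the sensor orientation
        __android_log_print(ANDROID_LOG_INFO, "LoggingCamera", "frame %d x %d", rgb.cols, rgb.rows);
    }
};

// LoggingCamera cam;
// cam.open(1);   // 1 = back camera, per the facing convention in ndkcamera.h
// ...            // frames arrive on the image reader's callback thread
// cam.close();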

@ -0,0 +1,55 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef NDKCAMERA_H
#define NDKCAMERA_H
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <media/NdkImageReader.h>
#include <opencv2/core/core.hpp>
class NdkCamera
{
public:
NdkCamera();
virtual ~NdkCamera();
// facing 0=front 1=back
int open(int camera_facing = 0);
void close();
virtual void on_image(const cv::Mat& rgb) const;
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const;
public:
int camera_facing;
int camera_orientation;
private:
ACameraManager* camera_manager;
ACameraDevice* camera_device;
AImageReader* image_reader;
ANativeWindow* image_reader_surface;
ACameraOutputTarget* image_reader_target;
ACaptureRequest* capture_request;
ACaptureSessionOutputContainer* capture_session_output_container;
ACaptureSessionOutput* capture_session_output;
ACameraCaptureSession* capture_session;
};
#endif // NDKCAMERA_H

@ -40,7 +40,7 @@ public class MainActivity extends AppCompatActivity {
binding = ActivityMainBinding.inflate(getLayoutInflater());
setContentView(binding.getRoot());
this.binding.start.setOnClickListener(new View.OnClickListener() {
this.binding.startServBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
@ -48,7 +48,8 @@ public class MainActivity extends AppCompatActivity {
String[] accessPermissions = new String[] {
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.FOREGROUND_SERVICE
Manifest.permission.FOREGROUND_SERVICE,
/*Manifest.permission.SET_TIME,*/
};
boolean needRequire = false;
for(String access : accessPermissions) {
@ -80,15 +81,22 @@ public class MainActivity extends AppCompatActivity {
MainActivity.this.startService(intent);
}
binding.start.setEnabled(false);
binding.stop.setEnabled(true);
binding.startServBtn.setEnabled(false);
binding.stopServBtn.setEnabled(true);
}
});
this.binding.stop.setOnClickListener(new View.OnClickListener() {
this.binding.takePhotoBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
MicroPhotoService.takePhoto(MainActivity.this, 1, 255, true);
}
});
this.binding.stopServBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
@ -97,8 +105,8 @@ public class MainActivity extends AppCompatActivity {
MainActivity.this.stopService(intent);
binding.start.setEnabled(true);
binding.stop.setEnabled(false);
binding.startServBtn.setEnabled(true);
binding.stopServBtn.setEnabled(false);
}
@ -108,7 +116,7 @@ public class MainActivity extends AppCompatActivity {
Runnable runnable=new Runnable(){
@Override
public void run() {
binding.start.performClick();
binding.startServBtn.performClick();
}
};
handler.postDelayed(runnable, 1000);

@ -16,6 +16,7 @@ import android.os.SystemClock;
import androidx.core.app.NotificationCompat;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.util.Log;
import android.widget.RemoteViews;
import android.widget.Toast;
@ -52,13 +53,16 @@ public class MicroPhotoService extends Service {
private static String ACTION_HEARTBEAT = "ACT_HB";
private static String ACTION_TAKE_PHOTO = "ACT_TP";
private static String ACTION_TAKE_PHOTO_MANUALLY = "ACT_TP_M";
private static String ACTION_TIMEOUT = "ACT_TIMEOUT";
private static String EXTRA_PARAM_CHANNEL = "Channel";
private static String EXTRA_PARAM_PRESET = "Preset";
private static String EXTRA_PARAM_PHOTO_OR_VIDEO = "PhotoOrVideo";
private static String EXTRA_PARAM_SCHEDULES = "Schedules";
private static String EXTRA_PARAM_SCHEDULE = "Schedule_";
private static String EXTRA_PARAM_PRESET = "Preset";
private static String EXTRA_PARAM_TIME = "Time";
// private static String EXTRA_PARAM_FILENAME = "FileName";
@ -103,6 +107,7 @@ public class MicroPhotoService extends Service {
IntentFilter intentFilter = new IntentFilter(ACTION_HEARTBEAT);
intentFilter.addAction(ACTION_TAKE_PHOTO);
intentFilter.addAction(ACTION_TIMEOUT);
intentFilter.addAction(ACTION_TAKE_PHOTO_MANUALLY);
registerReceiver( alarmReceiver, intentFilter);
AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
@ -138,7 +143,7 @@ public class MicroPhotoService extends Service {
}
// File path = getApplicationContext().getFilesDir();
String appPath = path.getAbsolutePath();
Log.i("XYMP", "AppPath=" + appPath);
Log.i(TAG, "AppPath=" + appPath);
String ip = "180.166.218.222";
int port = 40032;
String cmdid = "XYDEV100230100012";
@ -183,7 +188,7 @@ public class MicroPhotoService extends Service {
int channel = (int)((val & 0xFF0000L) >> 16);
int preset = (int)((val & 0xFF00L) >> 8);
mService.takePhoto(channel, preset, mService.buildPhotoDir(channel), mService.buildPhotoFileName(channel, preset, ts), true);
mService.notifyToTakePhoto(channel, preset, ts, mService.buildPhotoDir(channel), mService.buildPhotoFileName(channel, preset, ts), true);
}
}
@ -198,6 +203,15 @@ public class MicroPhotoService extends Service {
mService.registerCaptureSchedule(startTime, baseTime);
}
else if(TextUtils.equals(ACTION_TAKE_PHOTO_MANUALLY, action)) {
int channel = intent.getIntExtra(EXTRA_PARAM_CHANNEL, 0);
int preset = intent.getIntExtra(EXTRA_PARAM_PRESET, 0);
// long ts = intent.getLongExtra(EXTRA_PARAM_TIME, 0);
boolean photoOrVideo = intent.getBooleanExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, true);
long ts = System.currentTimeMillis() / 1000;
mService.notifyToTakePhoto(channel, preset, ts, mService.buildPhotoDir(channel), mService.buildPhotoFileName(channel, preset, ts), photoOrVideo);
}
else if(TextUtils.equals(ACTION_TIMEOUT, action)) {
long uid = intent.getLongExtra(EXTRA_PARAM_TIMER_UID, 0);
Log.i(TAG, "Timeout:" + uid);
@ -256,7 +270,8 @@ public class MicroPhotoService extends Service {
long currentTimeMillis = System.currentTimeMillis();
Date date = new Date(currentTimeMillis + timeout);
Log.d(TAG, "Register Photo Timer: " + date.toString() + " currentTimeMillis=" + currentTimeMillis + " timeout=" + timeout + " Channels=" + channelStr);
String dateStr = DateFormat.format("MM-dd kk:mm:ss", date).toString();
Log.d(TAG, "Register Photo Timer: " + dateStr + " currentTimeMillis=" + currentTimeMillis + " timeout=" + timeout + " Channels=" + channelStr);
alarmManager.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + timeout, pendingIntent);
}
@ -354,6 +369,15 @@ public class MicroPhotoService extends Service {
return true;
}
public static void takePhoto(Context context, int channel, int preset, boolean photoOrVideo) {
Intent intent = new Intent(ACTION_TAKE_PHOTO_MANUALLY);
intent.putExtra(EXTRA_PARAM_CHANNEL, channel);
intent.putExtra(EXTRA_PARAM_PRESET, preset);
intent.putExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, photoOrVideo);
context.sendBroadcast(intent);
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
@ -501,12 +525,25 @@ public class MicroPhotoService extends Service {
stateService = STATE_SERVICE.NOT_CONNECTED;
uninit();
DSCameraManager.getInstace().unInit();
super.onDestroy();
}
protected boolean updateTime(long timeInMillis) {
boolean res = false;
try {
// Calendar c = Calendar.getInstance();
// c.set(2010, 1, 1, 12, 00, 00);
AlarmManager am = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE);
am.setTime(timeInMillis);
res = true;
} catch (Exception ex) {
Log.e(TAG, "Failed to set system time", ex);
}
return res;
}
public String buildPhotoDir(int channel) {
File path = new File(Environment.getExternalStorageDirectory(), "com.xyp.mp/photos/");
@ -532,7 +569,7 @@ public class MicroPhotoService extends Service {
protected native boolean init(String appPath, String ip, int port, String cmdid);
protected native long getHeartbeatDuration();
protected native long[] getPhotoTimeData();
protected native boolean takePhoto(int channel, int preset, String path, String fileName, boolean sendToCma);
protected native boolean notifyToTakePhoto(int channel, int preset, long scheduleTime, String path, String fileName, boolean sendToCma);
protected native boolean sendHeartbeat();
protected native boolean fireTimeout(long uid);
protected native boolean uninit();

@ -17,7 +17,7 @@
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/start"
android:id="@+id/startServBtn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="66dp"
@ -27,14 +27,14 @@
app:layout_constraintTop_toBottomOf="@+id/port" />
<Button
android:id="@+id/stop"
android:id="@+id/stopServBtn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="68dp"
android:layout_marginTop="80dp"
android:enabled="false"
android:text="Stop"
app:layout_constraintStart_toEndOf="@+id/start"
app:layout_constraintStart_toEndOf="@+id/startServBtn"
app:layout_constraintTop_toBottomOf="@+id/port" />
<EditText
@ -83,4 +83,14 @@
app:layout_constraintStart_toStartOf="parent"
tools:layout_editor_absoluteY="288dp" />
<Button
android:id="@+id/takePhotoBtn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="64dp"
android:layout_marginTop="296dp"
android:text="Take Photo"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>