#include "PhoneDevice.h"

#include <AndroidHelper.h>
#include <SpecData_JSON.h>
#include <Client/Terminal.h>
#include <Utils.h>
#include <LogThread.h>
#include "ncnn/yolov5ncnn.h"
#include "GPIOControl.h"
#include "CvText.h"
#include "PositionHelper.h"
#include "DngCreator.h"
#include "media/Streaming.h"

#include "netcamera/VendorCtrl.h"
#include "netcamera/YuShiCtrl.h"
#include "netcamera/HangYuCtrl.h"

#include "media/RTSPRecorder.h"

#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

#include <opencv2/core/types.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <android/log.h>
#include <android/thermal.h>
#include <android/imagedecoder.h>
#include <sys/system_properties.h>
#include <media/NdkImage.h>
#include <mat.h>
#include <string.h>

#ifdef USING_HDRPLUS
#include <hdrplus/hdrplus_pipeline.h>
#include <hdrplus2/include/HDRPlus.h>
#endif

#include "netcamera/netcamera.h"

#include <fcntl.h>
#include <filesystem>
#include <cstdio>
#include <unistd.h>

namespace fs = std::filesystem;

#if 0
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#endif

extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
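
// Builds an HDR image from an exposure-bracketed set of image files: the
// frames are loaded, the camera response function is estimated with Debevec
// calibration, the frames are merged into a linear HDR image, and the result
// is tone-mapped back to an 8-bit BGR image with Reinhard's operator.
// Alignment (AlignMTB) is currently compiled out.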
bool makeHdr(vector<float>& times, std::vector<std::string>& paths, cv::Mat& rgb)
{
    // Read images and exposure times
    vector<cv::Mat> images;

    for (auto it = paths.cbegin(); it != paths.cend(); ++it)
    {
        cv::Mat im = cv::imread((*it).c_str());
        images.push_back(im);
    }

    // Align input images
    // cout << "Aligning images ... " << endl;
    cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
    alignMTB->process(images, images);
#endif

    // Obtain Camera Response Function (CRF)
    // cout << "Calculating Camera Response Function (CRF) ... " << endl;
    cv::Mat responseDebevec;
    cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
    calibrateDebevec->process(images, responseDebevec, times);

    // Merge images into an HDR linear image
    // cout << "Merging images into one HDR image ... ";
    cv::Mat hdrDebevec;
    cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
    mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
    // Save HDR image.
    // imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
    // cout << "saved hdrDebevec.hdr " << endl;

    {
        std::vector<cv::Mat> empty;
        empty.swap(images);
    }

    // Tonemap using Reinhard's method to obtain 24-bit color image
    // cout << "Tonemaping using Reinhard's method ... ";
    cv::Mat ldrReinhard;
    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
    tonemapReinhard->process(hdrDebevec, ldrReinhard);
    hdrDebevec.release();

    int type = ldrReinhard.type();
    ldrReinhard = ldrReinhard * 255;

    ldrReinhard.convertTo(rgb, CV_8U);
    ldrReinhard.release();

    return true;
}

bool AndroidBitmap_CompressWriteFile(void *userContext, const void *data, size_t size)
{
    FILE* file = (FILE*)userContext;
    int bytesWritten = fwrite(data, 1, size, file);
    return bytesWritten == size;
}

#define WAKELOCK_NAME "NDK_WK_"
// This value is 2^18 - 1, and is used to clamp the RGB values before their
// ranges are normalized to eight bits.
static const int kMaxChannelValue = 262143;
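
// Simple owner for a set of byte buffers (e.g. captured frames); in debug
// builds the destructor logs the number and sizes of the buffers before
// releasing them.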
class ByteArraysPointer
{
public:
    ByteArraysPointer()
    {
    }
    ~ByteArraysPointer()
    {
#ifdef _DEBUG
        ALOGD("ByteArray Size=%u", (uint32_t)byteArrays.size());
        for (auto it = byteArrays.cbegin(); it != byteArrays.cend(); ++it)
        {
            ALOGD("ByteArray Free: Size=%u", (uint32_t)((*it).size()));
        }
#endif
        byteArrays.clear();
    }
    std::vector<std::vector<uint8_t> > byteArrays;
};

cv::Mat convert16bit2_8bit_(cv::Mat ans){
    if(ans.type()==CV_16UC3){
        cv::MatIterator_<cv::Vec3w> it, end;
        for( it = ans.begin<cv::Vec3w>(), end = ans.end<cv::Vec3w>(); it != end; ++it)
        {
            // std::cout<<sizeof (*it)[0] <<std::endl;
            (*it)[0] *=(255.0/USHRT_MAX);
            (*it)[1] *=(255.0/USHRT_MAX);
            (*it)[2] *=(255.0/USHRT_MAX);
        }
        ans.convertTo(ans, CV_8UC3);
    }else if(ans.type()==CV_16UC1){
        u_int16_t* ptr = (u_int16_t*)ans.data;
        int end = ans.rows*ans.cols;
        for(int i=0;i<end;i++){
            *(ptr+i) *=(255.0/USHRT_MAX);
        }
        ans.convertTo(ans, CV_8UC1);
    }else{
        // std::cout<<"Unsupported Data Type"<<std::endl;
    }
    return ans;
}

char* MakeArgv(const std::string v)
{
    char* argv = new char[v.size() + 1];
    memset(argv, 0, v.size() + 1);
    strcpy(argv, v.c_str());
    return argv;
}
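
// Reads /proc/meminfo and sums the values (reported in kB) for the labels
// given in `sums`, returning the total in bytes, or -1 on failure. Used by
// the getFreeMemory/getTotalMemory wrappers below, which mirror the
// android_os_Process naming.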
static long getFreeMemoryImpl(const char* const sums[], const size_t sumsLen[], size_t num)
{
    int fd = open("/proc/meminfo", O_RDONLY | O_CLOEXEC);

    if (fd < 0) {
        ALOGW("Unable to open /proc/meminfo");
        return -1;
    }

    char buffer[2048];
    const int len = read(fd, buffer, sizeof(buffer)-1);
    close(fd);

    if (len < 0) {
        ALOGW("Unable to read /proc/meminfo");
        return -1;
    }
    buffer[len] = 0;

    size_t numFound = 0;
    jlong mem = 0;

    char* p = buffer;
    while (*p && numFound < num) {
        int i = 0;
        while (sums[i]) {
            if (strncmp(p, sums[i], sumsLen[i]) == 0) {
                p += sumsLen[i];
                while (*p == ' ') p++;
                char* num = p;
                while (*p >= '0' && *p <= '9') p++;
                if (*p != 0) {
                    *p = 0;
                    p++;
                    if (*p == 0) p--;
                }
                mem += atoll(num) * 1024;
                numFound++;
                break;
            }
            i++;
        }
        p++;
    }

    return numFound > 0 ? mem : -1;
}

static jlong android_os_Process_getFreeMemory()
{
    static const char* const sums[] = { "MemFree:", "Cached:", NULL };
    static const size_t sumsLen[] = { strlen("MemFree:"), strlen("Cached:"), 0 };
    return getFreeMemoryImpl(sums, sumsLen, 2);
}

static jlong android_os_Process_getTotalMemory()
{
    static const char* const sums[] = { "MemTotal:", NULL };
    static const size_t sumsLen[] = { strlen("MemTotal:"), 0 };
    return getFreeMemoryImpl(sums, sumsLen, 1);
}
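
// Integer-only YUV -> ARGB conversion: the coefficients are the usual BT.601
// constants scaled by 1024 (e.g. 1.164 * 1024 ~= 1192); each channel is
// clamped to kMaxChannelValue and then shifted right by 10 to bring it back
// to 8 bits.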
static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
    nY -= 16;
    nU -= 128;
    nV -= 128;
    if (nY < 0) nY = 0;

    // This is the floating point equivalent. We do the conversion in integer
    // because some Android devices do not have floating point in hardware.
    // nR = (int)(1.164 * nY + 1.596 * nV);
    // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
    // nB = (int)(1.164 * nY + 2.018 * nU);

    int nR = (int)(1192 * nY + 1634 * nV);
    int nG = (int)(1192 * nY - 833 * nV - 400 * nU);
    int nB = (int)(1192 * nY + 2066 * nU);

    nR = std::min(kMaxChannelValue, std::max(0, nR));
    nG = std::min(kMaxChannelValue, std::max(0, nG));
    nB = std::min(kMaxChannelValue, std::max(0, nB));

    nR = (nR >> 10) & 0xff;
    nG = (nG >> 10) & 0xff;
    nB = (nB >> 10) & 0xff;

    return 0xff000000 | (nR << 16) | (nG << 8) | nB;
}
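
// RAII helper: attaches the calling thread to the JVM (via GetJniEnv) on
// construction and detaches it again in the destructor when an attach was
// actually performed.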
class AutoEnv
{
public:
    AutoEnv(JavaVM* vm)
    {
        didAttachThread = false;
        env = NULL;
        m_vm = vm;

        jboolean ret = JNI_FALSE;
        bool res = GetJniEnv(m_vm, &env, didAttachThread);
        if (!res)
        {
            ALOGE("Failed to get JNI Env");
        }
    }

    ~AutoEnv()
    {
        if (didAttachThread)
        {
            m_vm->DetachCurrentThread();
        }
    }

private:
    JavaVM* m_vm;
    JNIEnv* env;
    bool didAttachThread;
};
|
|
|
|
|
|
CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params) : NdkCamera(width, height, params), m_dev(dev)
|
|
|
{
|
|
|
}
|
|
|
|
|
|
CPhoneDevice::CPhoneCamera::~CPhoneCamera()
|
|
|
{
|
|
|
m_dev = NULL;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat rgb)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
return m_dev->OnImageReady(rgb);
|
|
|
}
|
|
|
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
return m_dev->onOneCapture(characteristics, result, ldr, duration, rgb);
|
|
|
}
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
return m_dev->onBurstCapture(characteristics, results, ldr, duration, frames);
|
|
|
}
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
return m_dev->onBurstCapture(characteristics, results, ldr, duration, frames);
|
|
|
}
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
m_dev->onError(msg);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::CPhoneCamera::onDisconnected(ACameraDevice* device)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
m_dev->onDisconnected(device);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params) : CPhoneDevice::CPhoneCamera(dev, width, height, params), m_path(path)
|
|
|
{
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
return m_dev->onOneCapture(characteristics, result, ldr, duration, rgb);
|
|
|
}
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
m_dev->onBurstCapture(characteristics, results, ldr, duration, frames);
|
|
|
}
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
{
|
|
|
if (m_dev != NULL)
|
|
|
{
|
|
|
m_dev->onBurstCapture(characteristics, results, ldr, duration, frames);
|
|
|
}
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader)
|
|
|
{
|
|
|
ALOGD("onImageAvailable %p", reader);
|
|
|
|
|
|
AImage* image = 0;
|
|
|
media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);
|
|
|
|
|
|
if (mstatus != AMEDIA_OK)
|
|
|
{
|
|
|
// error
|
|
|
// https://stackoverflow.com/questions/67063562
|
|
|
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
|
|
|
}
|
|
|
return;
|
|
|
}
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
int y_len = 0;
|
|
|
#if 0
|
|
|
if (!lightDetected)
|
|
|
{
|
|
|
AImage_getPlaneData(image, 0, &y_data, &y_len);
|
|
|
|
|
|
lightDetected = true;
|
|
|
|
|
|
#if __cplusplus >= 201703L
|
|
|
uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
|
|
|
#else
|
|
|
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
|
|
|
#endif
|
|
|
avgY = avgY / (uint64_t)y_len;
|
|
|
mLdr = avgY;
|
|
|
#if 1
|
|
|
if (avgY < 50)
|
|
|
{
|
|
|
if (m_params.autoExposure)
|
|
|
{
|
|
|
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
|
|
|
camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
|
|
|
|
|
|
int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
|
|
|
status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
|
|
|
|
|
|
int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
|
|
|
status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY,
|
|
|
mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
|
|
|
}
|
|
|
AImage_delete(image);
|
|
|
return;
|
|
|
}
|
|
|
#endif
|
|
|
}
|
|
|
#endif
|
|
|
|
|
|
int32_t format;
|
|
|
AImage_getFormat(image, &format);
|
|
|
|
|
|
if (format == AIMAGE_FORMAT_JPEG)
|
|
|
{
|
|
|
int planeCount;
|
|
|
media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
|
|
|
|
|
|
// LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
|
|
|
if (!(status == AMEDIA_OK && planeCount == 1))
|
|
|
{
|
|
|
// LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
|
|
|
AImage_delete(image);
|
|
|
return;
|
|
|
}
|
|
|
|
|
|
uint8_t *data = nullptr;
|
|
|
int len = 0;
|
|
|
AImage_getPlaneData(image, 0, &data, &len);
|
|
|
|
|
|
FILE *file = fopen(m_path.c_str(), "wb");
|
|
|
if (file && data && len)
|
|
|
{
|
|
|
fwrite(data, 1, len, file);
|
|
|
fdatasync(fileno(file));
|
|
|
fclose(file);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
if (file)
|
|
|
fclose(file);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
AImage_delete(image);
|
|
|
}
|
|
|
|
|
|
int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const
|
|
|
{
|
|
|
return AIMAGE_FORMAT_JPEG;
|
|
|
}
|
|
|
|
|
|
CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir)
|
|
|
: mVersionCode(versionCode), m_nativeLibraryDir(nativeLibDir), m_network(NULL), m_defNetHandle(activeNetHandle), m_ethnetHandle(NETWORK_UNSPECIFIED)
|
|
|
{
|
|
|
mCamera = NULL;
|
|
|
m_listener = NULL;
|
|
|
m_pRecognizationCfg = NULL;
|
|
|
mAIInitialized = false;
|
|
|
mHeartbeatStartTime = 0;
|
|
|
mHeartbeatDuration = 0;
|
|
|
m_javaService = NULL;
|
|
|
m_appPath = appPath;
|
|
|
|
|
|
m_signalLevel = 0;
|
|
|
m_signalLevelUpdateTime = time(NULL);
|
|
|
mBuildTime = 0;
|
|
|
m_lastTime = 0;
|
|
|
m_shouldStopWaiting = false;
|
|
|
m_collecting = false;
|
|
|
localDelayTime = GetMicroTimeStamp();
|
|
|
|
|
|
RegisterHandlerForSignal(SIGUSR2);
|
|
|
|
|
|
GpioControl::Startup();
|
|
|
|
|
|
LoadNetworkInfo();
|
|
|
|
|
|
m_vm = vm;
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
if (service != NULL)
|
|
|
{
|
|
|
m_javaService = env->NewGlobalRef(service);
|
|
|
|
|
|
jclass classService = env->GetObjectClass(m_javaService);
|
|
|
mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(IJ)V");
|
|
|
mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
|
|
|
mUpdateCaptureScheduleMid = env->GetMethodID(classService, "updateCaptureSchedule", "(J)Z");
|
|
|
mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(ZIJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
|
|
|
|
|
|
mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V");
|
|
|
mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V");
|
|
|
|
|
|
mGetSystemInfoMid = env->GetMethodID(classService, "getSystemInfo", "()Ljava/lang/String;");
|
|
|
mInstallAppMid = env->GetMethodID(classService, "installApp", "(Ljava/lang/String;J)Z");
|
|
|
mRebootMid = env->GetMethodID(classService, "reboot", "(IJLjava/lang/String;)V");
|
|
|
mEnableGpsMid = env->GetMethodID(classService, "enableGps", "(Z)V");
|
|
|
mRequestPositionMid = env->GetMethodID(classService, "requestPosition", "()Z");
|
|
|
|
|
|
mExecHdrplusMid = env->GetMethodID(classService, "execHdrplus", "(IILjava/lang/String;Ljava/lang/String;)I");
|
|
|
|
|
|
mSetStaticIpMid = env->GetMethodID(classService, "setStaticNetwork", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
|
|
|
|
|
|
mConvertDngToPngMid = env->GetMethodID(classService, "convertDngToPng", "(Ljava/lang/String;Ljava/lang/String;)V");
|
|
|
|
|
|
mCallSysCameraMid = env->GetMethodID(classService, "callSystemCamera", "(IJ)V");
|
|
|
|
|
|
env->DeleteLocalRef(classService);
|
|
|
}
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
m_timerUidFeed = time(NULL) * 1000;
|
|
|
m_wakelockIdFeed = (uint64_t)m_timerUidFeed;
|
|
|
m_uniqueIdFeed = (uint64_t)m_timerUidFeed;
|
|
|
|
|
|
#ifdef USING_NRSEC
|
|
|
GpioControl::setCam3V3Enable(true);
|
|
|
GpioControl::setSpiPower(true);
|
|
|
#endif
|
|
|
}
|
|
|
|
|
|
CPhoneDevice::~CPhoneDevice()
|
|
|
{
|
|
|
m_devLocker.lock();
|
|
|
for (auto it = mTimers.begin(); it != mTimers.end(); ++it)
|
|
|
{
|
|
|
timer_delete((timer_t)it->first);
|
|
|
delete it->second;
|
|
|
}
|
|
|
mTimers.clear();
|
|
|
m_devLocker.unlock();
|
|
|
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
env->DeleteGlobalRef(m_javaService);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
m_javaService = NULL;
|
|
|
|
|
|
if (m_pRecognizationCfg != NULL)
|
|
|
{
|
|
|
if (mAIInitialized)
|
|
|
{
|
|
|
ncnn_uninit();
|
|
|
}
|
|
|
m_pRecognizationCfg = NULL;
|
|
|
}
|
|
|
|
|
|
if (m_network != NULL)
|
|
|
{
|
|
|
delete m_network;
|
|
|
m_network = NULL;
|
|
|
}
|
|
|
|
|
|
GpioControl::Stop();
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::SetListener(IListener* listener)
|
|
|
{
|
|
|
m_listener = listener;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::SetRecognizationCfg(const IDevice::CFG_RECOGNIZATION* pRecognizationCfg)
|
|
|
{
|
|
|
if (m_pRecognizationCfg == NULL && pRecognizationCfg != NULL && (pRecognizationCfg->enabled != 0))
|
|
|
{
|
|
|
// TODO
|
|
|
std::string paramFile = m_appPath + (APP_PATH_RECOG_PARAM);
|
|
|
std::string binFile = m_appPath + (APP_PATH_RECOG_BIN);
|
|
|
std::error_code err;
|
|
|
if (!existsFile(paramFile) || !existsFile(binFile) || fs::is_directory(fs::path(paramFile), err) || fs::is_directory(fs::path(binFile), err))
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "AI Config Files are invalid");
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "AI Enabled and will Init NCNN");
|
|
|
ncnn_init();
|
|
|
mAIInitialized = true;
|
|
|
bool res = YoloV5Ncnn_Init(paramFile, binFile);
|
|
|
if (res)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Init NCNN");
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Init NCNN");
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "AI Disabled");
|
|
|
}
|
|
|
|
|
|
m_pRecognizationCfg = pRecognizationCfg;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::BindNetwork(int sock)
|
|
|
{
|
|
|
#ifdef USING_ETHERNET
|
|
|
#if 0
|
|
|
m_devLocker.lock();
|
|
|
net_handle_t defNetHandle = m_defNetHandle;
|
|
|
m_devLocker.unlock();
|
|
|
if (defNetHandle != NETWORK_UNSPECIFIED)
|
|
|
{
|
|
|
int res = android_setsocknetwork(defNetHandle, sock);
|
|
|
if (res == -1)
|
|
|
{
|
|
|
int errcode = errno;
|
|
|
printf("android_setsocknetwork errno=%d", errcode);
|
|
|
}
|
|
|
return res == 0;
|
|
|
}
|
|
|
#endif
|
|
|
#endif // USING_ETHERNET
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
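// Builds a human-readable, tab-separated self-test report (strings are in
// Chinese): firmware version, channel count from App.json, a probe of every
// configured camera channel, battery voltage, free storage and memory,
// TF-card space, 4G signal level and the visible network interfaces.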
bool CPhoneDevice::SelfTest(std::string& result)
|
|
|
{
|
|
|
result.clear();
|
|
|
|
|
|
const char* ITEM_SEP = "\t";
|
|
|
unsigned int numberOfChannels = 0;
|
|
|
|
|
|
result += "设备自检 版本:" + GetVersion() + ITEM_SEP;
|
|
|
|
|
|
Json::Value appConfig = Json::objectValue;
|
|
|
std::vector<unsigned char> content;
|
|
|
std::string filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF);
|
|
|
if (!readFile(filePath, content))
|
|
|
{
|
|
|
result += "读取系统配置文件App.json失败";
|
|
|
result += ITEM_SEP;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
Json::CharReaderBuilder builder;
|
|
|
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
|
|
|
|
|
|
const char* doc = (const char*)&(content[0]);
|
|
|
if (reader->parse(doc, doc + content.size(), &appConfig, NULL))
|
|
|
{
|
|
|
unsigned int val = 0;
|
|
|
if (GetJSONUInt32Value(appConfig, "channels", val) && (val > 0 && val <= 255))
|
|
|
{
|
|
|
numberOfChannels = val;
|
|
|
result += "通道数:" + std::to_string(numberOfChannels) + ITEM_SEP;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
result += "通道数未定义或者无效" + std::string(ITEM_SEP);
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
result += "解析系统配置文件App.json失败" + std::string(ITEM_SEP);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
for (unsigned int channel = 1; channel <= numberOfChannels; channel++)
|
|
|
{
|
|
|
std::string path = m_appPath + (APP_PATH_CHANNELS DIR_SEP_STR);
|
|
|
|
|
|
unsigned char cameraId = 0;
|
|
|
unsigned char usbCamera = 0;
|
|
|
Json::Value channelCfg = Json::objectValue;
|
|
|
content.clear();
|
|
|
filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF);
|
|
|
if (!readFile(filePath, content))
|
|
|
{
|
|
|
result += "读取通道" + std::to_string(channel) + "配置文件失败" + std::string(ITEM_SEP);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
Json::CharReaderBuilder builder;
|
|
|
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
|
|
|
|
|
|
const char* doc = (const char*)&(content[0]);
|
|
|
if (reader->parse(doc, doc + content.size(), &channelCfg, NULL))
|
|
|
{
|
|
|
GetJSONUInt8Value(channelCfg, "usbCamera", usbCamera);
|
|
|
if (GetJSONUInt8Value(channelCfg, "cameraId", cameraId))
|
|
|
{
|
|
|
result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + ITEM_SEP;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
cameraId = channel - 1;
|
|
|
result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + ITEM_SEP;
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" + std::string(ITEM_SEP);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
int32_t width = 0;
|
|
|
int32_t height = 0;
|
|
|
NdkCamera::CAMERA_PARAMS params = { 0 };
|
|
|
params.burstCaptures = 1;
|
|
|
if (usbCamera)
|
|
|
{
|
|
|
GpioControl::setOtgState(true);
|
|
|
}
|
|
|
GpioControl::setCam3V3Enable(true);
|
|
|
|
|
|
NdkCamera camera(width, height, params);
|
|
|
int res = camera.selfTest(std::to_string(cameraId), width, height);
|
|
|
GpioControl::setCam3V3Enable(false);
|
|
|
if (usbCamera)
|
|
|
{
|
|
|
GpioControl::setOtgState(false);
|
|
|
}
|
|
|
if (res == 0)
|
|
|
{
|
|
|
result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + ITEM_SEP;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
result += "通道" + std::to_string(channel) + " 异常 err=" + std::to_string(res) + ITEM_SEP;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
int bv = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES);
|
|
|
if (bv > 0)
|
|
|
{
|
|
|
bv -= bv % 100;
|
|
|
result += std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + ITEM_SEP;
|
|
|
}
|
|
|
|
|
|
fs::space_info si = fs::space("/data");
|
|
|
double fr = ((double)si.available * 100.0f) / ((double)si.capacity);
|
|
|
result += "可用存储:";
|
|
|
result += std::to_string((int)fr);
|
|
|
result += "%%" + std::string(ITEM_SEP);
|
|
|
|
|
|
long fm = android_os_Process_getFreeMemory();
|
|
|
long tm = android_os_Process_getTotalMemory();
|
|
|
double fmp = ((double)fm * 100.0f) / ((double)tm);
|
|
|
result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%") + ITEM_SEP;
|
|
|
|
|
|
if (!m_tfCardPath.empty())
|
|
|
{
|
|
|
fs::space_info si2 = fs::space(m_tfCardPath.c_str());
|
|
|
double fr2 = ((double)si2.available * 100.0f) / ((double)si2.capacity);
|
|
|
result += "TF卡可用空间:";
|
|
|
result += std::to_string((int)fr2);
|
|
|
result += "%%" + std::string(ITEM_SEP);
|
|
|
}
|
|
|
|
|
|
result += "4G信号强度:";
|
|
|
result += std::to_string(m_signalLevel);
|
|
|
result += ITEM_SEP;
|
|
|
|
|
|
result += "网络接口:";
|
|
|
std::vector<std::string> devices;
|
|
|
GetNetDevices(devices);
|
|
|
for (auto it = devices.cbegin(); it != devices.cend(); ++it)
|
|
|
{
|
|
|
result += (*it);
|
|
|
result += " ";
|
|
|
}
|
|
|
// result += ITEM_SEP;
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::UpdateTime(time_t ts)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
jlong timeInMillis = ((jlong)ts) * 1000;
|
|
|
ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return (ret == JNI_TRUE);
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::UpdateSchedules()
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
jlong ts = time(NULL);
|
|
|
ret = env->CallBooleanMethod(m_javaService, mUpdateCaptureScheduleMid, ts);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return (ret == JNI_TRUE);
|
|
|
}
|
|
|
|
|
|
int CPhoneDevice::QueryBatteryVoltage(int retries)
{
    int val = -1; // BatVol
    for (int idx = 0; idx < retries; idx++)
    {
        val = GpioControl::getBatteryBusVoltage(); // BatVol
        if (val >= 0)
        {
            break;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }

    return val;
}
|
|
|
|
|
|
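// Fills in only the entries of `properties` whose value is still empty,
// keyed by the PROP_* constants: Android system properties (model, serial,
// build), GPIO/ADC readings (light sensor, charging and battery voltage),
// filesystem and memory statistics, and the cached 4G signal level. The
// battery current entry is derived afterwards from the measured voltage.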
bool CPhoneDevice::QuerySystemProperties(std::map<std::string, std::string>& properties)
|
|
|
{
|
|
|
char value[PROP_VALUE_MAX] = { 0 };
|
|
|
std::map<std::string, std::string> powerInfo;
|
|
|
int res = 0;
|
|
|
int bv = -1;
|
|
|
|
|
|
for (std::map<std::string, std::string>::iterator it = properties.begin(); it != properties.end(); ++it)
|
|
|
{
|
|
|
if (!(it->second.empty()))
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
if (it->first == PROP_EQUIP_NAME)
|
|
|
{
|
|
|
__system_property_get("ro.product.name", value);
|
|
|
it->second = value;
|
|
|
}
|
|
|
else if (it->first == PROP_MODEL)
|
|
|
{
|
|
|
__system_property_get("ro.product.model", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == PROP_BS_MANU)
|
|
|
{
|
|
|
__system_property_get("ro.product.manufacturer", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == PROP_VERSION)
|
|
|
{
|
|
|
// FOR Protocol
|
|
|
snprintf(value, sizeof(value), "%u.%03u", (mVersionCode / 1000), (mVersionCode % 1000));
|
|
|
// __system_property_get("ro.build.version.release", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == (PROP_VERSION_ABBR))
|
|
|
{
|
|
|
// FOR OSD
|
|
|
string version = GetVersion();
|
|
|
#if 0
|
|
|
version += " " + FormatLocalTime(mBuildTime);
|
|
|
#endif
|
|
|
it->second = version;
|
|
|
}
|
|
|
else if (it->first == PROP_BUILD_TIME)
|
|
|
{
|
|
|
it->second = FormatLocalDateTime(mBuildTime);
|
|
|
}
|
|
|
else if (it->first == PROP_PROD_DATE)
|
|
|
{
|
|
|
__system_property_get("ro.build.date.utc", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == PROP_SN || it->first == PROP_BS_ID)
|
|
|
{
|
|
|
__system_property_get("ro.serialno", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == PROP_IMEI)
|
|
|
{
|
|
|
if (m_simcard.empty())
|
|
|
{
|
|
|
__system_property_get("phone.imei", value);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
it->second = m_simcard;
|
|
|
}
|
|
|
}
|
|
|
else if (it->first == PROP_OPERATION_TEMP)
|
|
|
{
|
|
|
it->second = QueryCpuTemperature();
|
|
|
}
|
|
|
else if (it->first == PROP_FREE_ROM)
|
|
|
{
|
|
|
fs::space_info si = fs::space("/data");
|
|
|
it->second = std::to_string(si.available); // Unit: M
|
|
|
}
|
|
|
else if (it->first == PROP_FREE_ROM_PERCENT)
|
|
|
{
|
|
|
fs::space_info si = fs::space("/data");
|
|
|
double fr = ((double)si.available * 100.0f) / ((double)si.capacity);
|
|
|
snprintf(value, sizeof(value), "%d%%", (int)fr);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else if (it->first == PROP_TOTAL_ROM)
|
|
|
{
|
|
|
fs::space_info si = fs::space("/data");
|
|
|
it->second = std::to_string(si.capacity); // Unit: M
|
|
|
}
|
|
|
else if (it->first == PROP_FREE_MEMORY)
|
|
|
{
|
|
|
it->second = std::to_string(android_os_Process_getFreeMemory()); // Unit: M
|
|
|
}
|
|
|
else if (it->first == PROP_FREE_MEMORY_PERCENT)
|
|
|
{
|
|
|
long fm = android_os_Process_getFreeMemory();
|
|
|
long tm = android_os_Process_getTotalMemory();
|
|
|
double fmp = ((double)fm * 100.0f) / ((double)tm);
|
|
|
snprintf(value, sizeof(value), "%d%%", (int)fmp);
|
|
|
it->second = std::string(value); // Unit: M
|
|
|
}
|
|
|
else if (it->first == PROP_TOTAL_MEMORY)
|
|
|
{
|
|
|
it->second = std::to_string(android_os_Process_getTotalMemory()); // Unit: M
|
|
|
}
|
|
|
else if (it->first == (PROP_LIGHTDEPENDENT_RESISTOR))
|
|
|
{
|
|
|
int val = GpioControl::getLightAdc();
|
|
|
it->second = std::to_string(val);
|
|
|
}
|
|
|
#if 0
|
|
|
else if (it->first == (PROP_CHARGING_CURRENT))
|
|
|
{
|
|
|
it->second = std::to_string(GpioControl::getChargingCurrent());
|
|
|
}
|
|
|
else if (it->first == (PROP_CHARGING_POWER))
|
|
|
{
|
|
|
it->second = std::to_string(GpioControl::getChargingPower());
|
|
|
}
|
|
|
#endif
|
|
|
else if (it->first == (PROP_CHARGING_BUS_VOL) || it->first == (PROP_CHARGING_VOLTAGE))
|
|
|
{
|
|
|
double val = -1;
|
|
|
char str[32] = { 0 };
|
|
|
for (int idx = 0; idx < 3; idx++)
|
|
|
{
|
|
|
val = GpioControl::getChargingBusVoltage();
|
|
|
if (val < 0)
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
snprintf(str, sizeof(str), "%.1f", (val / 1000.0));
|
|
|
it->second = std::string(str);
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
#if 0
|
|
|
else if (it->first == (PROP_BATTERY_POWER))
|
|
|
{
|
|
|
it->second = std::to_string(GpioControl::getBatteryPower());
|
|
|
}
|
|
|
#endif
|
|
|
else if (it->first == (PROP_BATTERY_BUS_VOL) || it->first == (PROP_BATTERY_VOLTAGE))
|
|
|
{
|
|
|
int val = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES); // // BatVol
|
|
|
if (val > 0)
|
|
|
{
|
|
|
bv = val;
|
|
|
snprintf(value, sizeof(value), "%.1f", val / 1000.0);
|
|
|
it->second = std::string(value);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
#ifdef _DEBUG
|
|
|
int aa = 0;
|
|
|
#endif
|
|
|
}
|
|
|
}
|
|
|
else if ((it->first == (PROP_SIGNAL_4G)) || (it->first == (PROP_SIGNAL_2G)) || (it->first == (PROP_SIGNAL_LEVEL)))
|
|
|
{
|
|
|
it->second = std::to_string(m_signalLevel);
|
|
|
}
|
|
|
/*
|
|
|
else if (startsWith(it->first, PROP_JAVA_PREFIX))
|
|
|
{
|
|
|
if (powerInfo.empty())
|
|
|
{
|
|
|
QueryPowerInfo(powerInfo);
|
|
|
}
|
|
|
auto it2 = powerInfo.find(it->first);
|
|
|
if (it2 != powerInfo.cend())
|
|
|
{
|
|
|
it->second = it2->second;
|
|
|
}
|
|
|
}
|
|
|
*/
|
|
|
}
|
|
|
|
|
|
std::map<std::string, std::string>::iterator it = properties.find(PROP_BATTERY_CURRENT);
|
|
|
if (it != properties.end())
|
|
|
{
|
|
|
if (bv == -1)
|
|
|
{
|
|
|
bv = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES);
|
|
|
}
|
|
|
|
|
|
if (bv > 0)
|
|
|
{
|
|
|
char str[32] = { 0 };
|
|
|
float batteryCurrent = STANDARD_CURRENT_64V / ((float)bv / 1000.0f / STANDARD_VOLTAGE_64V);
|
|
|
snprintf(str, sizeof(str), "%d", (int)batteryCurrent);
|
|
|
it->second = std::string(str);
|
|
|
}
|
|
|
}
|
|
|
// __system_property_get("ro.telephony.default_network", value);
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
std::string CPhoneDevice::QueryCpuTemperature()
|
|
|
{
|
|
|
// /sys/devices/virtual/thermal/thermal_zone0/temp
|
|
|
std::vector<unsigned char> data;
|
|
|
// /sys/class/thermal/thermal zone*/temp
|
|
|
if (readFile("/sys/class/thermal/thermal_zone3/temp", data) && !data.empty())
|
|
|
{
|
|
|
data.push_back(0);
|
|
|
int temp = atoi((const char*)(&data[0]));
|
|
|
return std::to_string((temp / 1000) + 20);
|
|
|
}
|
|
|
|
|
|
return "";
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::QueryPowerInfo(std::map<std::string, std::string>& powerInfo)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
jobject jobj = env->CallObjectMethod(m_javaService, mGetSystemInfoMid);
|
|
|
std::string str = jstring2string(env, (jstring)jobj);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
if (!str.empty())
|
|
|
{
|
|
|
std::map<std::string, std::string> queries = parseQuery(str);
|
|
|
powerInfo.swap(queries);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector<uint32_t>& items)
|
|
|
{
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::InstallAPP(const std::string& path, unsigned int delayedTime)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
|
|
|
jstring jpath = env->NewStringUTF(path.c_str());
|
|
|
env->CallBooleanMethod(m_javaService, mInstallAppMid, jpath, (jlong)delayedTime);
|
|
|
// env->ReleaseStringUTFChars(jpath, path.c_str());
|
|
|
env->DeleteLocalRef(jpath);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout/* = 1000*/)
|
|
|
{
|
|
|
if (resetType == REBOOT_TYPE_DEVICE)
|
|
|
{
|
|
|
// reboot the device
|
|
|
if (!manually)
|
|
|
{
|
|
|
time_t rebootTime = GetRebootTime();
|
|
|
time_t ts = time(NULL);
|
|
|
if ((ts - rebootTime) < 1800)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "Frequent REBOOT DEV Cancelled Prev RBT Time=%lld", (int64_t)rebootTime);
|
|
|
return false;
|
|
|
}
|
|
|
}
|
|
|
if (manually)
|
|
|
{
|
|
|
// GpioControl::reboot();
|
|
|
RestartApp(resetType, timeout, reason);
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
long timeout = 1000;
|
|
|
RestartApp(resetType, timeout, reason);
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::RestartApp(int resetType, long timeout, const std::string& reason)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
|
|
|
jstring jreason = NULL;
|
|
|
if (!reason.empty())
|
|
|
{
|
|
|
jreason = env->NewStringUTF(reason.c_str());
|
|
|
}
|
|
|
env->CallVoidMethod(m_javaService, mRebootMid, resetType, (jlong)timeout, jreason);
|
|
|
if (jreason != NULL) env->DeleteLocalRef(jreason);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::EnableGPS(bool enabled)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return false;
|
|
|
}
|
|
|
jboolean jenabled = enabled ? JNI_TRUE : JNI_FALSE;
|
|
|
env->CallVoidMethod(m_javaService, mEnableGpsMid, jenabled);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
float CPhoneDevice::QueryBattaryVoltage(int timesForAvg, bool* isCharging)
|
|
|
{
|
|
|
if (timesForAvg <= 0)
|
|
|
{
|
|
|
return 0.0f;
|
|
|
}
|
|
|
|
|
|
int val = 0;
|
|
|
int totalVals = 0;
|
|
|
float chargingBusVoltage = 0.0f;
|
|
|
for (int idx = 0; idx < timesForAvg; idx++)
|
|
|
{
|
|
|
val = GpioControl::getChargingBusVoltage();
|
|
|
if (val > 1000)
|
|
|
{
|
|
|
chargingBusVoltage = (float)val / 1000.0;
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (isCharging != NULL)
|
|
|
{
|
|
|
*isCharging = chargingBusVoltage > DEFAULT_WARNING_CHARGING_BUS_VOL;
|
|
|
}
|
|
|
|
|
|
int matched = 0;
|
|
|
for (int idx = 0; idx < timesForAvg; idx++)
|
|
|
{
|
|
|
val = GpioControl::getBatteryVoltage(); // // BatVol
|
|
|
if (val > 0)
|
|
|
{
|
|
|
totalVals += val > BATTARY_VOLTAGE_MAX ? BATTARY_VOLTAGE_MAX : val;
|
|
|
matched++;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
return (matched > 0) ? ((float)totalVals / 1000.0 / matched) : 0;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::RequestPosition()
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
jboolean ret = env->CallBooleanMethod(m_javaService, mRequestPositionMid);
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return (ret == JNI_TRUE);
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::handleSignal(int sig, siginfo_t *si, void *uc)
|
|
|
{
|
|
|
TIMER_CONTEXT* context = (TIMER_CONTEXT*)(si->si_value.sival_ptr);
|
|
|
context->device->handleTimerImpl(context);
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::RegisterHandlerForSignal(int sig)
|
|
|
{
|
|
|
return true;
|
|
|
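// Note: the unconditional return above leaves the signal-based setup below
// unreachable; timer callbacks are delivered via SIGEV_THREAD instead
// (see RegisterTimer).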
// Establish handler for timer signal
|
|
|
struct sigaction sa;
|
|
|
sigset_t mask;
|
|
|
|
|
|
sa.sa_flags = SA_SIGINFO;
|
|
|
sa.sa_sigaction = CPhoneDevice::handleSignal;
|
|
|
sigemptyset(&sa.sa_mask);
|
|
|
if (sigaction(sig, &sa, NULL) == -1)
|
|
|
{
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
// Block timer signal temporarily
|
|
|
|
|
|
// printf("Blocking signal %d\n", SIG);
|
|
|
sigemptyset(&mask);
|
|
|
sigaddset(&mask, sig);
|
|
|
if (sigprocmask(SIG_SETMASK, &mask, NULL) == -1)
|
|
|
{
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::handleTimer(union sigval v)
|
|
|
{
|
|
|
#ifdef _DEBUG
|
|
|
setThreadName("bztimer");
|
|
|
#endif
|
|
|
TIMER_CONTEXT* context = (TIMER_CONTEXT*)(v.sival_ptr);
|
|
|
context->device->handleTimerImpl(context);
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::handleTimerImpl(CPhoneDevice::TIMER_CONTEXT* context)
|
|
|
{
|
|
|
context->times++;
|
|
|
if (context->expectedTimes == 0 || context->times <= context->expectedTimes)
|
|
|
{
|
|
|
if (m_listener != NULL)
|
|
|
{
|
|
|
m_listener->OnTimeout(context->uid, context->timerType, context->data, context->times);
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::handleRebootTimer(union sigval v)
|
|
|
{
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
const char *path = "/sdcard/com.xypower.mpapp/tmp/closeThreadReboot.txt";
|
|
|
FILE* file = fopen(path, "w");
|
|
|
if (file) {
|
|
|
fprintf(file, "Restarting app due to: Camera Can't Close\n");
|
|
|
fclose(file);
|
|
|
}
|
|
|
#endif
|
|
|
CPhoneDevice* pDevice = (CPhoneDevice*)(v.sival_ptr);
|
|
|
const IDevice::PHOTO_INFO& photoInfo = pDevice->mPhotoInfo;
|
|
|
// Reboot APP
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app CH=%u PR=%X", photoInfo.channel, photoInfo.preset);
|
|
|
pDevice->RestartApp(REBOOT_TYPE_APP, 30000, "Camera Cant Close");
|
|
|
}
|
|
|
|
|
|
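// Creates a POSIX timer whose expirations are delivered on a dedicated
// thread (SIGEV_THREAD) to CPhoneDevice::handleTimer. `timeout` is treated
// as milliseconds; when `times` != 1 the timer repeats with the same
// interval. The timer handle doubles as the returned timer uid and is
// tracked in mTimers for later cancellation.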
IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times/* = 0*/)
|
|
|
{
|
|
|
struct sigevent evp = { 0 };
|
|
|
struct itimerspec ts = { 0 };
|
|
|
timer_t timer;
|
|
|
int ret;
|
|
|
|
|
|
TIMER_CONTEXT* context = new TIMER_CONTEXT();
|
|
|
context->device = this;
|
|
|
context->data = data;
|
|
|
context->timerType = timerType;
|
|
|
context->expectedTimes = times;
|
|
|
context->times = 0;
|
|
|
context->uid = 0;
|
|
|
|
|
|
evp.sigev_value.sival_ptr = context;
|
|
|
evp.sigev_notify = SIGEV_THREAD; //SIGEV_THREAD_ID;
|
|
|
evp.sigev_notify_function = CPhoneDevice::handleTimer;
|
|
|
// evp.sigev_notify_thread_id = gettid();
|
|
|
// evp.sigev_notify = SIGEV_SIGNAL;
|
|
|
// evp.sigev_signo = SIGUSR2;
|
|
|
|
|
|
ret = timer_create(CLOCK_REALTIME, &evp, &timer);
|
|
|
if( ret)
|
|
|
{
|
|
|
int err = errno;
|
|
|
delete context;
|
|
|
return INVALID_TIMER_UID;
|
|
|
}
|
|
|
|
|
|
context->uid = (uint64_t)timer;
|
|
|
ts.it_value.tv_sec = (timeout / 1000);
ts.it_value.tv_nsec = (timeout % 1000) * 1000000; // timeout is in milliseconds; the sub-second remainder must be scaled to nanoseconds
|
|
|
if (times != 1)
|
|
|
{
|
|
|
ts.it_interval.tv_sec = ts.it_value.tv_sec;
|
|
|
ts.it_interval.tv_nsec = ts.it_value.tv_nsec;
|
|
|
}
|
|
|
|
|
|
ret = timer_settime(timer, 0, &ts, NULL);
|
|
|
if(ret)
|
|
|
{
|
|
|
timer_delete(timer);
|
|
|
delete context;
|
|
|
return INVALID_TIMER_UID;
|
|
|
}
|
|
|
|
|
|
m_devLocker.lock();
|
|
|
mTimers.insert(mTimers.end(), std::pair<IDevice::timer_uid_t, TIMER_CONTEXT*>((IDevice::timer_uid_t)timer, context));
|
|
|
m_devLocker.unlock();
|
|
|
return (IDevice::timer_uid_t)timer;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::UnregisterTimer(IDevice::timer_uid_t uid)
|
|
|
{
|
|
|
timer_t timer = (timer_t)uid;
|
|
|
int res = timer_delete(timer);
|
|
|
|
|
|
m_devLocker.lock();
|
|
|
std::map<IDevice::timer_uid_t, TIMER_CONTEXT*>::iterator it = mTimers.find(uid);
|
|
|
if (it != mTimers.end())
|
|
|
{
|
|
|
delete it->second;
|
|
|
mTimers.erase(it);
|
|
|
m_devLocker.unlock();
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
m_devLocker.unlock();
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
uint64_t CPhoneDevice::RequestWakelock(uint64_t timeout)
|
|
|
{
|
|
|
uint64_t wakelockId = m_wakelockIdFeed.fetch_add(1);
|
|
|
std::string name = WAKELOCK_NAME;
|
|
|
name += to_string(wakelockId);
|
|
|
|
|
|
// ALOGI("RequestWakelock=%lld",wakelockId);
|
|
|
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return 0;
|
|
|
}
|
|
|
jstring jname = env->NewStringUTF(name.c_str());
|
|
|
jlong jtimeout = (jlong)timeout;
|
|
|
|
|
|
env->CallVoidMethod(m_javaService, mRequestWakelockMid, jname, jtimeout);
|
|
|
// env->ReleaseStringUTFChars(jname, name.c_str());
|
|
|
env->DeleteLocalRef(jname);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return wakelockId;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::ReleaseWakelock(uint64_t wakelock)
|
|
|
{
|
|
|
// ALOGI("ReleaseWakelock=%lld", wakelock);
|
|
|
std::string name = WAKELOCK_NAME;
|
|
|
name += to_string(wakelock);
|
|
|
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return false;
|
|
|
}
|
|
|
jstring jname = env->NewStringUTF(name.c_str());
|
|
|
|
|
|
if (!env->IsSameObject(m_javaService, NULL))
|
|
|
{
|
|
|
env->CallVoidMethod(m_javaService, mReleaseWakelockMid, jname);
|
|
|
}
|
|
|
env->DeleteLocalRef(jname);
|
|
|
// env->ReleaseStringUTFChars(jname, name.c_str());
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
IDevice::timer_uid_t CPhoneDevice::RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto)
|
|
|
{
|
|
|
mHeartbeatStartTime = time(NULL);
|
|
|
mHeartbeatDuration = timeout;
|
|
|
|
|
|
IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);
|
|
|
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return 0;
|
|
|
}
|
|
|
#ifdef ALIGN_HB_TIMER_TO_PHOTO
|
|
|
env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout, (jlong)tsForNextPhoto);
|
|
|
#else
|
|
|
env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout, 0);
|
|
|
#endif
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return uid;
|
|
|
}
|
|
|
|
|
|
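// Takes a still photo from an IP camera: waits for the GPIO self-test to
// finish, powers the Ethernet interface, waits (roughly 10 s at most) for a
// network handle, builds the vendor-specific snapshot URL (Hai Kang,
// Hang Yu, Yu Shi and the newer Hang Yu firmware are handled), retries the
// HTTP capture, decodes the JPEG and hands the frame to PostProcessPhoto;
// failures are reported through TakePhotoCb(0, ...).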
bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
|
|
|
{
|
|
|
// AutoEnv autoEnv(pThis->m_vm);
|
|
|
time_t ts = time(NULL);
|
|
|
uint32_t waitTime = localPhotoInfo.selfTestingTime;
|
|
|
if(!GpioControl::GetSelftestStatus(waitTime))
|
|
|
{
|
|
|
m_isSelfTesting.store(true);
|
|
|
waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240;
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(waitTime));
|
|
|
m_isSelfTesting.store(false);
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
|
|
|
|
|
|
uint32_t netWaitTime = (localPhotoInfo.cameraType == CAM_TYPE_PLZ) ? 20 : 4;
|
|
|
std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(netWaitTime);
|
|
|
|
|
|
net_handle_t netHandle = GetEthnetHandle();
|
|
|
if (netHandle == 0)
|
|
|
{
|
|
|
// Wait about 10s
|
|
|
for (int idx = 0; idx < 84; idx++)
|
|
|
{
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(128));
|
|
|
netHandle = GetEthnetHandle();
|
|
|
|
|
|
if (netHandle != 0)
|
|
|
{
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (netHandle == 0)
|
|
|
{
|
|
|
// timeout
|
|
|
std::string pwrStatus = powerCtrlPtr->GetStatus();
|
|
|
pwrStatus += ethernetPowerCtrl->GetStatus();
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet Not Existing CH=%u PR=%X PHOTOID=%u PWR:%s",
|
|
|
(uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId, pwrStatus.c_str());
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
return false;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
unsigned int ip = 0;
|
|
|
unsigned int netMask = 0;
|
|
|
unsigned int gateway = 0;
|
|
|
char buf[32] = { 0 };
|
|
|
if (GetNetInfo("eth0", ip, netMask, gateway))
|
|
|
{
|
|
|
// const
|
|
|
sockaddr_in addrIn = { AF_INET, 0, ip};
|
|
|
inet_ntop(AF_INET, &addrIn.sin_addr, buf, sizeof(buf)); // addrIn is a sockaddr_in; convert the IPv4 address to dotted-decimal text
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available Handle=%llu IP=%s CH=%u PR=%X PHOTOID=%u", (uint64_t)netHandle, buf, (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
}
|
|
|
|
|
|
// SetStaticIp();
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(256));
|
|
|
|
|
|
NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 };
|
|
|
if (localPhotoInfo.vendor == 1)
|
|
|
{
|
|
|
// Hai Kang
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
|
|
|
}
|
|
|
else if (localPhotoInfo.vendor == 2)
|
|
|
{
|
|
|
// Hang Yu
|
|
|
strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
|
|
|
}
|
|
|
else if (localPhotoInfo.vendor == 3)
|
|
|
{
|
|
|
// Yu Shi
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
|
|
|
int streamSid = 0; // should put into config
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", (uint32_t)localPhotoInfo.cameraId, streamSid);
|
|
|
}
|
|
|
else if (localPhotoInfo.vendor == 5)
|
|
|
{
|
|
|
// Hang Yu - New
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
|
|
|
// http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
|
|
|
// http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
|
|
|
// http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)localPhotoInfo.cameraId);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
struct in_addr addr;
|
|
|
addr.s_addr = localPhotoInfo.ip;
|
|
|
strcpy(netPhotoInfo.ip, inet_ntoa(addr));
|
|
|
strcpy(netPhotoInfo.outputPath, path.c_str());
|
|
|
if (!localPhotoInfo.userName.empty())
|
|
|
{
|
|
|
size_t len = std::min<size_t>(sizeof(netPhotoInfo.userName) - 1, localPhotoInfo.userName.size());
|
|
|
strncpy(netPhotoInfo.userName, localPhotoInfo.userName.c_str(), len);
|
|
|
}
|
|
|
if (!localPhotoInfo.password.empty())
|
|
|
{
|
|
|
size_t len = std::min<size_t>(sizeof(netPhotoInfo.password) - 1, localPhotoInfo.password.size());
|
|
|
strncpy(netPhotoInfo.password, localPhotoInfo.password.c_str(), len);
|
|
|
}
|
|
|
// strcpy(netPhotoInfo.interface, "eth0");
|
|
|
|
|
|
std::vector<uint8_t> img;
|
|
|
|
|
|
bool netCaptureResult = false;
|
|
|
for (int idx = 0; idx < 64; idx++)
|
|
|
{
|
|
|
netHandle = GetEthnetHandle();
|
|
|
netPhotoInfo.netHandle = netHandle;
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "NetCapture %d NetHandle=%lld PHOTOID=%u", idx, netHandle, localPhotoInfo.photoId);
|
|
|
|
|
|
if(localPhotoInfo.vendor == 3)
|
|
|
{
|
|
|
UniviewResolutionSet(netPhotoInfo, localPhotoInfo.cameraId,localPhotoInfo.resolution);
|
|
|
}
|
|
|
|
|
|
img.clear();
|
|
|
netCaptureResult = requestCapture(localPhotoInfo.channel, localPhotoInfo.preset, netPhotoInfo, img);
|
|
|
if (netCaptureResult && !img.empty())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "NetCapture Succeeded PHOTOID=%u Img Size=%u", localPhotoInfo.photoId, (uint32_t)img.size());
|
|
|
break;
|
|
|
}
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
|
|
|
}
|
|
|
|
|
|
ethernetPowerCtrl.reset();
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
|
|
|
|
|
|
cv::Mat rgb;
|
|
|
if (netCaptureResult && !img.empty())
|
|
|
{
|
|
|
rgb = cv::imdecode(cv::Mat(img), cv::IMREAD_COLOR);
|
|
|
}
|
|
|
if (!rgb.empty())
|
|
|
{
|
|
|
time_t takingTime = ts;
|
|
|
if (localPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = localPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = localPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + localPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
localPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
// Notify to take next photo
|
|
|
TakePhotoCb(1, localPhotoInfo, "", takingTime);
|
|
|
|
|
|
#ifdef _DEBUG
|
|
|
// cv::imwrite("/sdcard/com.xypower.mpapp/tmp/netimg2.jpg", rgb);
|
|
|
#endif
|
|
|
netCaptureResult = PostProcessPhoto(localPhotoInfo, osds, path, "", rgb);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
|
|
|
localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
|
|
|
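// Video counterpart of TakePhotoWithNetCamera: after the same self-test and
// Ethernet power-up sequence it asks the vendor-specific VendorCtrl for an
// RTSP streaming URL, dumps it to a temporary MP4 with dumpRtspToMp4 for
// `duration` seconds, then renames the file into place and reports the
// result through TakePhotoCb.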
bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
|
|
|
{
|
|
|
// AutoEnv autoEnv(pThis->m_vm);
|
|
|
time_t ts = time(NULL);
|
|
|
uint32_t waitTime = localPhotoInfo.selfTestingTime;
|
|
|
if(!GpioControl::GetSelftestStatus(waitTime))
|
|
|
{
|
|
|
m_isSelfTesting.store(true);
|
|
|
waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240;
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(waitTime));
|
|
|
m_isSelfTesting.store(false);
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
|
|
|
std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(1);
|
|
|
// std::shared_ptr<PowerControl> ethernetPowerCtrl;
|
|
|
|
|
|
net_handle_t netHandle = GetEthnetHandle();
|
|
|
if (netHandle == 0)
|
|
|
{
|
|
|
// Wait about 10s
|
|
|
for (int idx = 0; idx < 84; idx++)
|
|
|
{
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(128));
|
|
|
netHandle = GetEthnetHandle();
|
|
|
|
|
|
if (netHandle != 0)
|
|
|
{
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (netHandle == 0)
|
|
|
{
|
|
|
// timeout
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
#ifdef NDEBUG
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
return false;
|
|
|
#endif
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
}
|
|
|
|
|
|
// SetStaticIp();
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(256));
|
|
|
|
|
|
struct in_addr addr;
|
|
|
char ip[32] = { 0 };
|
|
|
addr.s_addr = localPhotoInfo.ip;
|
|
|
strcpy(ip, inet_ntoa(addr));
|
|
|
// strcpy(netPhotoInfo.outputPath, path.c_str());
|
|
|
|
|
|
VendorCtrl* vendorCtrl = MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip, localPhotoInfo.userName, localPhotoInfo.password, netHandle);
|
|
|
if (vendorCtrl == NULL)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
std::string streamingUrl = vendorCtrl->GetStreamingUrl(localPhotoInfo.cameraId);
|
|
|
|
|
|
if (streamingUrl.empty())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Invalid Streaming URL CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
return false;
|
|
|
}
|
|
|
// strcpy(netPhotoInfo.outputPath, path.c_str());
|
|
|
|
|
|
// strcpy(netPhotoInfo.interface, "eth0");
|
|
|
|
|
|
localPhotoInfo.photoTime = time(NULL);
|
|
|
std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
|
|
|
// RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
|
|
|
// dumper.start();
|
|
|
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, GetEthnetHandle());
|
|
|
|
|
|
ethernetPowerCtrl.reset();
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
|
|
|
|
|
|
std::string fullPath = endsWith(mPath, ".mp4") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
|
|
|
|
|
|
if (existsFile(tmpFile))
|
|
|
{
|
|
|
std::rename(tmpFile.c_str(), fullPath.c_str());
|
|
|
TakePhotoCb(3, localPhotoInfo, "", localPhotoInfo.photoTime);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
|
|
|
localPhotoInfo.photoId, ip, streamingUrl.c_str());
|
|
|
}
|
|
|
// Notify to take next photo
|
|
|
// TakePhotoCb(1, localPhotoInfo, "", takingTime);
|
|
|
|
|
|
// XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
|
|
|
// localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
|
|
|
// TakePhotoCb(0, localPhotoInfo, "", 0);
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& photoInfo, const std::string& url, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
|
|
|
{
|
|
|
#if 0
|
|
|
if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM)
|
|
|
{
|
|
|
std::map<uint8_t, std::shared_ptr<Streaming> >::iterator it = m_streamings.find(photoInfo.channel);
|
|
|
if (it != m_streamings.end())
|
|
|
{
|
|
|
it->second->stop();
|
|
|
it->second.reset();
|
|
|
m_streamings.erase(it);
|
|
|
}
|
|
|
|
|
|
NET_PHOTO_INFO netPhotoInfo = { 0, 0 };
|
|
|
if (photoInfo.vendor == 1)
|
|
|
{
|
|
|
// Hai Kang
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
|
|
|
}
|
|
|
else if (photoInfo.vendor == 2)
|
|
|
{
|
|
|
// Hang Yu
|
|
|
strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
|
|
|
}
|
|
|
else if (photoInfo.vendor == 3)
|
|
|
{
|
|
|
// Yu Shi
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
|
|
|
int streamSid = 0; // should be read from config
|
|
|
// rtsp://192.168.0.13:554/media/video1
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)photoInfo.cameraId);
|
|
|
// strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
|
|
|
}
|
|
|
else if (photoInfo.vendor == 5)
|
|
|
{
|
|
|
// Hang Yu - New
|
|
|
netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
|
|
|
// http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
|
|
|
// http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
|
|
|
// http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
|
|
|
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)photoInfo.cameraId);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.vendor, (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
TakePhotoCb(0, photoInfo, "", 0);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
StreamForwarder* forwarder = new StreamForwarder();
|
|
|
m_streamings[photoInfo.channel] = std::shared_ptr<Streaming>((Streaming*)forwarder);
|
|
|
// Initialize with RTSP input and RTMP output
|
|
|
if (!forwarder->initialize(std::string(netPhotoInfo.url), url)) {
|
|
|
std::cerr << "Failed to initialize stream forwarder" << std::endl;
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
// Optional: Set callback to process video frames
|
|
|
#if 0
|
|
|
forwarder->setFrameCallback([](uint8_t* data, int linesize, int width, int height) {
|
|
|
// Process frame data here
|
|
|
// Example: Add OSD overlay
|
|
|
});
|
|
|
#endif
|
|
|
|
|
|
// Start forwarding
|
|
|
forwarder->start();
|
|
|
|
|
|
// Wait for user input to stop
|
|
|
// std::cout << "Press Enter to stop streaming..." << std::endl;
|
|
|
// std::cin.get();
|
|
|
|
|
|
// forwarder.stop();
|
|
|
}
|
|
|
else if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF)
|
|
|
{
|
|
|
auto it = m_streamings.find(photoInfo.channel);
|
|
|
if (it != m_streamings.end())
|
|
|
{
|
|
|
it->second->stop();
|
|
|
it->second.reset();
|
|
|
m_streamings.erase(it);
|
|
|
}
|
|
|
}
|
|
|
#endif
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path)
|
|
|
{
|
|
|
if (photoInfo.width == 0 || photoInfo.height == 0)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "TP: Invalid Size: (%u-%u) PHOTOID=%u", (unsigned int)photoInfo.width, (unsigned int)photoInfo.height, photoInfo.photoId);
|
|
|
return false;
|
|
|
}
|
|
|
if (m_threadClose.joinable())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: Wait Prev Thread CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
struct sigevent evp = { 0 };
|
|
|
struct itimerspec ts = { 0 };
|
|
|
timer_t timer;
|
|
|
int ret;
|
|
|
|
|
|
evp.sigev_value.sival_ptr = this;
|
|
|
evp.sigev_notify = SIGEV_THREAD; //SIGEV_THREAD_ID;
|
|
|
evp.sigev_notify_function = CPhoneDevice::handleRebootTimer;
|
|
|
// evp.sigev_notify_thread_id = gettid();
|
|
|
// evp.sigev_notify = SIGEV_SIGNAL;
|
|
|
// evp.sigev_signo = SIGUSR2;
|
|
|
|
|
|
ret = timer_create(CLOCK_REALTIME, &evp, &timer);
|
|
|
if( ret == 0)
|
|
|
{
|
|
|
ts.it_value.tv_sec = 8; // 8 seconds
|
|
|
ts.it_value.tv_nsec = 0;
|
|
|
ret = timer_settime(timer, 0, &ts, NULL);
|
|
|
}
|
|
|
m_threadClose.join();
|
|
|
if (ret == 0)
{
    timer_delete(timer);
}
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: Wait Prev Thread End CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
}
|
|
|
|
|
|
if (mCamera != NULL)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: mCamera ISNOT null CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
delete mCamera;
|
|
|
mCamera = NULL;
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
mPhotoInfo = photoInfo;
|
|
|
mPath = path;
|
|
|
mOsds = osds;
|
|
|
|
|
|
bool res = false;
|
|
|
|
|
|
std::shared_ptr<PowerControl> powerCtrlPtr;
|
|
|
|
|
|
if (photoInfo.cameraType == CAM_TYPE_MIPI)
|
|
|
{
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new CameraPowerCtrl(mPhotoInfo.closeDelayTime));
|
|
|
}
|
|
|
else if (photoInfo.cameraType == CAM_TYPE_USB)
|
|
|
{
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new UsbCameraPowerCtrl(mPhotoInfo.closeDelayTime));
|
|
|
}
|
|
|
else if (photoInfo.cameraType == CAM_TYPE_NET)
|
|
|
{
|
|
|
if (photoInfo.retries > 0)
|
|
|
{
|
|
|
std::this_thread::sleep_for(std::chrono::seconds(3));
|
|
|
}
|
|
|
if(mPhotoInfo.scheduleTime == 0)
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new NetCameraPowerCtrl(mPhotoInfo.closeDelayTime));
|
|
|
else
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new NetCameraPowerCtrl(2));
|
|
|
}
|
|
|
else if (photoInfo.cameraType == CAM_TYPE_PLZ)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "PTZ PWR turned ON");
|
|
|
if(mPhotoInfo.scheduleTime == 0)
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new PlzCameraPowerCtrl(mPhotoInfo.closeDelayTime));
|
|
|
else
|
|
|
powerCtrlPtr = std::shared_ptr<PowerControl>(new PlzCameraPowerCtrl(2));
|
|
|
}
|
|
|
|
|
|
res = true;
|
|
|
if ((mPhotoInfo.mediaType == 0) && ((mPhotoInfo.cameraType == CAM_TYPE_MIPI) || (mPhotoInfo.cameraType == CAM_TYPE_USB)))
|
|
|
{
|
|
|
NdkCamera::CAMERA_PARAMS params;
|
|
|
memset(&params, 0, sizeof(params));
|
|
|
|
|
|
params.sceneMode = mPhotoInfo.sceneMode;
|
|
|
params.autoFocus = mPhotoInfo.autoFocus;
|
|
|
params.autoExposure = mPhotoInfo.autoExposure;
|
|
|
params.focusTimeout = mPhotoInfo.focusTimeout * 1000;
|
|
|
params.exposureTime = mPhotoInfo.exposureTime;
|
|
|
params.sensitivity = mPhotoInfo.sensitivity;
|
|
|
params.compensation = mPhotoInfo.compensation;
|
|
|
params.orientation = mPhotoInfo.orientation;
|
|
|
params.zoom = mPhotoInfo.zoom;
|
|
|
params.zoomRatio = mPhotoInfo.zoomRatio;
|
|
|
params.requestTemplate = mPhotoInfo.requestTemplate;
|
|
|
params.awbMode = mPhotoInfo.awbMode;
|
|
|
params.wait3ALocked = mPhotoInfo.wait3ALocked;
|
|
|
params.customHdr = mPhotoInfo.customHdr;
|
|
|
params.hdrStep = mPhotoInfo.hdrStep;
|
|
|
params.burstRawCapture = mPhotoInfo.usingRawFormat;
|
|
|
params.burstCaptures = mPhotoInfo.burstCaptures;
|
|
|
if (params.requestTemplate <= 0 || params.requestTemplate > 5)
|
|
|
{
|
|
|
params.requestTemplate = 2;
|
|
|
}
|
|
|
|
|
|
if (mPhotoInfo.autoExposure != 0 && mPhotoInfo.customHdr != 0 && mPhotoInfo.channel == 1)
|
|
|
{
|
|
|
params.autoExposure = 0;
|
|
|
if (params.exposureTime == 0)
|
|
|
{
|
|
|
params.exposureTime = 100000000;
|
|
|
}
|
|
|
if (params.sensitivity == 0)
|
|
|
{
|
|
|
params.sensitivity = 110;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
#if 0
|
|
|
if (photoInfo.ldrEnabled)
|
|
|
{
|
|
|
if (GpioControl::getLightAdc() > 1400)
|
|
|
{
|
|
|
params.autoExposure = 0;
|
|
|
params.exposureTime = 1200000000;
|
|
|
params.sensitivity = 1200;
|
|
|
}
|
|
|
}
|
|
|
#endif
|
|
|
|
|
|
mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
|
|
|
// mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
|
|
|
if (mCamera->open(to_string(mPhotoInfo.cameraId)) == 0)
|
|
|
{
|
|
|
m_powerCtrlPtr = powerCtrlPtr;
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Succeeded to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
bool hasFatalError = mCamera->HasFatalError();
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Failed to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
delete mCamera;
|
|
|
mCamera = NULL;
|
|
|
res = false;
|
|
|
|
|
|
{
|
|
|
std::shared_ptr<PowerControl> empty;
|
|
|
empty.swap(powerCtrlPtr);
|
|
|
}
|
|
|
|
|
|
if (hasFatalError)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Fatal Error Happened, will RestartAPP in 60s CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
|
|
|
RestartApp(REBOOT_TYPE_APP, 60000, "FatalErrorOnCamera");
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
else if ((mPhotoInfo.mediaType == 0) && (mPhotoInfo.cameraType == CAM_TYPE_NET))
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Start TP on NET Camera CH=%u PR=%X PHOTOID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.preset, mPhotoInfo.photoId);
|
|
|
|
|
|
// Start Thread
|
|
|
CPhoneDevice* pThis = this;
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
|
|
|
std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() mutable
|
|
|
{
|
|
|
pThis->TakePhotoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if (mPhotoInfo.mediaType == 0 && (mPhotoInfo.cameraType == CAM_TYPE_SERIAL))
|
|
|
{
|
|
|
uint64_t wid_serial = RequestWakelock(0);
|
|
|
CPhoneDevice* pThis = this;
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
IDevice::SerialsPhotoParam param = { "", 0, 0 };
|
|
|
GetPhotoSerialsParamCb(param);
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
|
|
|
std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial]() mutable
|
|
|
{
|
|
|
time_t ts = time(NULL);
|
|
|
if(localPhotoInfo.scheduleTime != 0)
|
|
|
ts = localPhotoInfo.scheduleTime;
|
|
|
pThis->OpenPTZSensors(localPhotoInfo.selfTestingTime);
|
|
|
|
|
|
if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset);
|
|
|
CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
|
|
|
std::this_thread::sleep_for(std::chrono::seconds(5));
|
|
|
}
|
|
|
|
|
|
CameraPhotoCmd(ts, localPhotoInfo.channel, TAKE_PHOTO, localPhotoInfo.resolution, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Taking photo over");
|
|
|
|
|
|
if(localPhotoInfo.scheduleTime == 0) {
|
|
|
pThis->ClosePTZSensors(localPhotoInfo.closeDelayTime);
|
|
|
}
|
|
|
else {
|
|
|
pThis->ClosePTZSensors(2);
|
|
|
}
|
|
|
|
|
|
|
|
|
time_t takingTime = ts;
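            // "Remedy" timestamp correction: if the photo is taken more than 30 seconds
            // after its scheduled (or requested) time, report scheduleTime + channel * 2
            // as the photo time instead of the actual capture time.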
|
|
|
if (localPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = localPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = localPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + localPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
IMAGE_DEF photo = { 0 };
|
|
|
std::vector<IDevice::RECOG_OBJECT> objects;
|
|
|
GetImage(0, &photo);
|
|
|
if(photo.state == 5)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"Get Serials Photo, PhotoID = %s", photo.photoname);
|
|
|
localPhotoInfo.photoTime = takingTime;
|
|
|
cv::Mat img = cv::imread(photo.photoname, cv::IMREAD_COLOR);
|
|
|
if (!img.empty())
|
|
|
{
|
|
|
int result = std::remove(photo.photoname);
|
|
|
pThis->TakePhotoCb(1, localPhotoInfo, "", takingTime, objects);
|
|
|
pThis->PostProcessPhoto(localPhotoInfo, osds, path, "", img);
|
|
|
}
|
|
|
|
|
|
}
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING,"Get Serials Photo Failed");
|
|
|
pThis->TakePhotoCb(0, localPhotoInfo, path, takingTime, objects);
|
|
|
}
|
|
|
// pThis->TakePTZPhotoCb(3, localPhotoInfo);
|
|
|
pThis->ReleaseWakelock(wid_serial);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if (mPhotoInfo.mediaType == 0 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ))
|
|
|
{
|
|
|
uint64_t wid_serial = RequestWakelock(0);
|
|
|
CPhoneDevice* pThis = this;
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
IDevice::SerialsPhotoParam param = { "", 0, 0 };
|
|
|
GetPhotoSerialsParamCb(param);
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
|
|
|
std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
|
|
|
{
|
|
|
uint32_t waitTime = localPhotoInfo.selfTestingTime;
|
|
|
if(!GpioControl::GetSelftestStatus(waitTime))
|
|
|
{
|
|
|
pThis->m_isSelfTesting.store(true);
|
|
|
time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
|
|
|
remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
|
|
|
pThis->m_isSelfTesting.store(false);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
|
|
|
}
|
|
|
|
|
|
if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset);
|
|
|
CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
|
|
|
std::this_thread::sleep_for(std::chrono::seconds(10));
|
|
|
}
|
|
|
|
|
|
pThis->TakePhotoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
|
|
|
pThis->ReleaseWakelock(wid_serial);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if ((mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF) && (mPhotoInfo.cameraType == CAM_TYPE_NET || mPhotoInfo.cameraType == CAM_TYPE_PLZ))
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Start TP(Streaming) CH=%u PR=%X PHOTOID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.preset, mPhotoInfo.photoId);
|
|
|
|
|
|
// Start Thread
|
|
|
CPhoneDevice* pThis = this;
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
|
|
|
std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() mutable
|
|
|
{
|
|
|
pThis->StartPushStreaming(localPhotoInfo, path, osds, powerCtrlPtr);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ))
|
|
|
{
|
|
|
uint64_t wid_serial = RequestWakelock(0);
|
|
|
CPhoneDevice* pThis = this;
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
IDevice::SerialsPhotoParam param = { "", 0, 0 };
|
|
|
GetPhotoSerialsParamCb(param);
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
|
|
|
std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
|
|
|
{
|
|
|
uint32_t waitTime = localPhotoInfo.selfTestingTime;
|
|
|
if(!GpioControl::GetSelftestStatus(waitTime))
|
|
|
{
|
|
|
pThis->m_isSelfTesting.store(true);
|
|
|
time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
|
|
|
remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
|
|
|
pThis->m_isSelfTesting.store(false);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
|
|
|
}
|
|
|
|
|
|
if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset);
|
|
|
CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
|
|
|
std::this_thread::sleep_for(std::chrono::seconds(10));
|
|
|
}
|
|
|
|
|
|
pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
|
|
|
pThis->ReleaseWakelock(wid_serial);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_NET))
|
|
|
{
|
|
|
uint64_t wid_serial = RequestWakelock(0);
|
|
|
CPhoneDevice* pThis = this;
|
|
|
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
|
|
|
std::thread t([localPhotoInfo, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
|
|
|
{
|
|
|
uint32_t waitTime = localPhotoInfo.selfTestingTime;
|
|
|
if(!GpioControl::GetSelftestStatus(waitTime))
|
|
|
{
|
|
|
pThis->m_isSelfTesting.store(true);
|
|
|
time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
|
|
|
remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
|
|
|
pThis->m_isSelfTesting.store(false);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
|
|
|
}
|
|
|
|
|
|
pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
|
|
|
pThis->ReleaseWakelock(wid_serial);
|
|
|
});
|
|
|
|
|
|
t.detach();
|
|
|
}
|
|
|
else if (mPhotoInfo.usingSysCamera == 1)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
IDevice::PHOTO_INFO *pPhotoInfo = new IDevice::PHOTO_INFO(mPhotoInfo);
|
|
|
|
|
|
jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE;
|
|
|
env->CallVoidMethod(m_javaService, mCallSysCameraMid, mPhotoInfo.cameraId,
|
|
|
reinterpret_cast<jlong>(pPhotoInfo));
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
jstring leftTopOSD = NULL;
|
|
|
jstring rightTopOSD = NULL;
|
|
|
jstring rightBottomOSD = NULL;
|
|
|
jstring leftBottomOSD = NULL;
|
|
|
|
|
|
for (vector<OSD_INFO>::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it)
|
|
|
{
|
|
|
if (it->text.empty())
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
switch (it->alignment)
|
|
|
{
|
|
|
case OSD_ALIGNMENT_TOP_LEFT:
|
|
|
leftTopOSD = env->NewStringUTF(it->text.c_str());
|
|
|
break;
|
|
|
case OSD_ALIGNMENT_TOP_RIGHT:
|
|
|
rightTopOSD = env->NewStringUTF(it->text.c_str());
|
|
|
break;
|
|
|
case OSD_ALIGNMENT_BOTTOM_RIGHT:
|
|
|
rightBottomOSD = env->NewStringUTF(it->text.c_str());
|
|
|
break;
|
|
|
case OSD_ALIGNMENT_BOTTOM_LEFT:
|
|
|
leftBottomOSD = env->NewStringUTF(it->text.c_str());
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90;
|
|
|
jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE;
|
|
|
env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (uint64_t)mPhotoInfo.photoId,
|
|
|
mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation,
|
|
|
leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);
|
|
|
|
|
|
if (leftTopOSD) env->DeleteLocalRef(leftTopOSD);
|
|
|
if (rightTopOSD) env->DeleteLocalRef(rightTopOSD);
|
|
|
if (rightBottomOSD) env->DeleteLocalRef(rightBottomOSD);
|
|
|
if (leftBottomOSD) env->DeleteLocalRef(leftBottomOSD);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
}
|
|
|
|
|
|
return res;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::OpenPTZSensors(uint32_t sec)
|
|
|
{
|
|
|
uint64_t wid = RequestWakelock(0);
|
|
|
unsigned long long time_now = GetMicroTimeStamp();
|
|
|
|
|
|
OpenSensors(MAIN_POWER_OPEN);
|
|
|
OpenSensors(CAMERA_SENSOR_OPEN);
|
|
|
|
|
|
if (m_isSelfTesting.load() || (GpioControl::GetCamerastatus() && GpioControl::GetSelftestStatus(sec)))
|
|
|
{
|
|
|
ReleaseWakelock(wid);
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
|
|
|
if(GpioControl::GetCamerastatus() && !GpioControl::GetSelftestStatus(sec))
|
|
|
{
|
|
|
m_isSelfTesting.store(true);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting, selfTestingtime=%u", sec);
|
|
|
auto start = std::chrono::steady_clock::now();
|
|
|
while (std::chrono::steady_clock::now() - start < std::chrono::seconds(sec))
|
|
|
{
|
|
|
if (m_shouldStopWaiting.load())
|
|
|
{
|
|
|
CloseSensors(CAMERA_SENSOR_OPEN, 0);
|
|
|
CloseSensors(MAIN_POWER_OPEN, 0);
|
|
|
m_shouldStopWaiting.store(false);
|
|
|
m_isSelfTesting.store(false);
|
|
|
ReleaseWakelock(wid);
|
|
|
return false;
|
|
|
}
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(200));
|
|
|
}
|
|
|
m_isSelfTesting.store(false);
|
|
|
m_shouldStopWaiting.store(false);
|
|
|
unsigned long long time_over = GetMicroTimeStamp();
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over, selfTestingtime=%u", (time_over - time_now)/1000);
|
|
|
}
|
|
|
|
|
|
ReleaseWakelock(wid);
|
|
|
return true;
|
|
|
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::ClosePTZSensors(uint32_t delayedCloseTime)
|
|
|
{
|
|
|
if(m_isSelfTesting.load())
|
|
|
{
|
|
|
m_shouldStopWaiting.store(true);
|
|
|
}
else
|
|
|
{
|
|
|
CloseSensors(CAMERA_SENSOR_OPEN, delayedCloseTime);
|
|
|
CloseSensors(MAIN_POWER_OPEN, delayedCloseTime);
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::GetPTZSensorsStatus(time_t waittime)
|
|
|
{
|
|
|
return GpioControl::GetSelftestStatus(waittime);
|
|
|
}
|
|
|
bool CPhoneDevice::GetCameraStatus()
|
|
|
{
|
|
|
return GpioControl::GetCamerastatus();
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CloseCamera()
|
|
|
{
|
|
|
if (mCamera != NULL)
|
|
|
{
|
|
|
auto camera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
camera->close();
|
|
|
delete camera;
|
|
|
}
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::CloseCamera2(CPhoneDevice::CPhoneCamera* camera, unsigned int photoId, unsigned char cameraType)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Start CloseCamera PHOTOID=%u", photoId);
|
|
|
// std::this_thread::sleep_for(std::chrono::milliseconds(16));
|
|
|
if (camera != NULL)
|
|
|
{
|
|
|
camera->close();
|
|
|
delete camera;
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power PHOTOID=%u", photoId);
|
|
|
{
|
|
|
std::shared_ptr<PowerControl> empty;
|
|
|
empty.swap(m_powerCtrlPtr);
|
|
|
}
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power PHOTOID=%u", photoId);
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "TP: CloseCamera PHOTOID=%u", photoId);
|
|
|
|
|
|
}
|
|
|
|
|
|
void visualize(const char* filename, const ncnn::Mat& m)
|
|
|
{
|
|
|
cv::Mat a(m.h, m.w, CV_8UC3);
|
|
|
m.to_pixels(a.data, ncnn::Mat::PIXEL_BGR2RGB);
|
|
|
|
|
|
cv::imwrite(filename, a);
|
|
|
}
|
|
|
|
|
|
void DrawOutlineText(cv::Ptr<cv::ft::FreeType2> ft2, cv::Mat& mat, const std::string& str, cv::Point startPoint, int fontSize, cv::Scalar clr, int thickness)
|
|
|
{
|
|
|
if (mat.empty())
|
|
|
{
|
|
|
return;
|
|
|
}
|
|
|
std::vector<std::string> lines = split(str, "\n");
|
|
|
int lineHeight = 0;
|
|
|
cv::Point pt = startPoint;
|
|
|
cv::Size textSize;
|
|
|
int baseline = 0;
|
|
|
|
|
|
for (std::vector<std::string>::const_iterator it = lines.cbegin(); it != lines.cend(); ++it )
|
|
|
{
|
|
|
std::string trimmedLine = *it;
|
|
|
trimString(trimmedLine);
|
|
|
textSize = ft2->getTextSize(trimmedLine, fontSize, thickness, &baseline);
|
|
|
lineHeight = std::max(fontSize, textSize.height + baseline);
|
|
|
|
|
|
ft2->putText(mat, trimmedLine, pt, fontSize, clr, thickness, cv::LINE_AA, false, true);
|
|
|
|
|
|
pt.x = startPoint.x;
|
|
|
pt.y += lineHeight + (lineHeight >> 2); // 125%
|
|
|
}
|
|
|
}
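
// Minimal usage sketch for DrawOutlineText (illustrative only; the font path, text,
// position, size and thickness below are made-up example values, not the real OSD
// configuration used elsewhere in this file):
static void DrawSampleOsd(cv::Mat& img)
{
    cv::Ptr<cv::ft::FreeType2> ft2 = cv::ft::createFreeType2();
    ft2->loadFontData("/system/fonts/NotoSansCJK-Regular.ttc", 0);
    DrawOutlineText(ft2, img, "CH=1\n2024-01-01 00:00:00",
                    cv::Point(16, 48), 28, cv::Scalar(255, 255, 255), 2);
}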
|
|
|
|
|
|
bool CPhoneDevice::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
std::shared_ptr<ACameraMetadata> result,
|
|
|
uint32_t ldr, uint32_t duration, cv::Mat rgb)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: OneCapture Finished CH=%u PR=%u PHOTOID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.preset, (uint32_t)mPhotoInfo.photoId);
|
|
|
time_t takingTime = time(NULL);
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = mPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = mPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + mPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
PHOTO_INFO photoInfo = mPhotoInfo;
|
|
|
std::string path;
|
|
|
path.swap(mPath);
|
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
|
|
|
}
|
|
|
|
|
|
int sensorOrientation = 0;
|
|
|
{
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
sensorOrientation = (int)e.data.i32[0];
|
|
|
}
|
|
|
}
|
|
|
|
|
|
bool turnOffOtg = (photoInfo.usbCamera != 0);
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, photoInfo.cameraType);
|
|
|
m_threadClose.swap(closeThread);
|
|
|
if (closeThread.joinable())
|
|
|
{
|
|
|
closeThread.detach();
|
|
|
}
|
|
|
|
|
|
if (rgb.empty())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
TakePhotoCb(0, photoInfo, "", takingTime);
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, duration, takingTime]()
|
|
|
{
|
|
|
std::string cameraInfo;
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
{
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
|
|
|
NdkCamera::EnumCameraResult(result.get(), captureResult);
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
|
|
|
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
|
|
|
char str[128] = { 0 };
|
|
|
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
|
|
|
captureResult.autoExposure, captureResult.autoFocus,
|
|
|
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
|
|
|
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
|
|
|
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
|
|
|
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
|
|
|
duration, captureResult.frameDuration);
|
|
|
cameraInfo = str;
|
|
|
}
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
#if 0
|
|
|
bool shouldRetry = false;
|
|
|
if (ldr != ~0)
|
|
|
{
|
|
|
if (ldr < MIN_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
// photoInfo.usingRawFormat = 1;
|
|
|
}
|
|
|
}
|
|
|
else if (ldr > MAX_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
}
|
|
|
|
|
|
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
}
|
|
|
}
|
|
|
#endif // 0
|
|
|
#endif // OUTPUT_DBG_INFO
|
|
|
|
|
|
// Notify to take next photo
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "TP: Notofy to Take Next CUR Info: CH=%u PR=%u PHOTOID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, (uint32_t)photoInfo.photoId);
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
if (res)
|
|
|
{
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
}
|
|
|
});
|
|
|
|
|
|
th.detach();
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
std::vector<std::shared_ptr<ACameraMetadata> >& results,
|
|
|
uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
|
|
|
{
|
|
|
time_t takingTime = time(NULL);
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = mPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = mPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + mPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
PHOTO_INFO photoInfo = mPhotoInfo;
|
|
|
std::string path;
|
|
|
path.swap(mPath);
|
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
std::shared_ptr<ByteArraysPointer> pByteArrays = std::make_shared<ByteArraysPointer>();
|
|
|
pByteArrays.get()->byteArrays.swap(frames);
|
|
|
|
|
|
bool turnOffOtg = (photoInfo.usbCamera != 0);
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, photoInfo.cameraType);
|
|
|
m_threadClose.swap(closeThread);
|
|
|
if (closeThread.joinable())
|
|
|
{
|
|
|
closeThread.detach();
|
|
|
}
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, duration, takingTime]()mutable
|
|
|
{
|
|
|
cv::Mat rgb;
|
|
|
std::string cameraInfo;
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
|
|
|
}
|
|
|
|
|
|
int sensorOrientation = 0;
|
|
|
{
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
sensorOrientation = (int)e.data.i32[0];
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
{
|
|
|
if (!results.empty())
|
|
|
{
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
|
|
|
NdkCamera::EnumCameraResult(results[0].get(), captureResult);
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
|
|
|
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
|
|
|
char str[128] = { 0 };
|
|
|
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld BURST",
|
|
|
captureResult.autoExposure, captureResult.autoFocus,
|
|
|
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
|
|
|
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
|
|
|
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
|
|
|
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
|
|
|
duration, captureResult.frameDuration);
|
|
|
cameraInfo = str;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
#if 0
|
|
|
bool shouldRetry = false;
|
|
|
if (ldr != ~0)
|
|
|
{
|
|
|
if (ldr < MIN_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
// photoInfo.usingRawFormat = 1;
|
|
|
}
|
|
|
}
|
|
|
else if (ldr > MAX_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
}
|
|
|
|
|
|
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
}
|
|
|
}
|
|
|
#endif // 0
|
|
|
#endif // OUTPUT_DBG_INFO
|
|
|
|
|
|
// Notify to take next photo
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
#ifdef USING_EXEC_HDRP
|
|
|
if (photoInfo.usingNewHdrplus)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
std::vector<std::vector<uint8_t> > localFrames;
|
|
|
localFrames.swap(pByteArrays.get()->byteArrays);
|
|
|
#ifdef _DEBUG
|
|
|
std::vector<uint8_t>& firstFrame = localFrames[0];
|
|
|
writeFile("/sdcard/com.xypower.mpapp/tmp/dngs/1.dng", &firstFrame[0], firstFrame.size());
|
|
|
writeFile("/sdcard/com.xypower.mpapp/tmp/dngs/2.dng", &localFrames[1][0], localFrames[1].size());
|
|
|
writeFile("/sdcard/com.xypower.mpapp/tmp/dngs/3.dng", &localFrames[2][0], localFrames[2].size());
|
|
|
writeFile("/sdcard/com.xypower.mpapp/tmp/dngs/4.dng", &localFrames[3][0], localFrames[3].size());
|
|
|
|
|
|
// readFile("/sdcard/com.xypower.mpapp/tmp/dngs/001.dng", localFrames[0]);
|
|
|
// readFile("/sdcard/com.xypower.mpapp/tmp/dngs/002.dng", localFrames[1]);
|
|
|
// readFile("/sdcard/com.xypower.mpapp/tmp/dngs/003.dng", localFrames[2]);
|
|
|
// readFile("/sdcard/com.xypower.mpapp/tmp/dngs/004.dng", localFrames[3]);
|
|
|
|
|
|
#endif
|
|
|
doHdrPlus(localFrames, rgb);
|
|
|
cv::cvtColor(rgb.clone(), rgb, cv::COLOR_RGB2BGR);
|
|
|
|
|
|
localFrames.clear();
|
|
|
|
|
|
#ifdef _DEBUG
|
|
|
std::vector<int> params;
|
|
|
params.push_back(cv::IMWRITE_JPEG_QUALITY);
|
|
|
params.push_back(95);
|
|
|
cv::imwrite("/sdcard/com.xypower.mpapp/tmp/1.jpg", rgb, params);
|
|
|
#endif
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
|
{
|
|
|
cv::Mat tempPic = convert16bit2_8bit_(rgb);
|
|
|
rgb = tempPic;
|
|
|
}
|
|
|
|
|
|
if (photoInfo.orientation > 0)
|
|
|
{
|
|
|
if (photoInfo.orientation == 1)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
cv::flip(rgb, rgb, 1);
|
|
|
}
|
|
|
}
|
|
|
else if (photoInfo.orientation == 2)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 1);
|
|
|
}
|
|
|
else if (photoInfo.orientation == 3)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
flip(rgb, rgb, 0);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
cv::flip(rgb, rgb, -1);
|
|
|
}
|
|
|
}
|
|
|
else if ((photoInfo.orientation % 4) == 0)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 0);
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
}
|
|
|
// cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
uint64_t uniqueId = pThis->m_uniqueIdFeed.fetch_add(1);
|
|
|
|
|
|
std::string tmpDir = pThis->m_appPath + (APP_DIR_TMP DIR_SEP_STR) + std::to_string(uniqueId) + DIR_SEP_STR;
|
|
|
EnsureDirectoryPathExists(tmpDir);
|
|
|
|
|
|
std::vector<std::vector<uint8_t> > localFrames;
|
|
|
localFrames.swap(pByteArrays.get()->byteArrays);
|
|
|
|
|
|
if (photoInfo.customHdr)
|
|
|
{
|
|
|
std::vector<std::string> imagePaths;
|
|
|
std::vector<float> exposureTimes;
|
|
|
|
|
|
for (int idx = 0; idx < localFrames.size(); idx++)
|
|
|
{
|
|
|
ACameraMetadata_const_entry val = { 0 };
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(results[idx].get(), ACAMERA_SENSOR_EXPOSURE_TIME, &val);
|
|
|
int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
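                        // ACAMERA_SENSOR_EXPOSURE_TIME is reported in nanoseconds; makeHdr()
                        // (OpenCV's Debevec calibration/merge) expects seconds, hence the 1e9 division.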
|
|
|
|
|
|
exposureTimes.push_back(exTime / 1000000000.0);
|
|
|
|
|
|
std::string imagePath = tmpDir + std::to_string(idx) + ".dng";
|
|
|
std::vector<uint8_t>& frame = localFrames[idx];
|
|
|
if (writeFile(imagePath, &frame[0], frame.size()))
|
|
|
{
|
|
|
std::vector<uint8_t> empty;
|
|
|
empty.swap(frame);
|
|
|
}
|
|
|
|
|
|
string pngPath = imagePath + ".png";
|
|
|
|
|
|
pThis->ConvertDngToPng(imagePath, pngPath);
|
|
|
imagePaths.push_back(pngPath);
|
|
|
|
|
|
#if 0
|
|
|
AImageDecoder* imageDecoder = NULL;
|
|
|
AImageDecoder_createFromBuffer(&frame[0], frame.size(), &imageDecoder);
|
|
|
|
|
|
const AImageDecoderHeaderInfo* info = AImageDecoder_getHeaderInfo(imageDecoder);
|
|
|
AndroidBitmapInfo bmpInfo = { 0 };
|
|
|
bmpInfo.flags = AImageDecoderHeaderInfo_getAlphaFlags(info);
|
|
|
bmpInfo.width = AImageDecoderHeaderInfo_getWidth(info);
|
|
|
bmpInfo.height = AImageDecoderHeaderInfo_getHeight(info);
|
|
|
bmpInfo.format = (AndroidBitmapFormat) AImageDecoderHeaderInfo_getAndroidBitmapFormat(info);
|
|
|
bmpInfo.stride = AImageDecoder_getMinimumStride(imageDecoder); // Image decoder does not
|
|
|
// use padding by default
|
|
|
int32_t fmt = ANDROID_BITMAP_FORMAT_RGBA_8888;
|
|
|
size_t stride = photoInfo.width * 4;
|
|
|
size_t size = stride * photoInfo.height;
|
|
|
|
|
|
int32_t dataSpace = AImageDecoderHeaderInfo_getDataSpace(info);
|
|
|
|
|
|
frame.resize(size);
|
|
|
|
|
|
int result = AImageDecoder_decodeImage(imageDecoder, (void *)(&frame[0]), bmpInfo.stride, size);
|
|
|
AImageDecoder_delete(imageDecoder);
|
|
|
|
|
|
if (result != ANDROID_IMAGE_DECODER_SUCCESS)
|
|
|
{
|
|
|
imagePath += ".png";
|
|
|
FILE* file = fopen(imagePath.c_str(), "wb");
|
|
|
AndroidBitmap_compress(&bmpInfo, dataSpace, &frame[0], ANDROID_BITMAP_COMPRESS_FORMAT_PNG, 100, file, AndroidBitmap_CompressWriteFile);
|
|
|
fclose(file);
|
|
|
std::vector<uint8_t> empty;
|
|
|
empty.swap(frame);
|
|
|
|
|
|
imagePaths.push_back(imagePath);
|
|
|
}
|
|
|
#endif
|
|
|
}
|
|
|
localFrames.clear();
|
|
|
|
|
|
makeHdr(exposureTimes, imagePaths, rgb);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
std::string outputPath = tmpDir + "output.bmp";
|
|
|
size_t numberOfFrames = localFrames.size();
|
|
|
std::vector<std::string> imagePaths;
|
|
|
for (int idx = 0; idx < localFrames.size(); idx++)
|
|
|
{
|
|
|
std::string imagePath = tmpDir + std::to_string(idx) + ".dng";
|
|
|
std::vector<uint8_t>& frame = localFrames[idx];
|
|
|
if (writeFile(imagePath, &frame[0], frame.size()))
|
|
|
{
|
|
|
imagePaths.push_back(imagePath);
|
|
|
}
|
|
|
}
|
|
|
localFrames.clear();
|
|
|
|
|
|
int exitCode = pThis->CallExecv(photoInfo.orientation, facing == ACAMERA_LENS_FACING_FRONT ? 1 : 0, outputPath, imagePaths);
|
|
|
for (auto it = imagePaths.cbegin(); it != imagePaths.cend(); ++it)
|
|
|
{
|
|
|
std::remove((*it).c_str());
|
|
|
}
|
|
|
|
|
|
if (existsFile(outputPath))
|
|
|
{
|
|
|
rgb = cv::imread(outputPath);
|
|
|
std::remove(outputPath.c_str());
|
|
|
}
|
|
|
|
|
|
std::error_code errCode;
|
|
|
fs::remove_all(fs::path(tmpDir), errCode);
|
|
|
}
|
|
|
|
|
|
}
|
|
|
#else // USING_EXEC_HDRP
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
hdrplus::hdrplus_pipeline pipeline;
|
|
|
std::vector<std::vector<uint8_t> > localFrames;
|
|
|
localFrames.swap(pByteArrays.get()->byteArrays);
|
|
|
pipeline.run_pipeline(localFrames, 0, rgb);
|
|
|
localFrames.clear();
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
|
{
|
|
|
cv::Mat tempPic = convert16bit2_8bit_(rgb);
|
|
|
rgb = tempPic;
|
|
|
}
|
|
|
|
|
|
if (photoInfo.orientation > 0)
|
|
|
{
|
|
|
if (photoInfo.orientation == 1)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
cv::flip(rgb, rgb, 1);
|
|
|
}
|
|
|
}
|
|
|
else if (photoInfo.orientation == 2)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 1);
|
|
|
}
|
|
|
else if (photoInfo.orientation == 3)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
flip(rgb, rgb, 0);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
cv::flip(rgb, rgb, -1);
|
|
|
}
|
|
|
}
|
|
|
else if ((photoInfo.orientation % 4) == 0)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 0);
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
}
|
|
|
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
|
|
|
#endif // USING_EXEC_HDRP
|
|
|
|
|
|
if (rgb.empty())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
pThis->TakePhotoCb(0, photoInfo, path, takingTime);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
if (res)
|
|
|
{
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
});
|
|
|
|
|
|
th.detach();
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
|
|
|
std::vector<std::shared_ptr<ACameraMetadata> >& results,
|
|
|
uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
|
|
|
{
|
|
|
time_t takingTime = time(NULL);
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = mPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = mPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + mPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
|
|
|
vector<IDevice::OSD_INFO> osds;
|
|
|
osds.swap(mOsds);
|
|
|
PHOTO_INFO photoInfo = mPhotoInfo;
|
|
|
std::string path;
|
|
|
path.swap(mPath);
|
|
|
|
|
|
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
|
|
|
|
|
|
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
|
|
|
}
|
|
|
|
|
|
int sensorOrientation = 0;
|
|
|
{
|
|
|
ACameraMetadata_const_entry e = { 0 };
|
|
|
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
|
|
|
if (status == ACAMERA_OK)
|
|
|
{
|
|
|
sensorOrientation = (int)e.data.i32[0];
|
|
|
}
|
|
|
}
|
|
|
|
|
|
bool turnOffOtg = (photoInfo.usbCamera != 0);
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
cv::Mat rgb;
|
|
|
media_status_t mstatus;
|
|
|
|
|
|
std::vector<std::shared_ptr<hdrplus::MemFile> > rawFiles;
|
|
|
|
|
|
if (photoInfo.usingRawFormat != 0)
|
|
|
{
|
|
|
for (int idx = 0; idx < frames.size(); idx++)
|
|
|
{
|
|
|
std::shared_ptr<AImage> spImage = frames[idx];
|
|
|
std::shared_ptr<ACameraMetadata> spResult = results[idx];
|
|
|
|
|
|
hdrplus::MemFile* rawImage = new hdrplus::MemFile();
|
|
|
rawFiles.push_back(std::shared_ptr<hdrplus::MemFile>(rawImage));
|
|
|
// rawImage->FromAImage(spImage.get(), characteristics.get(), spResult.get());
|
|
|
|
|
|
int32_t width = 0;
|
|
|
int32_t height = 0;
|
|
|
mstatus = AImage_getWidth(spImage.get(), &width);
|
|
|
mstatus = AImage_getHeight(spImage.get(), &height);
|
|
|
|
|
|
int32_t planeCount = 0;
|
|
|
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
|
|
|
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
|
|
|
|
|
|
uint8_t *planeData = NULL;
|
|
|
int planeDataLen = 0;
|
|
|
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
|
|
|
ALOGD("Start Converting Dng");
|
|
|
DngCreator dngCreator(characteristics.get(), spResult.get());
|
|
|
dngCreator.writeInputBuffer(rawImage->content, planeData, planeDataLen, width, height, 0);
|
|
|
ALOGD("End Converting Dng");
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
if (results.size() == 1 && frames.size() == 1)
|
|
|
{
|
|
|
std::shared_ptr<ACameraMetadata> result = results[0];
|
|
|
std::shared_ptr<AImage> frame = frames[0];
|
|
|
|
|
|
int32_t format;
|
|
|
mstatus = AImage_getFormat(frame.get(), &format);
|
|
|
|
|
|
if (format == AIMAGE_FORMAT_YUV_420_888)
|
|
|
{
|
|
|
int32_t width;
|
|
|
int32_t height;
|
|
|
mstatus = AImage_getWidth(frame.get(), &width);
|
|
|
mstatus = AImage_getHeight(frame.get(), &height);
|
|
|
|
|
|
int32_t y_pixelStride = 0;
|
|
|
int32_t u_pixelStride = 0;
|
|
|
int32_t v_pixelStride = 0;
|
|
|
AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
|
|
|
AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
|
|
|
AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
|
|
|
|
|
|
int32_t y_rowStride = 0;
|
|
|
int32_t u_rowStride = 0;
|
|
|
int32_t v_rowStride = 0;
|
|
|
AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
|
|
|
AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
|
|
|
AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
|
|
|
|
|
|
uint8_t* y_data = 0;
|
|
|
uint8_t* u_data = 0;
|
|
|
uint8_t* v_data = 0;
|
|
|
int y_len = 0;
|
|
|
int u_len = 0;
|
|
|
int v_len = 0;
|
|
|
AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
|
|
|
AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
|
|
|
AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
|
|
|
|
|
|
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
|
|
|
{
|
|
|
// already nv21
|
|
|
ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
// construct nv21
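                        // NV21 layout: a full-resolution Y plane followed by a half-resolution
                        // plane of interleaved V/U bytes. The loops below repack the three
                        // AImage planes into that layout, honoring arbitrary row/pixel strides.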
|
|
|
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
|
|
|
{
|
|
|
// Y
|
|
|
uint8_t* yptr = nv21;
|
|
|
for (int y = 0; y < height; y++)
|
|
|
{
|
|
|
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
|
|
|
for (int x = 0; x < width; x++)
|
|
|
{
|
|
|
yptr[0] = y_data_ptr[0];
|
|
|
yptr++;
|
|
|
y_data_ptr += y_pixelStride;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
// UV
|
|
|
uint8_t* uvptr = nv21 + width * height;
|
|
|
for (int y = 0; y < height / 2; y++)
|
|
|
{
|
|
|
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
|
|
|
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
|
|
|
for (int x = 0; x < width / 2; x++)
|
|
|
{
|
|
|
uvptr[0] = v_data_ptr[0];
|
|
|
uvptr[1] = u_data_ptr[0];
|
|
|
uvptr += 2;
|
|
|
v_data_ptr += v_pixelStride;
|
|
|
u_data_ptr += u_pixelStride;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
|
|
|
|
|
|
delete[] nv21;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
frames.clear();
|
|
|
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, mPhotoInfo.cameraType);
|
|
|
m_threadClose.swap(closeThread);
|
|
|
if (closeThread.joinable())
|
|
|
{
|
|
|
closeThread.detach();
|
|
|
}
|
|
|
|
|
|
CPhoneDevice* pThis = this;
|
|
|
std::thread th([pThis, characteristics, results, photoInfo, osds, path, rgb, rawFiles, facing, sensorOrientation, ldr, duration, takingTime]()mutable
|
|
|
{
|
|
|
std::string cameraInfo;
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
{
|
|
|
if (!results.empty())
|
|
|
{
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
|
|
|
NdkCamera::EnumCameraResult(results[0].get(), captureResult);
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
|
|
|
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
|
|
|
char str[128] = { 0 };
|
|
|
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
|
|
|
captureResult.autoExposure, captureResult.autoFocus,
|
|
|
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
|
|
|
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
|
|
|
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
|
|
|
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
|
|
|
duration, captureResult.frameDuration);
|
|
|
cameraInfo = str;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
#if 0
|
|
|
bool shouldRetry = false;
|
|
|
if (ldr != ~0)
|
|
|
{
|
|
|
if (ldr < MIN_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
// photoInfo.usingRawFormat = 1;
|
|
|
}
|
|
|
}
|
|
|
else if (ldr > MAX_LIGHT_Y)
|
|
|
{
|
|
|
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
}
|
|
|
|
|
|
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
}
|
|
|
}
|
|
|
#endif // 0
|
|
|
#endif // OUTPUT_DBG_INFO
|
|
|
|
|
|
// Notify to take next photo
|
|
|
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
|
|
|
|
|
|
if (photoInfo.usingRawFormat != 0)
|
|
|
{
|
|
|
#ifndef USING_EXEC_HDRP
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
hdrplus::hdrplus_pipeline pipeline;
|
|
|
pipeline.run_pipeline(rawFiles, 0, rgb);
|
|
|
rawFiles.clear();
|
|
|
#endif
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
|
|
|
{
|
|
|
cv::Mat tempPic = convert16bit2_8bit_(rgb);
|
|
|
rgb = tempPic;
|
|
|
}
|
|
|
|
|
|
if (photoInfo.orientation > 0)
|
|
|
{
|
|
|
if (photoInfo.orientation == 1)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
cv::flip(rgb, rgb, 1);
|
|
|
}
|
|
|
}
else if (photoInfo.orientation == 2)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 1);
|
|
|
}
|
|
|
else if (photoInfo.orientation == 3)
|
|
|
{
|
|
|
if (facing == ACAMERA_LENS_FACING_FRONT)
|
|
|
{
|
|
|
flip(rgb, rgb, 0);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
cv::flip(rgb, rgb, -1);
|
|
|
}
|
|
|
}
|
|
|
else if (photoInfo.orientation == 4)
|
|
|
{
|
|
|
cv::Mat tempPic;
|
|
|
cv::transpose(rgb, tempPic);
|
|
|
cv::flip(tempPic, rgb, 0);
|
|
|
}
|
|
|
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
}
|
|
|
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
|
|
|
}
|
|
|
|
|
|
if (rgb.empty())
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
|
|
|
pThis->TakePhotoCb(0, photoInfo, path, takingTime);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
|
|
|
if (res)
|
|
|
{
|
|
|
// TakePhotoCb(2, photoInfo, path, takingTime);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
});
|
|
|
|
|
|
th.detach();
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
int CPhoneDevice::CallExecv(int rotation, int frontCamera, const std::string& outputPath, const std::vector<std::string>& images)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
|
|
|
std::string pathsWithSpace;
|
|
|
for (auto it = images.cbegin(); it != images.cend(); ++it)
|
|
|
{
|
|
|
pathsWithSpace.append(*it);
|
|
|
pathsWithSpace.append(" ");
|
|
|
}
|
|
|
if (!pathsWithSpace.empty())
{
    pathsWithSpace.pop_back();
}
|
|
|
|
|
|
jstring joutputPath = env->NewStringUTF(outputPath.c_str());
|
|
|
jstring jpathWithSpace = env->NewStringUTF(pathsWithSpace.c_str());
|
|
|
jint exitCode = env->CallIntMethod(m_javaService, mExecHdrplusMid, rotation, frontCamera, joutputPath, jpathWithSpace);
|
|
|
env->DeleteLocalRef(jpathWithSpace);
|
|
|
env->DeleteLocalRef(joutputPath);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
|
|
|
return exitCode;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::OnImageReady(cv::Mat mat)
|
|
|
{
|
|
|
time_t takingTime = time(NULL);
|
|
|
if (mPhotoInfo.remedy != 0)
|
|
|
{
|
|
|
time_t scheduleTime = mPhotoInfo.scheduleTime;
|
|
|
if (scheduleTime == 0)
|
|
|
{
|
|
|
scheduleTime = mPhotoInfo.requestTime;
|
|
|
}
|
|
|
if ((takingTime - scheduleTime) > 30)
|
|
|
{
|
|
|
takingTime = scheduleTime + mPhotoInfo.channel * 2;
|
|
|
}
|
|
|
}
|
|
|
mPhotoInfo.photoTime = takingTime;
|
|
|
int baseline = 0;
|
|
|
cv::Size textSize;
|
|
|
double height = mat.size().height;
|
|
|
double width = mat.size().width;
|
|
|
// double ratio = std::min(height / 1024, width / 1920);
|
|
|
double ratio = height / 1024.0;
|
|
|
int thickness = round(1.4 * ratio);
|
|
|
if (thickness < 1) thickness = 1;
|
|
|
else if (thickness > 5) thickness = 5;
|
|
|
cv::Scalar scalarWhite(255, 255, 255); // white
|
|
|
int fontSize = (int)(28.0 * ratio);
|
|
|
cv::Point pt;
|
|
|
|
|
|
std::string fontPath;
|
|
|
if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc"))
|
|
|
{
|
|
|
fontPath = "/system/fonts/NotoSansCJK-Regular.ttc";
|
|
|
}
|
|
|
else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc"))
|
|
|
{
|
|
|
fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc";
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
fontPath = m_appPath+ "fonts/Noto.otf";
|
|
|
}
|
|
|
cv::Ptr<cv::ft::FreeType2> ft2;
|
|
|
ft2 = cv::ft::createFreeType2();
|
|
|
ft2->loadFontData(fontPath.c_str(), 0);
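// The FreeType2 wrapper is used instead of cv::putText because the CJK fonts probed above are
// needed for non-ASCII OSD text (cv::putText only covers a limited ASCII glyph set). Stock
// opencv_contrib exposes the same createFreeType2/loadFontData/putText/getTextSize API under
// cv::freetype; here it appears as cv::ft, presumably via the bundled CvText build.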
|
|
|
// cv::Rect rc(0, 0, mat.cols, mat.rows);
|
|
|
// cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED);
|
|
|
std::vector<IDevice::RECOG_OBJECT> objs;
|
|
|
|
|
|
if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (mPhotoInfo.recognization != 0))
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled");
|
|
|
|
|
|
// visualize(ncnnPath.c_str(), in);
|
|
|
#ifdef _DEBUG
|
|
|
double startTime = ncnn::get_current_time();
|
|
|
#endif // _DEBUG
|
|
|
|
|
|
bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs);
|
|
|
#ifdef _DEBUG
|
|
|
double elapsed = ncnn::get_current_time() - startTime;
|
|
|
// __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped);
|
|
|
#endif // _DEBUG
|
|
|
#ifdef _DEBUG
|
|
|
ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0));
|
|
|
#endif
|
|
|
if (detected && !objs.empty())
|
|
|
{
|
|
|
#if 0
|
|
|
static const char* class_names[] = {
|
|
|
"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light",
|
|
|
"fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
|
|
|
"elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
|
|
|
"skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
|
|
|
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple",
|
|
|
"sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch",
|
|
|
"potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
|
|
|
"microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
|
|
|
"hair drier", "toothbrush"
|
|
|
};
|
|
|
#endif
|
|
|
|
|
|
cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
|
|
|
cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
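// The masks above unpack what is presumably a 0xRRGGBB-packed config colour: the low byte goes
// into the first cv::Scalar element (blue for BGR images), the next byte green, the high byte red.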
|
|
|
float minSizeW = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
|
|
|
float minSizeH = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;
|
|
|
|
|
|
for (std::vector<IDevice::RECOG_OBJECT>::const_iterator it = objs.cbegin(); it != objs.cend();)
|
|
|
{
|
|
|
if (it->label >= m_pRecognizationCfg->items.size())
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label];
|
|
|
if (item.enabled == 0 || it->prob < item.prob)
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
if (m_pRecognizationCfg->minSize > 0)
|
|
|
{
|
|
|
if (it->w < minSizeW || it->h < minSizeH)
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if ((mPhotoInfo.recognization & 0x2) != 0)
|
|
|
{
|
|
|
cv::Rect rc(it->x, it->y, it->w, it->h);
|
|
|
cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness);
|
|
|
textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline);
|
|
|
textSize.height += baseline;
|
|
|
if (it->y > textSize.height)
|
|
|
{
|
|
|
pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
else if (mat.rows - it->y - it->h > textSize.height)
|
|
|
{
|
|
|
pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
// Inner
|
|
|
pt.y = it->y + 4 + m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
if (mat.cols - it->x > textSize.width)
|
|
|
{
|
|
|
pt.x = it->x;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
pt.x = it->x + it->w - textSize.width;
|
|
|
}
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
char buf[128];
|
|
|
snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
|
|
|
it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, buf);
|
|
|
#endif
|
|
|
ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
|
|
|
}
|
|
|
++it;
|
|
|
}
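// A sketch of an alternative formulation using the erase-remove idiom (requires <algorithm>),
// assuming the box/label drawing is split into a second loop over the surviving objects:
#if 0
objs.erase(std::remove_if(objs.begin(), objs.end(),
        [&](const IDevice::RECOG_OBJECT& o)
        {
            if (o.label >= m_pRecognizationCfg->items.size())
                return true;
            const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[o.label];
            if (item.enabled == 0 || o.prob < item.prob)
                return true;
            return (m_pRecognizationCfg->minSize > 0) && (o.w < minSizeW || o.h < minSizeH);
        }),
    objs.end());
#endif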
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
|
|
|
}
|
|
|
|
|
|
// #ifdef OUTPUT_DBG_INFO
|
|
|
|
|
|
if (mCamera != NULL)
|
|
|
{
|
|
|
|
|
|
if (mPhotoInfo.outputDbgInfo != 0)
|
|
|
{
|
|
|
cv::Scalar scalarRed(0, 0, 255); // red
|
|
|
|
|
|
char extimeunit[4] = { 0 };
|
|
|
char str[128] = { 0 };
|
|
|
|
|
|
int fs = fontSize * 2 / 3;
|
|
|
textSize = ft2->getTextSize(str, fs, -1, &baseline);
|
|
|
cv::Point lt(0, mat.rows - fs - 20 * ratio);
|
|
|
cv::Point lt2(0, lt.y - 2 * ratio);
|
|
|
cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);
|
|
|
|
|
|
if (rb.x > (int)width - 1)
|
|
|
{
|
|
|
rb.x = (int)width - 1;
|
|
|
}
|
|
|
if (rb.y > (int)height - 1)
|
|
|
{
|
|
|
rb.y = (int)height - 1;
|
|
|
}
|
|
|
cv::Mat roi = mat(cv::Rect(lt2, rb));
|
|
|
cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
|
|
|
double alpha = 0.5;
|
|
|
cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
|
|
|
|
|
|
// cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
|
|
|
ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false);
|
|
|
// DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
|
|
|
}
|
|
|
}
|
|
|
// #endif // OUTPUT_DBG_INFO
|
|
|
|
|
|
for (vector<OSD_INFO>::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it)
|
|
|
{
|
|
|
if (it->text.empty())
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
#ifdef _DEBUG
|
|
|
if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
|
|
|
{
|
|
|
int aa = 0;
|
|
|
}
|
|
|
#endif
|
|
|
|
|
|
textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline);
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline);
|
|
|
|
|
|
if (it->alignment == OSD_ALIGNMENT_TOP_LEFT)
|
|
|
{
|
|
|
pt.x = it->x * ratio;
|
|
|
pt.y = it->y * ratio;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT)
|
|
|
{
|
|
|
pt.x = width - textSize.width - it->x * ratio;
|
|
|
pt.y= it->y * ratio;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
|
|
|
{
|
|
|
pt.x = width - textSize.width - it->x * ratio;
|
|
|
pt.y = height - it->y * ratio - textSize.height - baseline;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT)
|
|
|
{
|
|
|
pt.x = it->x * ratio;
|
|
|
pt.y = height - it->y * ratio - textSize.height - baseline;
|
|
|
}
|
|
|
|
|
|
// cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height);
|
|
|
// cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2);
|
|
|
DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
|
|
|
}
|
|
|
|
|
|
std::vector<int> params;
|
|
|
params.push_back(cv::IMWRITE_JPEG_QUALITY);
|
|
|
params.push_back((int)((uint32_t)mPhotoInfo.quality));
|
|
|
|
|
|
bool res = false;
|
|
|
std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
|
|
|
bool shouldRetry = false;
|
|
|
#if 0
|
|
|
if (mCamera != NULL) {
|
|
|
NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
|
|
|
|
|
|
if (captureResult.avgY < MIN_LIGHT_Y)
|
|
|
{
|
|
|
if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
|
|
|
// mPhotoInfo.usingRawFormat = 1;
|
|
|
}
|
|
|
}
|
|
|
else if (captureResult.avgY > MAX_LIGHT_Y)
|
|
|
{
|
|
|
if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
|
|
|
{
|
|
|
shouldRetry = true;
|
|
|
char presetBuf[16] = {0};
|
|
|
snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries);
|
|
|
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg");
|
|
|
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
|
|
|
(uint32_t) captureResult.avgY);
|
|
|
}
|
|
|
|
|
|
mPhotoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
|
|
|
}
|
|
|
}
|
|
|
#endif
|
|
|
|
|
|
#endif // OUTPUT_DBG_INFO
|
|
|
bool imgExisted = std::filesystem::exists(std::filesystem::path(fullPath));
|
|
|
if (imgExisted)
|
|
|
{
|
|
|
size_t imgFileSize = getFileSize(fullPath);
|
|
|
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
|
|
|
{
|
|
|
imgExisted = false;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (!imgExisted)
|
|
|
{
|
|
|
res = cv::imwrite(fullPath.c_str(), mat, params); // assign to the outer res so the function's return value reflects the write result
|
|
|
if (!res)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to write photo: %s", fullPath.c_str());
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to write photo: %s", fullPath.c_str());
|
|
|
}
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
if (shouldRetry)
|
|
|
{
|
|
|
TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
|
|
|
}
|
|
|
#else
|
|
|
TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
|
|
|
#endif
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
ALOGI("Photo file exists: %s", mPath.c_str());
|
|
|
}
|
|
|
|
|
|
return res;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat)
|
|
|
{
|
|
|
int baseline = 0;
|
|
|
cv::Size textSize;
|
|
|
double height = mat.rows;
|
|
|
double width = mat.cols;
|
|
|
// double ratio = std::min(height / 1024, width / 1920);
|
|
|
double ratio = height / 1024.0;
|
|
|
int thickness = round(1.4 * ratio);
|
|
|
if (thickness < 1) thickness = 1;
|
|
|
else if (thickness > 5) thickness = 5;
|
|
|
cv::Scalar scalarWhite(255, 255, 255); // white
|
|
|
int fontSize = (int)(28.0 * ratio);
|
|
|
cv::Point pt;
|
|
|
|
|
|
std::string fontPath;
|
|
|
if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc"))
|
|
|
{
|
|
|
fontPath = "/system/fonts/NotoSansCJK-Regular.ttc";
|
|
|
}
|
|
|
else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc"))
|
|
|
{
|
|
|
fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc";
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
fontPath = m_appPath+ "fonts/Noto.otf";
|
|
|
}
|
|
|
cv::Ptr<cv::ft::FreeType2> ft2;
|
|
|
ft2 = cv::ft::createFreeType2();
|
|
|
ft2->loadFontData(fontPath.c_str(), 0);
|
|
|
// cv::Rect rc(0, 0, mat.cols, mat.rows);
|
|
|
// cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED);
|
|
|
std::vector<IDevice::RECOG_OBJECT> objs;
|
|
|
|
|
|
if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0))
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled");
|
|
|
|
|
|
// visualize(ncnnPath.c_str(), in);
|
|
|
#ifdef _DEBUG
|
|
|
double startTime = ncnn::get_current_time();
|
|
|
#endif // _DEBUG
|
|
|
|
|
|
bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs);
|
|
|
#ifdef _DEBUG
|
|
|
double elapsed = ncnn::get_current_time() - startTime;
|
|
|
// __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped);
|
|
|
#endif // _DEBUG
|
|
|
#ifdef _DEBUG
|
|
|
ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0));
|
|
|
#endif
|
|
|
if (detected && !objs.empty())
|
|
|
{
|
|
|
cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
|
|
|
cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
|
|
|
float minSizeW = m_pRecognizationCfg->minSize > 0 ? (photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
|
|
|
float minSizeH = m_pRecognizationCfg->minSize > 0 ? (photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;
|
|
|
|
|
|
for (std::vector<IDevice::RECOG_OBJECT>::const_iterator it = objs.cbegin(); it != objs.cend();)
|
|
|
{
|
|
|
if (it->label >= m_pRecognizationCfg->items.size())
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label];
|
|
|
if (item.enabled == 0 || it->prob < item.prob)
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
if (m_pRecognizationCfg->minSize > 0)
|
|
|
{
|
|
|
if (it->w < minSizeW || it->h < minSizeH)
|
|
|
{
|
|
|
it = objs.erase(it);
|
|
|
continue;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if ((photoInfo.recognization & 0x2) != 0)
|
|
|
{
|
|
|
cv::Rect rc(it->x, it->y, it->w, it->h);
|
|
|
cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness);
|
|
|
textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline);
|
|
|
textSize.height += baseline;
|
|
|
if (it->y > textSize.height)
|
|
|
{
|
|
|
pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
else if (mat.rows - it->y - it->h > textSize.height)
|
|
|
{
|
|
|
pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
// Inner
|
|
|
pt.y = it->y + 4 + m_pRecognizationCfg->thickness;
|
|
|
}
|
|
|
if (mat.cols - it->x > textSize.width)
|
|
|
{
|
|
|
pt.x = it->x;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
pt.x = it->x + it->w - textSize.width;
|
|
|
}
|
|
|
|
|
|
#ifdef OUTPUT_DBG_INFO
|
|
|
char buf[128];
|
|
|
snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
|
|
|
it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, buf);
|
|
|
#endif
|
|
|
ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
|
|
|
}
|
|
|
++it;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
|
|
|
}
|
|
|
|
|
|
// #ifdef OUTPUT_DBG_INFO
|
|
|
|
|
|
if (!cameraInfo.empty())
|
|
|
{
|
|
|
// NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
|
|
|
|
|
|
if (photoInfo.outputDbgInfo != 0)
|
|
|
{
|
|
|
cv::Scalar scalarRed(0, 0, 255); // red
|
|
|
|
|
|
int fs = fontSize * 2 / 3;
|
|
|
textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline);
|
|
|
cv::Point lt(0, mat.rows - fs - 20 * ratio);
|
|
|
cv::Point lt2(0, lt.y - 2 * ratio);
|
|
|
cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);
|
|
|
cv::Point rt(0 + textSize.width + 2 * ratio, mat.rows - fs - 20 * ratio);
|
|
|
|
|
|
|
|
|
if (rb.x > (int)width - 1)
|
|
|
{
|
|
|
rb.x = (int)width - 1;
|
|
|
}
|
|
|
if (rb.y > (int)height - 1)
|
|
|
{
|
|
|
rb.y = (int)height - 1;
|
|
|
}
|
|
|
cv::Mat roi = mat(cv::Rect(lt2, rb));
|
|
|
cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
|
|
|
double alpha = 0.5;
|
|
|
cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
|
|
|
|
|
|
// cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
|
|
|
ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false);
|
|
|
|
|
|
// DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
|
|
|
}
|
|
|
}
|
|
|
// #endif // OUTPUT_DBG_INFO
|
|
|
|
|
|
for (vector<OSD_INFO>::const_iterator it = osds.cbegin(); it != osds.cend(); ++it)
|
|
|
{
|
|
|
if (it->text.empty())
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
|
|
|
#ifdef _DEBUG
|
|
|
if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
|
|
|
{
|
|
|
int aa = 0;
|
|
|
}
|
|
|
#endif
|
|
|
|
|
|
textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline);
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline);
|
|
|
|
|
|
if (it->alignment == OSD_ALIGNMENT_TOP_LEFT)
|
|
|
{
|
|
|
pt.x = it->x * ratio;
|
|
|
pt.y = it->y * ratio;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT)
|
|
|
{
|
|
|
std::size_t newlinePos = it->text.find('\n');
|
|
|
if (newlinePos != std::string::npos)
|
|
|
{
|
|
|
std::string textBeforeNewline = it->text.substr(0, newlinePos);
|
|
|
std::string textAfterNewline = it->text.substr(newlinePos + 1);
|
|
|
if(textBeforeNewline.length()>=textAfterNewline.length())
|
|
|
textSize = ft2->getTextSize(textBeforeNewline, fontSize, thickness, &baseline);
|
|
|
else
|
|
|
textSize = ft2->getTextSize(textAfterNewline, fontSize, thickness, &baseline);
|
|
|
}
|
|
|
pt.x = width - textSize.width - it->x * ratio;
|
|
|
pt.y= it->y * ratio;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
|
|
|
{
|
|
|
pt.x = width - textSize.width - it->x * ratio;
|
|
|
pt.y = height - it->y * ratio - textSize.height - baseline;
|
|
|
}
|
|
|
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT)
|
|
|
{
|
|
|
pt.x = it->x * ratio;
|
|
|
pt.y = height - it->y * ratio - textSize.height - baseline;
|
|
|
}
|
|
|
|
|
|
// cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height);
|
|
|
// cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2);
|
|
|
DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
|
|
|
}
|
|
|
|
|
|
std::vector<int> params;
|
|
|
params.push_back(cv::IMWRITE_JPEG_QUALITY);
|
|
|
params.push_back((int)((uint32_t)photoInfo.quality));
|
|
|
|
|
|
bool res = false;
|
|
|
std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo));
|
|
|
|
|
|
bool imgExisted = std::filesystem::exists(std::filesystem::path(fullPath));
|
|
|
if (imgExisted)
|
|
|
{
|
|
|
size_t imgFileSize = getFileSize(fullPath);
|
|
|
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
|
|
|
{
|
|
|
imgExisted = false;
|
|
|
}
|
|
|
}
|
|
|
if (!imgExisted)
|
|
|
{
|
|
|
#ifdef _DEBUG
|
|
|
char log[256] = { 0 };
|
|
|
strcpy(log, fullPath.c_str());
|
|
|
#endif
|
|
|
std::string tmpPath = fullPath;
|
|
|
replaceAll(tmpPath, "/photos/", "/tmp/");
|
|
|
|
|
|
#if 1
|
|
|
// Save photo to temporary file and then rename to right path
|
|
|
std::vector<uint8_t> imgContents;
|
|
|
size_t imgFileSize = 0;
|
|
|
res = cv::imencode(".jpg", mat, imgContents, params); // assign to the outer res so PostProcessPhoto's return value reflects the outcome
|
|
|
if (res)
|
|
|
{
|
|
|
int errcode = 0;
|
|
|
res = writeFile(tmpPath.c_str(), &imgContents[0], imgContents.size(), errcode);
|
|
|
if (res)
|
|
|
{
|
|
|
if (existsFile(tmpPath))
|
|
|
{
|
|
|
imgFileSize = getFileSize(tmpPath);
|
|
|
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Empty File Written: %s errno=%d", tmpPath.c_str() + m_appPath.size(), errcode);
|
|
|
remove(tmpPath.c_str());
|
|
|
res = false;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s errno=%d", tmpPath.c_str() + m_appPath.size(), errcode);
|
|
|
if (existsFile(tmpPath))
|
|
|
{
|
|
|
remove(tmpPath.c_str());
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
int errcode = errno;
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Encode Image CH=%u PR=%u IMGID=%u, errno=%d", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, (uint32_t)photoInfo.photoId, errcode);
|
|
|
}
|
|
|
// bool res = cv::imwrite(tmpPath.c_str(), mat, params);
|
|
|
if (res/* && imgFileSize > 0*/)
|
|
|
{
|
|
|
res = (rename(tmpPath.c_str(), fullPath.c_str()) == 0);
|
|
|
if (res)
|
|
|
{
|
|
|
imgFileSize = getFileSize(fullPath);
|
|
|
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Empty File after rename %s", fullPath.c_str() + m_appPath.size());
|
|
|
res = false;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
}
|
|
|
#else
|
|
|
res = cv::imwrite(fullPath.c_str(), mat, params);
|
|
|
size_t imgFileSize = getFileSize(fullPath);
|
|
|
#endif
|
|
|
if (!res)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size());
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s, FileSize=%u", fullPath.c_str() + m_appPath.size(), (uint32_t)imgFileSize);
|
|
|
}
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
|
|
|
TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size());
|
|
|
}
|
|
|
|
|
|
return res;
|
|
|
}
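// The temp-file-then-rename step inside PostProcessPhoto above keeps readers from ever seeing a
// partially written JPEG. A condensed file-scope sketch of the same pattern (the helper name and
// the use of std::ofstream are illustrative, not how this file actually writes; needs <cstdio>,
// <fstream>, <string>, <vector>):
#if 0
static bool WriteFileAtomically(const std::string& finalPath, const std::string& tmpPath,
                                const std::vector<uint8_t>& bytes)
{
    {
        std::ofstream ofs(tmpPath, std::ios::binary | std::ios::trunc);
        if (!ofs.write(reinterpret_cast<const char*>(bytes.data()),
                       static_cast<std::streamsize>(bytes.size())))
        {
            std::remove(tmpPath.c_str());
            return false;
        }
    } // stream flushed and closed here
    // rename() is atomic within one filesystem, so the final path is either absent or complete
    if (std::rename(tmpPath.c_str(), finalPath.c_str()) != 0)
    {
        std::remove(tmpPath.c_str());
        return false;
    }
    return true;
}
#endif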
|
|
|
|
|
|
bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat mat, unsigned int photoId)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "RAW Capture finished: %u RES=%d", photoId, (result ? 1 : 0));
|
|
|
if (photoOrVideo)
|
|
|
{
|
|
|
if (result)
|
|
|
{
|
|
|
OnImageReady(mat);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
std::vector<IDevice::RECOG_OBJECT> objs;
|
|
|
TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs);
|
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType);
|
|
|
m_threadClose.swap(closeThread);
|
|
|
if (closeThread.joinable())
|
|
|
{
|
|
|
closeThread.detach();
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId)
|
|
|
{
|
|
|
    // Photo and video completions are handled identically here; the parameter is kept
    // only for the interface signature.
    (void)photoOrVideo;

    mPhotoInfo.photoTime = time(NULL);
    CPhoneCamera* pCamera = NULL;

    std::vector<IDevice::RECOG_OBJECT> objs;
    std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo);
    if (result)
    {
        std::rename(path, fullPath.c_str());
    }
    TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs);

    bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType);
    m_threadClose.swap(closeThread);
|
|
|
|
|
|
return result;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::onError(const std::string& msg)
|
|
|
{
|
|
|
if (mCamera == NULL)
|
|
|
{
|
|
|
int aa = 0;
|
|
|
return;
|
|
|
}
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Take Photo (IMGID=%u): %s", mPhotoInfo.photoId, msg.c_str());
|
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
TakePhotoCb(0, mPhotoInfo, mPath, 0);
|
|
|
|
|
|
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType);
|
|
|
// closeThread.detach();
|
|
|
m_threadClose.swap(closeThread);
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::onDisconnected(ACameraDevice* device)
|
|
|
{
|
|
|
if (mCamera == NULL)
|
|
|
{
|
|
|
return;
|
|
|
}
|
|
|
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Take Photo (IMGID=%u) as for Disconnection", mPhotoInfo.photoId);
|
|
|
|
|
|
CPhoneCamera* pCamera = mCamera;
|
|
|
mCamera = NULL;
|
|
|
|
|
|
TakePhotoCb(0, mPhotoInfo, mPath, 0);
|
|
|
|
|
|
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
|
|
|
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType);
|
|
|
// closeThread.detach();
|
|
|
m_threadClose.swap(closeThread);
|
|
|
}
|
|
|
|
|
|
std::string CPhoneDevice::GetFileName() const
|
|
|
{
|
|
|
return mPath;
|
|
|
}
|
|
|
|
|
|
std::string CPhoneDevice::GetVersion() const
|
|
|
{
|
|
|
// FOR OSD
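// mVersionCode packs the version as major*100000 + minor*1000 + patch, e.g. 102003 -> "1.2.3"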
|
|
|
string version = std::to_string(mVersionCode / 100000);
|
|
|
version += ".";
|
|
|
version += std::to_string((mVersionCode % 100000) / 1000);
|
|
|
version += ".";
|
|
|
version += std::to_string(mVersionCode % 1000);
|
|
|
|
|
|
return version;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::UpdatePosition(double lon, double lat, double radius, time_t ts)
|
|
|
{
|
|
|
if (m_listener != NULL)
|
|
|
{
|
|
|
if (shouldConvertPosition(lat, lon))
|
|
|
{
|
|
|
transformPosition(lat, lon);
|
|
|
}
|
|
|
return m_listener->OnPositionDataArrived(lon, lat, radius, ts);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::UpdateSignalLevel(int signalLevel)
|
|
|
{
|
|
|
m_signalLevel = signalLevel;
|
|
|
m_signalLevelUpdateTime = time(NULL);
|
|
|
XYLOG(XYLOG_SEVERITY_DEBUG, "Signal Level Updated: %d", signalLevel);
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::UpdateSimcard(const std::string& simcard)
|
|
|
{
|
|
|
m_simcard = simcard;
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed)
|
|
|
{
|
|
|
if (defaultOrEthernet)
|
|
|
{
|
|
|
net_handle_t oldHandle = NETWORK_UNSPECIFIED;
|
|
|
m_devLocker.lock();
|
|
|
oldHandle = m_defNetHandle;
|
|
|
m_defNetHandle = available ? nethandle : NETWORK_UNSPECIFIED;
|
|
|
m_devLocker.unlock();
|
|
|
changed = (oldHandle != nethandle);
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "Active Network Handle: %lld", available ? (uint64_t)nethandle : 0);
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
net_handle_t oldHandle = NETWORK_UNSPECIFIED;
|
|
|
m_devLocker.lock();
|
|
|
oldHandle = m_ethnetHandle;
|
|
|
m_ethnetHandle = available ? nethandle : NETWORK_UNSPECIFIED;
|
|
|
m_devLocker.unlock();
|
|
|
changed = (oldHandle != nethandle);
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "Ethernet Handle: %lld", available ? (uint64_t)nethandle : 0);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
net_handle_t CPhoneDevice::GetEthnetHandle() const
|
|
|
{
|
|
|
net_handle_t nethandle = NETWORK_UNSPECIFIED;
|
|
|
m_devLocker.lock();
|
|
|
nethandle = m_ethnetHandle;
|
|
|
m_devLocker.unlock();
|
|
|
return nethandle;
|
|
|
}
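// A sketch of the same getter with std::lock_guard (requires <mutex>), assuming m_devLocker is a
// mutable std::mutex; the guard releases the lock even if an exception is thrown:
#if 0
net_handle_t CPhoneDevice::GetEthnetHandle() const
{
    std::lock_guard<std::mutex> lock(m_devLocker);
    return m_ethnetHandle;
}
#endif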
|
|
|
|
|
|
void CPhoneDevice::SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway)
|
|
|
{
|
|
|
|
|
|
JNIEnv* env = NULL;
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
|
|
|
jstring jiface = env->NewStringUTF(iface.c_str());
|
|
|
#ifdef USING_N938
|
|
|
jstring jip = env->NewStringUTF("0.0.0.0");
|
|
|
#else
|
|
|
jstring jip = env->NewStringUTF(ip.c_str());
|
|
|
#endif
|
|
|
jstring jnetmask = env->NewStringUTF(netmask.c_str());
|
|
|
jstring jgw = env->NewStringUTF(gateway.c_str());
|
|
|
env->CallVoidMethod(m_javaService, mSetStaticIpMid, jiface, jip, jnetmask, jgw);
|
|
|
env->DeleteLocalRef(jgw);
|
|
|
env->DeleteLocalRef(jnetmask);
|
|
|
env->DeleteLocalRef(jip);
|
|
|
env->DeleteLocalRef(jiface);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::ConvertDngToPng(const std::string& dngPath, const std::string& pngPath)
|
|
|
{
|
|
|
JNIEnv* env = NULL;
|
|
|
jboolean ret = JNI_FALSE;
|
|
|
bool didAttachThread = false;
|
|
|
bool res = GetJniEnv(m_vm, &env, didAttachThread);
|
|
|
if (!res)
|
|
|
{
|
|
|
ALOGE("Failed to get JNI Env");
|
|
|
}
|
|
|
|
|
|
jstring jdngPath = env->NewStringUTF(dngPath.c_str());
|
|
|
jstring jpngPath = env->NewStringUTF(pngPath.c_str());
|
|
|
env->CallVoidMethod(m_javaService, mConvertDngToPngMid, jdngPath, jpngPath);
|
|
|
env->DeleteLocalRef(jdngPath);
|
|
|
env->DeleteLocalRef(jpngPath);
|
|
|
|
|
|
if (didAttachThread)
|
|
|
{
|
|
|
m_vm->DetachCurrentThread();
|
|
|
}
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char preset, const char *serfile, unsigned int baud, int addr)
|
|
|
{
|
|
|
if(GpioControl::GetSelftestStatus(waitTime) && GpioControl::GetCamerastatus()) {
|
|
|
CPhoneDevice *pThis = this;
|
|
|
string serfileStr(serfile);
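// Copy the C string now: the detached thread below captures serfileStr by value, so it never
// touches the caller-owned serfile pointer after this function returns.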
|
|
|
std::thread ctrlThread([pThis, waitTime, delayTime, cmdidx, channel, preset, serfileStr, baud,
|
|
|
addr]() mutable {
|
|
|
uint64_t wid = pThis->RequestWakelock(0);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"CameraCtrl Command= %d, preset = %u", cmdidx, preset);
|
|
|
pThis->OpenPTZSensors(waitTime);
|
|
|
CameraPhotoCmd(time(NULL), channel, cmdidx, 0, preset, serfileStr.c_str(), baud,
|
|
|
addr);
|
|
|
pThis->ClosePTZSensors(delayTime);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"CameraCtrl over");
|
|
|
pThis->ReleaseWakelock(wid);
|
|
|
});
|
|
|
ctrlThread.detach();
|
|
|
}
|
|
|
}
|
|
|
|
|
|
int CPhoneDevice::GetSerialPhoto(int devno, D_IMAGE_DEF *photo)
|
|
|
{
|
|
|
return GetImage(devno, (IMAGE_DEF*)photo);
|
|
|
|
|
|
}
|
|
|
|
|
|
void CPhoneDevice::InitSerialComm(D_SENSOR_PARAM *sensorParam, char *filedir,const char *logpath)
|
|
|
{
|
|
|
Gm_InitSerialComm((SENSOR_PARAM *)sensorParam, filedir, logpath);
|
|
|
}
|
|
|
|
|
|
int CPhoneDevice::GetIceData(IDevice::ICE_INFO *iceInfo, IDevice::ICE_TAIL *iceTail, D_SENSOR_PARAM *sensorParam)
|
|
|
{
|
|
|
m_tempData.instantaneous_windspeed = 0xff;
|
|
|
m_tempData.air_temperature = 0xff;
|
|
|
m_tempData.instantaneous_winddirection = 0xff;
|
|
|
m_tempData.humidity = 0xff;
|
|
|
|
|
|
std::unique_lock<std::mutex> lock(m_collectDataLocker);
|
|
|
if (!m_collecting.load()) {
|
|
|
m_collecting.store(true);
|
|
|
m_CollectDatacv.notify_all();
|
|
|
lock.unlock();
|
|
|
Collect_sensor_data(); //15s
|
|
|
lock.lock();
|
|
|
m_collecting.store(false);
|
|
|
m_CollectDatacv.notify_all();
|
|
|
} else {
|
|
|
m_CollectDatacv.wait(lock, [this]{ return !m_collecting.load(); });
|
|
|
// m_collecting.store(false);
|
|
|
}
|
|
|
|
|
|
Data_DEF airt;
|
|
|
// Clear equivalent ice thickness, composite suspension load and unbalanced tension difference
|
|
|
iceInfo->equal_icethickness = 0xff;
|
|
|
iceInfo->tension = 0xff;
|
|
|
iceInfo->tension_difference = 0xff;
|
|
|
bool status = true;
|
|
|
|
|
|
int pullno = 0;
|
|
|
int angleno = 0;
|
|
|
for(int num = 0; num < MAX_SERIAL_DEV_NUM; num++)
|
|
|
{
|
|
|
if(sensorParam[num].SensorsType == RALLY_PROTOCOL)
|
|
|
{
|
|
|
if(sensorParam[num].IsNoInsta == 0)
|
|
|
{
|
|
|
iceInfo->t_sensor_data[pullno].original_tension = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,拉力传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
}else
|
|
|
{
|
|
|
GetPullValue(num, &airt);
|
|
|
if(airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
iceInfo->t_sensor_data[pullno].original_tension = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,采样状态 = %d,拉力未接传感器",
|
|
|
sensorParam[num].devaddr, airt.AiState);
|
|
|
}else
|
|
|
{
|
|
|
iceInfo->t_sensor_data[pullno].original_tension = airt.EuValue;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"地址%d,采样状态 = %d,拉力 = %f", sensorParam[num].devaddr, airt.AiState, iceInfo->t_sensor_data[pullno].original_tension);
|
|
|
if(airt.AiState != 2 && iceInfo->t_sensor_data[pullno].original_tension == 0)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,"地址%d,采样状态 = %d,拉力未采集到数据,重新采样", sensorParam[num].devaddr, airt.AiState);
|
|
|
status = 0;
|
|
|
}else
|
|
|
{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceInfo->t_sensor_data[pullno].original_tension = iceInfo->t_sensor_data[pullno].original_tension * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceInfo->t_sensor_data[pullno].original_tension = iceInfo->t_sensor_data[pullno].original_tension + sensorParam[num].offset;
|
|
|
if(iceInfo->tension == 0xff)
|
|
|
iceInfo->tension =0;
|
|
|
iceInfo->tension = iceInfo->tension + iceInfo->t_sensor_data[pullno].original_tension;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
pullno++;
|
|
|
} else if(sensorParam[num].SensorsType == SLANT_PROTOCOL)
|
|
|
{
|
|
|
if(sensorParam[num].IsNoInsta == 0)
|
|
|
{
|
|
|
iceInfo->t_sensor_data[angleno].deflection_angle = 0xff;
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,倾角传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
}else
|
|
|
{
|
|
|
GetAngleValue(num, &airt, 0);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceInfo->t_sensor_data[angleno].deflection_angle = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,采样状态 = %d,倾角x未接传感器",
|
|
|
sensorParam[num].devaddr, airt.AiState);
|
|
|
} else {
|
|
|
iceInfo->t_sensor_data[angleno].deflection_angle = airt.EuValue;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,采样状态 = %d,x = %f",
|
|
|
sensorParam[num].devaddr,
|
|
|
airt.AiState, iceInfo->t_sensor_data[angleno].deflection_angle);
|
|
|
if (airt.AiState != 2 && iceInfo->t_sensor_data[angleno].deflection_angle == 0)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,
|
|
|
"地址%d,采样状态 = %d,倾角x未采集到数据,重新采样",
|
|
|
sensorParam[num].devaddr, airt.AiState);
|
|
|
status = 0;
|
|
|
} else{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceInfo->t_sensor_data[angleno].deflection_angle = iceInfo->t_sensor_data[angleno].deflection_angle * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceInfo->t_sensor_data[angleno].deflection_angle = iceInfo->t_sensor_data[angleno].deflection_angle + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
GetAngleValue(num, &airt, 1);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,采样状态 = %d,倾角y未接传感器",
|
|
|
sensorParam[num].devaddr, airt.AiState);
|
|
|
} else {
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle = airt.EuValue;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,采样状态 = %d,y = %f",
|
|
|
sensorParam[num].devaddr,
|
|
|
airt.AiState, iceInfo->t_sensor_data[angleno].windage_yaw_angle);
|
|
|
if (airt.AiState != 2 &&
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle == 0) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO,
|
|
|
"地址%d,采样状态 = %d,倾角y未采集到数据,重新采样",
|
|
|
sensorParam[num].devaddr, airt.AiState);
|
|
|
status = 0;
|
|
|
}else{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle = iceInfo->t_sensor_data[angleno].windage_yaw_angle * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceInfo->t_sensor_data[angleno].windage_yaw_angle = iceInfo->t_sensor_data[angleno].windage_yaw_angle + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
angleno++;
|
|
|
}
|
|
|
}
|
|
|
if(iceInfo->tension != 0xff)
|
|
|
{
|
|
|
iceInfo->equal_icethickness = 0;
|
|
|
iceInfo->tension = iceInfo->tension * 0.913632;
|
|
|
iceInfo->tension_difference = iceInfo->tension * 0.13213;
|
|
|
}
|
|
|
|
|
|
{
|
|
|
std::lock_guard<std::mutex> lock(m_dataLocker);
|
|
|
for (int num = 0; num < MAX_SERIAL_DEV_NUM; num++) {
|
|
|
if (sensorParam[num].SensorsType == WIND_PROTOCOL && sensorParam[num].IsNoInsta == 0) {
|
|
|
iceTail->instantaneous_windspeed = 0xff;
|
|
|
iceTail->instantaneous_winddirection = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,风速风向传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
continue;
|
|
|
} else if ((sensorParam[num].SensorsType == WIND_PROTOCOL || sensorParam[num].SensorsType == MUTIWEATHER_PROTOCOL) &&
|
|
|
sensorParam[num].IsNoInsta == 1) {
|
|
|
GetWindSpeedData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceTail->instantaneous_windspeed = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰风速未接传感器",
|
|
|
airt.AiState);
|
|
|
} else {
|
|
|
iceTail->instantaneous_windspeed = airt.EuValue;
|
|
|
if (airt.AiState != 2 && iceTail->instantaneous_windspeed == 0 &&
|
|
|
m_tempData.instantaneous_windspeed == 0xff)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰风速未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if (airt.AiState == 2 && m_tempData.instantaneous_windspeed == 0xff)
|
|
|
{
|
|
|
m_tempData.instantaneous_windspeed = iceTail->instantaneous_windspeed;
|
|
|
} else if (iceTail->instantaneous_windspeed == 0 && m_tempData.instantaneous_windspeed != 0xff)
|
|
|
{
|
|
|
iceTail->instantaneous_windspeed = m_tempData.instantaneous_windspeed;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceTail->instantaneous_windspeed = iceTail->instantaneous_windspeed * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceTail->instantaneous_windspeed = iceTail->instantaneous_windspeed + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
GetWindDirectionData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceTail->instantaneous_winddirection = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰风向未接传感器",
|
|
|
airt.AiState);
|
|
|
} else {
|
|
|
iceTail->instantaneous_winddirection = airt.EuValue;
|
|
|
if (airt.AiState != 2 && iceTail->instantaneous_winddirection == 0 &&
|
|
|
m_tempData.instantaneous_winddirection == 0xff) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰风向未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else {
|
|
|
if (airt.AiState == 2 && m_tempData.instantaneous_winddirection == 0xff)
|
|
|
{
|
|
|
m_tempData.instantaneous_winddirection = iceTail->instantaneous_winddirection;
|
|
|
} else if (iceTail->instantaneous_winddirection == 0 && m_tempData.instantaneous_winddirection != 0xff)
|
|
|
{
|
|
|
iceTail->instantaneous_winddirection = m_tempData.instantaneous_winddirection;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceTail->instantaneous_winddirection = iceTail->instantaneous_winddirection * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceTail->instantaneous_winddirection = iceTail->instantaneous_winddirection + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
for (int num = 0; num < MAX_SERIAL_DEV_NUM; num++) {
|
|
|
if (sensorParam[num].SensorsType == WEATHER_PROTOCOL && sensorParam[num].IsNoInsta == 0)
|
|
|
{
|
|
|
iceTail->air_temperature = 0xff;
|
|
|
iceTail->humidity = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,温湿度传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
continue;
|
|
|
} else if ((sensorParam[num].SensorsType == WEATHER_PROTOCOL || sensorParam[num].SensorsType == MUTIWEATHER_PROTOCOL) && sensorParam[num].IsNoInsta == 1) {
|
|
|
GetAirTempData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceTail->air_temperature = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰温度未接传感器",
|
|
|
airt.AiState);
|
|
|
} else {
|
|
|
iceTail->air_temperature = airt.EuValue;
|
|
|
if (airt.AiState != 2 && iceTail->air_temperature == 0 &&
|
|
|
m_tempData.air_temperature == 0xff) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰温度未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else {
|
|
|
if (airt.AiState == 2 && m_tempData.air_temperature == 0xff)
|
|
|
{
|
|
|
m_tempData.air_temperature = iceTail->air_temperature;
|
|
|
} else if (iceTail->air_temperature == 0 &&
|
|
|
m_tempData.air_temperature != 0xff)
|
|
|
{
|
|
|
iceTail->air_temperature = m_tempData.air_temperature;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceTail->air_temperature = iceTail->air_temperature * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceTail->air_temperature = iceTail->air_temperature + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
GetHumidityData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1) {
|
|
|
iceTail->humidity = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰湿度未接传感器",
|
|
|
airt.AiState);
|
|
|
} else {
|
|
|
iceTail->humidity = airt.EuValue;
|
|
|
if (airt.AiState != 2 && iceTail->humidity == 0 &&
|
|
|
m_tempData.humidity == 0xff) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,覆冰湿度未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else {
|
|
|
if (airt.AiState == 2 && m_tempData.humidity == 0xff) {
|
|
|
m_tempData.humidity = iceTail->humidity;
|
|
|
} else if (iceTail->humidity == 0 && m_tempData.humidity != 0xff) {
|
|
|
iceTail->humidity = m_tempData.humidity;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
iceTail->humidity = iceTail->humidity * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
iceTail->humidity = iceTail->humidity + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
return status ? 1 : 0;
|
|
|
}
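// The per-channel "scale by 'multiple', then add 'offset'" adjustment is repeated for every
// measurement in GetIceData()/GetWData(); a file-scope helper sketch of that step (illustrative
// only; the value type is left generic because the exact field types are not visible here):
#if 0
template <typename T>
static void ApplyCalibration(T& value, const D_SENSOR_PARAM& param)
{
    // A multiple of 0 or 1 means "no scaling configured"
    if (param.multiple != 1 && param.multiple != 0)
        value = value * param.multiple;
    // Any non-zero offset is added after scaling, matching the order used above
    if (param.offset != 0)
        value = value + param.offset;
}
#endif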
|
|
|
|
|
|
int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam)
|
|
|
{
|
|
|
m_tempData.instantaneous_windspeed = 0xff;
|
|
|
m_tempData.air_temperature = 0xff;
|
|
|
m_tempData.instantaneous_winddirection = 0xff;
|
|
|
m_tempData.humidity = 0xff;
|
|
|
bool status = true;
|
|
|
|
|
|
std::unique_lock<std::mutex> lock(m_collectDataLocker);
|
|
|
if (!m_collecting.load()) {
|
|
|
m_collecting.store(true);
|
|
|
m_CollectDatacv.notify_all();
|
|
|
lock.unlock();
|
|
|
Collect_sensor_data(); //15s
|
|
|
lock.lock();
|
|
|
m_collecting.store(false);
|
|
|
m_CollectDatacv.notify_all();
|
|
|
} else {
|
|
|
m_CollectDatacv.wait(lock, [this]{ return !m_collecting.load(); });
|
|
|
// m_collecting.store(false);
|
|
|
}
|
|
|
|
|
|
Data_DEF airt;
|
|
|
{
|
|
|
std::lock_guard<std::mutex> lock(m_dataLocker);
|
|
|
for (int num = 0; num < MAX_SERIAL_DEV_NUM; num++)
|
|
|
{
|
|
|
if (sensorParam[num].SensorsType == WIND_PROTOCOL && sensorParam[num].IsNoInsta == 0)
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = 0xff;
|
|
|
weatherInfo->extreme_windspeed = 0xff;
|
|
|
weatherInfo->standard_windspeed = 0xff;
|
|
|
weatherInfo->avg_winddirection_10min = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,风速风向传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
continue;
|
|
|
} else if ((sensorParam[num].SensorsType == WIND_PROTOCOL || sensorParam[num].SensorsType == MUTIWEATHER_PROTOCOL) && sensorParam[num].IsNoInsta == 1)
|
|
|
{
|
|
|
GetWindSpeedData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = 0xff;
|
|
|
weatherInfo->extreme_windspeed = 0xff;
|
|
|
weatherInfo->standard_windspeed = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象风速未接传感器", airt.AiState);
|
|
|
} else
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = airt.EuValue;
|
|
|
weatherInfo->extreme_windspeed = airt.EuValue;
|
|
|
weatherInfo->standard_windspeed = airt.EuValue;
|
|
|
if (airt.AiState != 2 && weatherInfo->avg_windspeed_10min == 0 && m_tempData.instantaneous_windspeed == 0xff)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象风速未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else {
|
|
|
if (airt.AiState == 2 && m_tempData.instantaneous_windspeed == 0xff)
|
|
|
{
|
|
|
m_tempData.instantaneous_windspeed = weatherInfo->avg_windspeed_10min;
|
|
|
} else if (weatherInfo->avg_windspeed_10min == 0 &&
|
|
|
m_tempData.instantaneous_windspeed != 0xff)
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = m_tempData.instantaneous_windspeed;
|
|
|
weatherInfo->extreme_windspeed = m_tempData.instantaneous_windspeed;
|
|
|
weatherInfo->standard_windspeed = m_tempData.instantaneous_windspeed;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = weatherInfo->avg_windspeed_10min * sensorParam[num].multiple;
|
|
|
weatherInfo->extreme_windspeed = weatherInfo->avg_windspeed_10min;
|
|
|
weatherInfo->standard_windspeed = weatherInfo->avg_windspeed_10min;
|
|
|
}
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
{
|
|
|
weatherInfo->avg_windspeed_10min = weatherInfo->avg_windspeed_10min + sensorParam[num].offset;
|
|
|
weatherInfo->extreme_windspeed = weatherInfo->avg_windspeed_10min;
|
|
|
weatherInfo->standard_windspeed = weatherInfo->avg_windspeed_10min;
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
GetWindDirectionData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->avg_winddirection_10min = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象风向未接传感器", airt.AiState);
|
|
|
} else
|
|
|
{
|
|
|
weatherInfo->avg_winddirection_10min = airt.EuValue;
|
|
|
if (airt.AiState != 2 && weatherInfo->avg_winddirection_10min == 0 && m_tempData.instantaneous_winddirection == 0xff)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象风向未采集到数据,重新采样", airt.AiState);
|
|
|
status = 0;
|
|
|
} else {
|
|
|
if (airt.AiState == 2 && m_tempData.instantaneous_winddirection == 0xff) {
|
|
|
m_tempData.instantaneous_winddirection = weatherInfo->avg_winddirection_10min;
|
|
|
} else if (weatherInfo->avg_winddirection_10min == 0 && m_tempData.instantaneous_winddirection != 0xff) {
|
|
|
weatherInfo->avg_winddirection_10min = m_tempData.instantaneous_winddirection;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->avg_winddirection_10min = weatherInfo->avg_winddirection_10min * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->avg_winddirection_10min = weatherInfo->avg_winddirection_10min + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
GetRainfallData(&airt);
|
|
|
weatherInfo->precipitation = airt.EuValue;
|
|
|
if(airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->precipitation = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,降雨量未接传感器", airt.AiState);
|
|
|
}else if (airt.AiState != 2 && weatherInfo->precipitation == 0)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,降雨量未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->precipitation = weatherInfo->precipitation * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->precipitation = weatherInfo->precipitation + sensorParam[num].offset;
|
|
|
}
|
|
|
|
|
|
GetAtmosData(&airt);
|
|
|
weatherInfo->air_pressure = airt.EuValue;
|
|
|
if(airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->air_pressure = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气压未接传感器", airt.AiState);
|
|
|
}else if (airt.AiState != 2 && weatherInfo->air_pressure == 0) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气压未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->air_pressure = weatherInfo->air_pressure * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->air_pressure = weatherInfo->air_pressure + sensorParam[num].offset;
|
|
|
}
|
|
|
|
|
|
GetOpticalRadiationData(&airt);
|
|
|
weatherInfo->radiation_intensity = airt.EuValue;
|
|
|
if(airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->radiation_intensity = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,光照强度未接传感器", airt.AiState);
|
|
|
}else if (airt.AiState != 2 && weatherInfo->radiation_intensity == 0) {
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,光照强度未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->radiation_intensity = weatherInfo->radiation_intensity * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->radiation_intensity = weatherInfo->radiation_intensity + sensorParam[num].offset;
|
|
|
}
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
|
|
|
for (int num = 0; num < MAX_SERIAL_DEV_NUM; num++) {
|
|
|
if (sensorParam[num].SensorsType == WEATHER_PROTOCOL && sensorParam[num].IsNoInsta == 0)
|
|
|
{
|
|
|
weatherInfo->air_temperature = 0xff;
|
|
|
weatherInfo->humidity = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "地址%d,温湿度传感器未启用",
|
|
|
sensorParam[num].devaddr);
|
|
|
continue;
|
|
|
} else if ((sensorParam[num].SensorsType == WEATHER_PROTOCOL || sensorParam[num].SensorsType == MUTIWEATHER_PROTOCOL) &&
|
|
|
sensorParam[num].IsNoInsta == 1)
|
|
|
{
|
|
|
GetAirTempData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->air_temperature = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象温度未接传感器", airt.AiState);
|
|
|
} else
|
|
|
{
|
|
|
weatherInfo->air_temperature = airt.EuValue;
|
|
|
if (airt.AiState != 2 && weatherInfo->air_temperature == 0 &&
|
|
|
m_tempData.air_temperature == 0xff)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象温度未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if (airt.AiState == 2 && m_tempData.air_temperature == 0xff)
|
|
|
{
|
|
|
m_tempData.air_temperature = weatherInfo->air_temperature;
|
|
|
} else if (weatherInfo->air_temperature == 0 && m_tempData.air_temperature != 0xff)
|
|
|
{
|
|
|
weatherInfo->air_temperature = m_tempData.air_temperature;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->air_temperature = weatherInfo->air_temperature * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->air_temperature = weatherInfo->air_temperature + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
GetHumidityData(&airt);
|
|
|
if (airt.AiState == 255 || airt.AiState == -1)
|
|
|
{
|
|
|
weatherInfo->humidity = 0xff;
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象湿度未接传感器", airt.AiState);
|
|
|
} else
|
|
|
{
|
|
|
weatherInfo->humidity = airt.EuValue;
|
|
|
if (airt.AiState != 2 && weatherInfo->humidity == 0 &&
|
|
|
m_tempData.humidity == 0xff)
|
|
|
{
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "采样状态 = %d,气象湿度未采集到数据,重新采样",
|
|
|
airt.AiState);
|
|
|
status = 0;
|
|
|
} else
|
|
|
{
|
|
|
if (airt.AiState == 2 && m_tempData.humidity == 0xff)
|
|
|
{
|
|
|
m_tempData.humidity = weatherInfo->humidity;
|
|
|
} else if (weatherInfo->humidity == 0 && m_tempData.humidity != 0xff)
|
|
|
{
|
|
|
weatherInfo->humidity = m_tempData.humidity;
|
|
|
}
|
|
|
if(sensorParam[num].multiple != 1 && sensorParam[num].multiple != 0)
|
|
|
weatherInfo->humidity = weatherInfo->humidity * sensorParam[num].multiple;
|
|
|
if(sensorParam[num].offset != 0)
|
|
|
weatherInfo->humidity = weatherInfo->humidity + sensorParam[num].offset;
|
|
|
}
|
|
|
}
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
return status ? 1 : 0;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::OpenSensors(int sensortype)
|
|
|
{
|
|
|
if(sensortype == MAIN_POWER_OPEN) {
|
|
|
GpioControl::set12VEnable(true);
|
|
|
GpioControl::setRS485Enable(true);
|
|
|
GpioControl::setCam3V3Enable(true);
|
|
|
GpioControl::TurnOn(CMD_SET_SPI_POWER);
|
|
|
// GpioControl::TurnOn(CMD_SET_485_EN_STATE); // 打开RS485电源
|
|
|
#ifndef USING_N938
|
|
|
#ifndef USING_PLZ
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN_STATE);
|
|
|
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_485_ENABLE);
|
|
|
#endif
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SPI2SERIAL_POWER_EN);
|
|
|
GpioControl::TurnOn(CMD_RS485_3V3_EN);
|
|
|
#endif
|
|
|
}
|
|
|
if(sensortype == CAMERA_SENSOR_OPEN)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#ifndef USING_PLZ
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_5V_PWR_ENABLE);
|
|
|
GpioControl::TurnOn(CMD_SET_PTZ_PWR_ENABLE);
|
|
|
#endif
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_PIC1_POWER);
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN4);
|
|
|
#endif
|
|
|
// GpioControl::TurnOn(CMD_SET_CAM_3V3_EN_STATE); // 打开3.3V电压
|
|
|
// GpioControl::TurnOn(CMD_SET_3V3_PWR_ENABLE);
|
|
|
}
|
|
|
if(sensortype == WEATHER_SENSOR_OPEN)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_WTH_POWER);
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN3);
|
|
|
#endif
|
|
|
}
|
|
|
if(sensortype == ICETHICK_SENSOR_OPEN)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_PULL_POWER);
|
|
|
GpioControl::TurnOn(CMD_SET_ANGLE_POWER);
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN1);
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN0);
|
|
|
#endif
|
|
|
}
|
|
|
if(sensortype == OTHER_SENSOR)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOn(CMD_SET_OTHER_POWER);
|
|
|
GpioControl::TurnOn(CMD_SET_485_EN2);
|
|
|
#endif
|
|
|
}
|
|
|
return 0;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::CloseSensors(int sensortype, uint32_t delayedCloseTime)
|
|
|
{
|
|
|
if(sensortype == MAIN_POWER_OPEN)
|
|
|
{
|
|
|
GpioControl::set12VEnable(false, delayedCloseTime);
|
|
|
GpioControl::setRS485Enable(false, delayedCloseTime);
|
|
|
GpioControl::setCam3V3Enable(false, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_SPI_POWER, delayedCloseTime);
|
|
|
// GpioControl::TurnOff(CMD_SET_485_EN_STATE);
|
|
|
#ifndef USING_N938
|
|
|
#ifndef USING_PLZ
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime);
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SET_485_ENABLE, delayedCloseTime);
|
|
|
#endif
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SPI2SERIAL_POWER_EN, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_RS485_3V3_EN, delayedCloseTime);
|
|
|
#endif
|
|
|
|
|
|
}
|
|
|
if(sensortype == CAMERA_SENSOR_OPEN)
|
|
|
{
|
|
|
if(delayedCloseTime ==0)
|
|
|
{
|
|
|
#ifdef USING_N938
|
|
|
GpioControl::TurnOffImmediately(CMD_SET_PIC1_POWER);
|
|
|
GpioControl::TurnOffImmediately(CMD_SET_485_EN4);
|
|
|
#endif
|
|
|
|
|
|
#ifndef USING_N938
|
|
|
// GpioControl::TurnOff(CMD_SET_3V3_PWR_ENABLE);
|
|
|
#ifndef USING_PLZ
|
|
|
#else
|
|
|
GpioControl::TurnOffImmediately(CMD_SET_5V_PWR_ENABLE);
|
|
|
GpioControl::TurnOffImmediately(CMD_SET_PTZ_PWR_ENABLE);
|
|
|
#endif
|
|
|
#endif
|
|
|
}else
|
|
|
{
|
|
|
#ifdef USING_N938
|
|
|
GpioControl::TurnOff(CMD_SET_PIC1_POWER, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN4, delayedCloseTime);
|
|
|
// GpioControl::TurnOff(CMD_SET_CAM_3V3_EN_STATE);
|
|
|
#endif
|
|
|
|
|
|
#ifndef USING_N938
|
|
|
// GpioControl::TurnOff(CMD_SET_3V3_PWR_ENABLE);
|
|
|
#ifndef USING_PLZ
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SET_PTZ_PWR_ENABLE, delayedCloseTime);
|
|
|
#endif
|
|
|
#endif
|
|
|
}
|
|
|
}
|
|
|
if(sensortype == WEATHER_SENSOR_OPEN )
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SET_WTH_POWER, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN3, delayedCloseTime);
|
|
|
#endif
|
|
|
}
|
|
|
if(sensortype == ICETHICK_SENSOR_OPEN)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SET_PULL_POWER, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_ANGLE_POWER, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN1, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN0, delayedCloseTime);
|
|
|
#endif
|
|
|
}
|
|
|
if(sensortype == OTHER_SENSOR)
|
|
|
{
|
|
|
#ifndef USING_N938
|
|
|
#else
|
|
|
GpioControl::TurnOff(CMD_SET_OTHER_POWER, delayedCloseTime);
|
|
|
GpioControl::TurnOff(CMD_SET_485_EN2, delayedCloseTime);
|
|
|
#endif
|
|
|
}
|
|
|
return 0;
|
|
|
}
|
|
|
|
|
|
bool CPhoneDevice::LoadNetworkInfo()
|
|
|
{
|
|
|
std::vector<uint8_t> content;
|
|
|
if (!readFile(m_appPath + (APP_PATH_NETWORK), content) || content.empty())
|
|
|
{
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
Json::CharReaderBuilder builder;
|
|
|
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
|
|
|
|
|
|
Json::Value jsonVal;
|
|
|
const char* doc = (const char*)&(content[0]);
|
|
|
std::string errMsg;
|
|
|
if (!reader->parse(doc, doc + content.size(), &jsonVal, &errMsg))
|
|
|
{
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
if (m_network == NULL)
|
|
|
{
|
|
|
m_network = new NETWORK();
|
|
|
}
|
|
|
GetJSONValue(jsonVal, "iface", m_network->iface);
|
|
|
GetJSONValue(jsonVal, "ip", m_network->ip);
|
|
|
GetJSONValue(jsonVal, "netmask", m_network->netmask);
|
|
|
GetJSONValue(jsonVal, "gateway", m_network->gateway);
|
|
|
|
|
|
return true;
|
|
|
}
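// The network file parsed above is expected to carry the four keys read by GetJSONValue, e.g.
// (values illustrative): {"iface":"eth0","ip":"192.168.1.100","netmask":"255.255.255.0","gateway":"192.168.1.1"}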
|
|
|
|
|
|
void CPhoneDevice::SetStaticIp()
|
|
|
{
|
|
|
if (m_network != NULL)
|
|
|
{
|
|
|
unsigned int ip = 0;
|
|
|
unsigned int netMask = 0;
|
|
|
unsigned int gateway = 0;
|
|
|
std::string ipStr = m_network->ip;
|
|
|
if (GetNetInfo("eth0", ip, netMask, gateway))
|
|
|
{
|
|
|
// const
|
|
|
sockaddr_in addrIn = { AF_INET, 0, ip};
|
|
|
char buf[32] = { 0 };
|
|
|
inet_ntop(AF_INET, &addrIn.sin_addr, buf, sizeof(buf)); // convert the binary address in addrIn (sockaddr_in) to dotted-decimal text
|
|
|
if (strcmp(ipStr.c_str(), buf) == 0)
|
|
|
{
|
|
|
ipStr = "0.0.0.0";
|
|
|
}
|
|
|
}
|
|
|
|
|
|
SetStaticIp(m_network->iface, ipStr, m_network->netmask, m_network->gateway);
|
|
|
XYLOG(XYLOG_SEVERITY_INFO, "Set Static IP on %s: %s(%s)", m_network->iface.c_str(),
|
|
|
m_network->ip.c_str(), ipStr.c_str());
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
#ifdef USING_N938
|
|
|
SetStaticIp("eth0", "0.0.0.0", "255.255.255.0", "192.168.1.1");
|
|
|
#endif
|
|
|
XYLOG(XYLOG_SEVERITY_WARNING, "No Static IP Confg");
|
|
|
}
|
|
|
}
|
|
|
|
|
|
VendorCtrl* CPhoneDevice::MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle)
|
|
|
{
|
|
|
VendorCtrl* vendorCtrl = NULL;
|
|
|
switch (vendor)
|
|
|
{
|
|
|
case 1:
|
|
|
// Hai Kang
|
|
|
break;
|
|
|
case 2:
|
|
|
break;
|
|
|
case 3:
|
|
|
// Yu Shi
|
|
|
vendorCtrl = new YuShiCtrl(ip, userName, password, channel, netHandle);
|
|
|
break;
|
|
|
case 5:
|
|
|
// Hang Yu - New
|
|
|
vendorCtrl = new HangYuCtrl(ip, userName, password, channel, netHandle);
|
|
|
}
|
|
|
return vendorCtrl;
|
|
|
}
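// Illustrative usage from another CPhoneDevice member (IP and credentials are placeholders):
// only vendor codes 3 (Yu Shi) and 5 (Hang Yu) are constructed above, so the result must be
// null-checked before use.
#if 0
VendorCtrl* ctrl = MakeVendorCtrl(3, /*channel*/ 1, "192.168.1.64", "admin", "password", GetEthnetHandle());
if (ctrl != NULL)
{
    // ... hand ctrl to its consumer; ownership follows whatever convention the callers use
}
#endif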
|