diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp index 2f57725d..7614904b 100644 --- a/app/src/main/cpp/PhoneDevice.cpp +++ b/app/src/main/cpp/PhoneDevice.cpp @@ -420,7 +420,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa mGetSystemInfoMid = env->GetMethodID(classService, "getSystemInfo", "()Ljava/lang/String;"); mInstallAppMid = env->GetMethodID(classService, "installApp", "(Ljava/lang/String;J)Z"); - mRebootMid = env->GetMethodID(classService, "reboot", "(IJ)V"); + mRebootMid = env->GetMethodID(classService, "reboot", "(IJLjava/lang/String;)V"); mEnableGpsMid = env->GetMethodID(classService, "enableGps", "(Z)V"); mRequestPositionMid = env->GetMethodID(classService, "requestPosition", "()Z"); @@ -1013,7 +1013,7 @@ bool CPhoneDevice::Reboot(int resetType) return true; } -void CPhoneDevice::RestartApp(int resetType, long timeout) +void CPhoneDevice::RestartApp(int resetType, long timeout, const std::string& reason) { JNIEnv* env = NULL; bool didAttachThread = false; @@ -1022,7 +1022,13 @@ void CPhoneDevice::RestartApp(int resetType, long timeout) { ALOGE("Failed to get JNI Env"); } - env->CallVoidMethod(m_javaService, mRebootMid, resetType, timeout); + + jstring jreason = NULL; + if (!reason.empty()) + { + jreason = env->NewStringUTF(reason.c_str()); + } + env->CallVoidMethod(m_javaService, mRebootMid, resetType, timeout, jreason); if (didAttachThread) { m_vm->DetachCurrentThread(); @@ -1166,7 +1172,7 @@ void CPhoneDevice::handleRebootTimer(union sigval v) CPhoneDevice* pDevice = (CPhoneDevice*)(v.sival_ptr); // Reboot APP XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app"); - pDevice->RestartApp(0, 2000); + pDevice->RestartApp(0, 2000, "Camera Can't Close"); } // void CPhoneDevice::handleRebootTimerImpl() diff --git a/app/src/main/cpp/PhoneDevice.cpp~ b/app/src/main/cpp/PhoneDevice.cpp~ new file mode 100644 index 00000000..f46440b4 --- /dev/null +++ 
b/app/src/main/cpp/PhoneDevice.cpp~ @@ -0,0 +1,3103 @@ +#include "PhoneDevice.h" +#include +#include +#include +#include +#include +#include "ncnn/yolov5ncnn.h" +#include "GPIOControl.h" +#include "CvText.h" +#include "PositionHelper.h" +#include "DngCreator.h" + +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#ifdef USING_HDRPLUS +#include +#endif + +#include +#include +#include +namespace fs = std::filesystem; + +#define CMD_SET_485_EN_STATE 131 +#define CMD_SET_CAM_3V3_EN_STATE 132 +#define CMD_SET_12V_EN_STATE 133 + +extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread); + +#define WAKELOCK_NAME "NDK_WK_" +// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their +// ranges +// are normalized to eight bits. +static const int kMaxChannelValue = 262143; + +class ByteArraysPointer +{ +public: + ByteArraysPointer() + { + } + ~ByteArraysPointer() + { +#ifdef _DEBUG + ALOGD("ByteArray Size=%u", (uint32_t)byteArrays.size()); + for (auto it = byteArrays.cbegin(); it != byteArrays.cend(); ++it) + { + ALOGD("ByteArray Free: Size=%u", (uint32_t)((*it).size())); + } + +#endif + byteArrays.clear(); + } + std::vector > byteArrays; +}; + +cv::Mat convert16bit2_8bit_(cv::Mat ans){ + if(ans.type()==CV_16UC3){ + cv::MatIterator_ it, end; + for( it = ans.begin(), end = ans.end(); it != end; ++it) + { + // std::cout<= '0' && *p <= '9') p++; + if (*p != 0) { + *p = 0; + p++; + if (*p == 0) p--; + } + mem += atoll(num) * 1024; + numFound++; + break; + } + i++; + } + p++; + } + + return numFound > 0 ? 
mem : -1; +} + +static jlong android_os_Process_getFreeMemory() +{ + static const char* const sums[] = { "MemFree:", "Cached:", NULL }; + static const size_t sumsLen[] = { strlen("MemFree:"), strlen("Cached:"), 0 }; + return getFreeMemoryImpl(sums, sumsLen, 2); +} + +static jlong android_os_Process_getTotalMemory() +{ + static const char* const sums[] = { "MemTotal:", NULL }; + static const size_t sumsLen[] = { strlen("MemTotal:"), 0 }; + return getFreeMemoryImpl(sums, sumsLen, 1); +} + +static inline uint32_t YUV2RGB(int nY, int nU, int nV) { + nY -= 16; + nU -= 128; + nV -= 128; + if (nY < 0) nY = 0; + + // This is the floating point equivalent. We do the conversion in integer + // because some Android devices do not have floating point in hardware. + // nR = (int)(1.164 * nY + 1.596 * nV); + // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); + // nB = (int)(1.164 * nY + 2.018 * nU); + + int nR = (int)(1192 * nY + 1634 * nV); + int nG = (int)(1192 * nY - 833 * nV - 400 * nU); + int nB = (int)(1192 * nY + 2066 * nU); + + nR = std::min(kMaxChannelValue, std::max(0, nR)); + nG = std::min(kMaxChannelValue, std::max(0, nG)); + nB = std::min(kMaxChannelValue, std::max(0, nB)); + + nR = (nR >> 10) & 0xff; + nG = (nG >> 10) & 0xff; + nB = (nB >> 10) & 0xff; + + return 0xff000000 | (nR << 16) | (nG << 8) | nB; +} + +CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params) : NdkCamera(width, height, params), m_dev(dev) +{ +} + +CPhoneDevice::CPhoneCamera::~CPhoneCamera() +{ + m_dev = NULL; +} + +bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb) +{ + if (m_dev != NULL) + { + return m_dev->OnImageReady(rgb); + } + + return false; +} + +bool CPhoneDevice::CPhoneCamera::onOneCapture(std::shared_ptr characteristics, std::shared_ptr result, uint32_t ldr, cv::Mat rgb) +{ + if (m_dev != NULL) + { + return m_dev->onOneCapture(characteristics, result, ldr, rgb); + } + return false; +} + +bool 
CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + return m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return false; +} + +bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + return m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return false; +} + +void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg) +{ + if (m_dev != NULL) + { + m_dev->onError(msg); + } +} + +void CPhoneDevice::CPhoneCamera::onDisconnected(ACameraDevice* device) +{ + if (m_dev != NULL) + { + m_dev->onDisconnected(device); + } +} + + +CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t height, const std::string& path, const NdkCamera::CAMERA_PARAMS& params) : CPhoneDevice::CPhoneCamera(dev, width, height, params), m_path(path) +{ +} + +bool CPhoneDevice::CJpegCamera::onOneCapture(std::shared_ptr characteristics, std::shared_ptr result, uint32_t ldr, cv::Mat rgb) +{ + if (m_dev != NULL) + { + return m_dev->onOneCapture(characteristics, result, ldr, rgb); + } + return false; +} + +bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return true; +} + +bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, std::vector >& frames) +{ + if (m_dev != NULL) + { + m_dev->onBurstCapture(characteristics, results, ldr, frames); + } + return true; +} + +void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) +{ + ALOGD("onImageAvailable %p", reader); + + AImage* image = 0; + media_status_t mstatus = 
AImageReader_acquireLatestImage(reader, &image); + + if (mstatus != AMEDIA_OK) + { + // error + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + { + XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus); + } + return; + } + + uint8_t* y_data = 0; + int y_len = 0; +#if 0 + if (!lightDetected) + { + AImage_getPlaneData(image, 0, &y_data, &y_len); + + lightDetected = true; + +#if __cplusplus >= 201703L + uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); +#else + uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); +#endif + avgY = avgY / (uint64_t)y_len; + mLdr = avgY; +#if 1 + if (avgY < 50) + { + if (m_params.autoExposure) + { + uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; + camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); + + int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY); + status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); + + int64_t exposureTime = (avgY < 5) ? 
200 * 1000000 : (mResult.exposureTime * 120.0 / avgY); + status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime); + + XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY, + mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity); + } + AImage_delete(image); + return; + } +#endif + } +#endif + + int32_t format; + AImage_getFormat(image, &format); + + if (format == AIMAGE_FORMAT_JPEG) + { + int planeCount; + media_status_t status = AImage_getNumberOfPlanes(image, &planeCount); + + // LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount); + if (!(status == AMEDIA_OK && planeCount == 1)) + { + // LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount); + return; + } + + uint8_t *data = nullptr; + int len = 0; + AImage_getPlaneData(image, 0, &data, &len); + + FILE *file = fopen(m_path.c_str(), "wb"); + if (file && data && len) + { + fwrite(data, 1, len, file); + fclose(file); + } + else + { + if (file) + fclose(file); + } + } + + AImage_delete(image); +} + +int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const +{ + return AIMAGE_FORMAT_JPEG; +} + +std::mutex CPhoneDevice::m_powerLocker; +long CPhoneDevice::mCameraPowerCount = 0; +long CPhoneDevice::mOtgCount = 0; + +CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode) : mVersionCode(versionCode) +{ + mCamera = NULL; + m_listener = NULL; + m_pRecognizationCfg = NULL; + mAIInitialized = false; + mHeartbeatStartTime = 0; + mHeartbeatDuration = 0; + m_javaService = NULL; + m_appPath = appPath; + + mNetId = netId; + + m_signalLevel = 0; + m_signalLevelUpdateTime = time(NULL); + mBuildTime = 0; + + RegisterHandlerForSignal(SIGUSR2); + + m_vm = vm; + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + if (service != NULL) + 
{ + m_javaService = env->NewGlobalRef(service); + + jclass classService = env->GetObjectClass(m_javaService); + mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(IJ)V"); + mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z"); + mUpdateCaptureScheduleMid = env->GetMethodID(classService, "updateCaptureSchedule", "(J)Z"); + mStartRecordingMid = env->GetMethodID(classService, "startRecording", "(ZIJIIIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + + mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V"); + mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V"); + + mGetSystemInfoMid = env->GetMethodID(classService, "getSystemInfo", "()Ljava/lang/String;"); + mInstallAppMid = env->GetMethodID(classService, "installApp", "(Ljava/lang/String;J)Z"); + mRebootMid = env->GetMethodID(classService, "reboot", "(IJLjava/lang/String;)V"); + mEnableGpsMid = env->GetMethodID(classService, "enableGps", "(Z)V"); + mRequestPositionMid = env->GetMethodID(classService, "requestPosition", "()Z"); + + env->DeleteLocalRef(classService); + } + + if (didAttachThread) + { + vm->DetachCurrentThread(); + } + + m_timerUidFeed = time(NULL) * 1000; + m_wakelockIdFeed = (unsigned long)m_timerUidFeed; + +#ifdef USING_NRSEC + TurnOnCameraPower(env); + GpioControl::setSpiPower(true); +#endif +} + +CPhoneDevice::~CPhoneDevice() +{ + m_devLocker.lock(); + for (auto it = mTimers.begin(); it != mTimers.end(); ++it) + { + timer_delete((timer_t)it->first); + delete it->second; + } + mTimers.clear(); + m_devLocker.unlock(); + + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + env->DeleteGlobalRef(m_javaService); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + m_javaService = NULL; + + if (m_pRecognizationCfg != 
NULL) + { + if (mAIInitialized) + { + ncnn_uninit(); + } + m_pRecognizationCfg = NULL; + } +} + +void CPhoneDevice::SetListener(IListener* listener) +{ + m_listener = listener; +} + +void CPhoneDevice::SetRecognizationCfg(const IDevice::CFG_RECOGNIZATION* pRecognizationCfg) +{ + if (m_pRecognizationCfg == NULL && pRecognizationCfg != NULL && (pRecognizationCfg->enabled != 0)) + { + // TODO + std::string paramFile = m_appPath + (APP_PATH_RECOG_PARAM); + std::string binFile = m_appPath + (APP_PATH_RECOG_BIN); + std::error_code err; + if (!existsFile(paramFile) || !existsFile(binFile) || fs::is_directory(fs::path(paramFile), err) || fs::is_directory(fs::path(binFile), err)) + { + XYLOG(XYLOG_SEVERITY_WARNING, "AI Config Files are invalid"); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "AI Enabled"); + ncnn_init(); + mAIInitialized = true; + bool res = YoloV5Ncnn_Init(paramFile, binFile); + if (res) + { + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Init NCNN"); + } + else + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Init NCNN"); + } + } + } + else + { + XYLOG(XYLOG_SEVERITY_WARNING, "AI Disabled"); + } + + m_pRecognizationCfg = pRecognizationCfg; +} + +bool CPhoneDevice::BindNetwork(int sock) +{ + return true; +} + +bool CPhoneDevice::SelfTest(std::string& result) +{ + result.clear(); + + const char* ITEM_SEP = "\t"; // + unsigned int numberOfChannels = 0; + + result += "设备自检 版本:" + GetVersion() + ITEM_SEP; + + Json::Value appConfig = Json::objectValue; + std::vector content; + std::string filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF); + if (!readFile(filePath, content)) + { + result += "读取系统配置文件App.json失败"; + result += ITEM_SEP; + } + else + { + Json::CharReaderBuilder builder; + std::unique_ptr reader(builder.newCharReader()); + + const char* doc = (const char*)&(content[0]); + if (reader->parse(doc, doc + content.size(), &appConfig, NULL)) + { + unsigned int val = 0; + if (GetJSONUInt32Value(appConfig, "channels", val) && (val > 0 && val <= 
255)) + { + numberOfChannels = val; + result += "通道数:" + std::to_string(numberOfChannels) + ITEM_SEP; + } + else + { + result += "通道数未定义或者无效" + std::string(ITEM_SEP); + } + } + else + { + result += "解析系统配置文件App.json失败" + std::string(ITEM_SEP); + } + } + + for (unsigned int channel = 1; channel <= numberOfChannels; channel++) + { + std::string path = m_appPath + (APP_PATH_CHANNELS DIR_SEP_STR); + + unsigned char cameraId = 0; + unsigned char usbCamera = 0; + Json::Value channelCfg = Json::objectValue; + content.clear(); + filePath = m_appPath + (APP_DATA_DIR DIR_SEP_STR APP_FILE_NAME_APP_CONF); + if (!readFile(filePath, content)) + { + result += "读取通道" + std::to_string(channel) + "配置文件失败" + std::string(ITEM_SEP); + } + else + { + Json::CharReaderBuilder builder; + std::unique_ptr reader(builder.newCharReader()); + + const char* doc = (const char*)&(content[0]); + if (reader->parse(doc, doc + content.size(), &channelCfg, NULL)) + { + GetJSONUInt8Value(channelCfg, "usbCamera", usbCamera); + if (GetJSONUInt8Value(channelCfg, "cameraId", cameraId)) + { + result += "通道" + std::to_string(channel) + " Camera ID为 " + std::to_string(cameraId) + ITEM_SEP; + } + else + { + cameraId = channel - 1; + result += "通道" + std::to_string(channel) + "未定义Camera ID, 使用默认值 " + std::to_string(cameraId) + ITEM_SEP; + } + } + else + { + result += "解析通道" + std::to_string(channel) + "配置文件App.json失败" + std::string(ITEM_SEP); + } + } + + int32_t width = 0; + int32_t height = 0; + NdkCamera::CAMERA_PARAMS params = { 0 }; + if (usbCamera) + { + TurnOnOtg(NULL); + } + TurnOnCameraPower(NULL); + + NdkCamera camera(width, height, params); + int res = camera.selfTest(std::to_string(cameraId), width, height); + TurnOffCameraPower(NULL); + if (usbCamera) + { + TurnOffOtg(NULL); + } + if (res == 0) + { + result += "通道" + std::to_string(channel) + "正常:最大分辨率:" + std::to_string(width) + "x" + std::to_string(height) + ITEM_SEP; + } + else + { + result += "通道" + std::to_string(channel) + " 异常 err=" + 
std::to_string(res) + ITEM_SEP; + } + } + + int bv = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES); + if (bv > 0) + { + bv -= bv % 100; + result += std::string("电池电压:") + std::to_string(bv / 1000) + std::string(".") + std::to_string((bv % 1000) / 100) + ITEM_SEP; + } + + fs::space_info si = fs::space("/data"); + double fr = ((double)si.available * 100.0f) / ((double)si.capacity); + result += "可用存储:"; + result += std::to_string((int)fr); + result += "%%" + std::string(ITEM_SEP); + + long fm = android_os_Process_getFreeMemory(); + long tm = android_os_Process_getTotalMemory(); + double fmp = ((double)fm * 100.0f) / ((double)tm); + result += std::string("可用内存:") + std::to_string((int)fmp) + std::string("%%") + ITEM_SEP; + + if (!m_tfCardPath.empty()) + { + fs::space_info si2 = fs::space(m_tfCardPath.c_str()); + double fr2 = ((double)si2.available * 100.0f) / ((double)si2.capacity); + result += "TF卡可用空间:"; + result += std::to_string((int)fr2); + result += "%%" + std::string(ITEM_SEP); + } + + result += "4G信号强度:"; + result += std::to_string(m_signalLevel); + result += ITEM_SEP; + + result += "网络接口:"; + std::vector devices; + GetNetDevices(devices); + for (auto it = devices.cbegin(); it != devices.cend(); ++it) + { + result += (*it); + result += " "; + } + // result += ITEM_SEP; + + return true; +} + +bool CPhoneDevice::UpdateTime(time_t ts) +{ + JNIEnv* env = NULL; + jboolean ret = JNI_FALSE; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + jlong timeInMillis = ((jlong)ts) * 1000; + ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + return (ret == JNI_TRUE); +} + +bool CPhoneDevice::UpdateSchedules() +{ + JNIEnv* env = NULL; + jboolean ret = JNI_FALSE; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + 
} + time_t ts = time(NULL); + ret = env->CallBooleanMethod(m_javaService, mUpdateCaptureScheduleMid, ts); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + return (ret == JNI_TRUE); +} + +int CPhoneDevice::QueryBatteryVoltage(int retries) +{ + int val = -1; // // BatVol + for (int idx = 0; idx < retries; idx++) + { + val = GpioControl::getBatteryBusVoltage(); // // BatVol + if (val >= 0) + { + break; + } + std::this_thread::sleep_for(std::chrono::milliseconds(10)); + } + + return val; +} + +bool CPhoneDevice::QuerySystemProperties(std::map& properties) +{ + char value[PROP_VALUE_MAX] = { 0 }; + std::map powerInfo; + int res = 0; + int bv = -1; + + for (std::map::iterator it = properties.begin(); it != properties.end(); ++it) + { + if (!(it->second.empty())) + { + continue; + } + + if (it->first == PROP_EQUIP_NAME) + { + __system_property_get("ro.product.name", value); + it->second = value; + } + else if (it->first == PROP_MODEL) + { + __system_property_get("ro.product.model", value); + it->second = std::string(value); + } + else if (it->first == PROP_BS_MANU) + { + __system_property_get("ro.product.manufacturer", value); + it->second = std::string(value); + } + else if (it->first == PROP_VERSION) + { + // FOR Protocol + snprintf(value, sizeof(value), "%u.%03u", (mVersionCode / 1000), (mVersionCode % 1000)); + // __system_property_get("ro.build.version.release", value); + it->second = std::string(value); + } + else if (it->first == (PROP_VERSION_ABBR)) + { + // FOR OSD + string version = GetVersion(); +#if 0 + version += " " + FormatLocalTime(mBuildTime); +#endif + it->second = version; + } + else if (it->first == PROP_BUILD_TIME) + { + it->second = FormatLocalDateTime(mBuildTime); + } + else if (it->first == PROP_PROD_DATE) + { + __system_property_get("ro.build.date.utc", value); + it->second = std::string(value); + } + else if (it->first == PROP_SN || it->first == PROP_BS_ID) + { + __system_property_get("ro.serialno", value); + it->second = 
std::string(value); + } + else if (it->first == PROP_IMEI) + { + if (m_simcard.empty()) + { + __system_property_get("phone.imei", value); + it->second = std::string(value); + } + else + { + it->second = m_simcard; + } + } + else if (it->first == PROP_OPERATION_TEMP) + { + it->second = QueryCpuTemperature(); + } + else if (it->first == PROP_FREE_ROM) + { + fs::space_info si = fs::space("/data"); + it->second = std::to_string(si.available); // Unit: M + } + else if (it->first == PROP_FREE_ROM_PERCENT) + { + fs::space_info si = fs::space("/data"); + double fr = ((double)si.available * 100.0f) / ((double)si.capacity); + snprintf(value, sizeof(value), "%d%%", (int)fr); + it->second = std::string(value); + } + else if (it->first == PROP_TOTAL_ROM) + { + fs::space_info si = fs::space("/data"); + it->second = std::to_string(si.capacity); // Unit: M + } + else if (it->first == PROP_FREE_MEMORY) + { + it->second = std::to_string(android_os_Process_getFreeMemory()); // Unit: M + } + else if (it->first == PROP_FREE_MEMORY_PERCENT) + { + long fm = android_os_Process_getFreeMemory(); + long tm = android_os_Process_getTotalMemory(); + double fmp = ((double)fm * 100.0f) / ((double)tm); + snprintf(value, sizeof(value), "%d%%", (int)fmp); + it->second = std::string(value); // Unit: M + } + else if (it->first == PROP_TOTAL_MEMORY) + { + it->second = std::to_string(android_os_Process_getTotalMemory()); // Unit: M + } + else if (it->first == (PROP_LIGHTDEPENDENT_RESISTOR)) + { + int val = GpioControl::getLightAdc(); + it->second = std::to_string(val); + } + else if (it->first == (PROP_CHARGING_CURRENT)) + { + it->second = std::to_string(GpioControl::getChargingCurrent()); + } + else if (it->first == (PROP_CHARGING_POWER)) + { + it->second = std::to_string(GpioControl::getChargingPower()); + } + else if (it->first == (PROP_CHARGING_BUS_VOL) || it->first == (PROP_CHARGING_VOLTAGE)) + { + double val = -1; + char str[32] = { 0 }; + for (int idx = 0; idx < 3; idx++) + { + val = 
GpioControl::getChargingBusVoltage(); + if (val < 0) + { + continue; + } + snprintf(str, sizeof(str), "%.1f", (val / 1000.0)); + it->second = std::string(str); + break; + } + } + else if (it->first == (PROP_BATTERY_POWER)) + { + it->second = std::to_string(GpioControl::getBatteryPower()); + } + else if (it->first == (PROP_BATTERY_BUS_VOL) || it->first == (PROP_BATTERY_VOLTAGE)) + { + int val = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES); // // BatVol + if (val > 0) + { + bv = val; + snprintf(value, sizeof(value), "%.1f", val / 1000.0); + it->second = std::string(value); + } + else + { +#ifdef _DEBUG + int aa = 0; +#endif + } + } + else if ((it->first == (PROP_SIGNAL_4G)) || (it->first == (PROP_SIGNAL_2G)) || (it->first == (PROP_SIGNAL_LEVEL))) + { + it->second = std::to_string(m_signalLevel); + } + /* + else if (startsWith(it->first, PROP_JAVA_PREFIX)) + { + if (powerInfo.empty()) + { + QueryPowerInfo(powerInfo); + } + auto it2 = powerInfo.find(it->first); + if (it2 != powerInfo.cend()) + { + it->second = it2->second; + } + } + */ + } + + std::map::iterator it = properties.find(PROP_BATTERY_CURRENT); + if (it != properties.end()) + { + if (bv == -1) + { + bv = QueryBatteryVoltage(DEFAULT_BATTERY_QUERY_RETRIES); + } + + if (bv > 0) + { + char str[32] = { 0 }; + float batteryCurrent = STANDARD_CURRENT_64V / ((float)bv / 1000.0f / STANDARD_VOLTAGE_64V); + snprintf(str, sizeof(str), "%d", (int)batteryCurrent); + it->second = std::string(str); + } + } + // __system_property_get("ro.telephony.default_network", value); + + return true; +} + +std::string CPhoneDevice::QueryCpuTemperature() +{ + // /sys/devices/virtual/thermal/thermal_zone0/temp + std::vector data; + // /sys/class/thermal/thermal zone*/temp + if (readFile("/sys/class/thermal/thermal_zone3/temp", data) && !data.empty()) + { + data.push_back(0); + int temp = atoi((const char*)(&data[0])); + return std::to_string((temp / 1000) + 20); + } + + return ""; +} + +void CPhoneDevice::QueryPowerInfo(std::map& 
powerInfo) +{ + JNIEnv* env = NULL; + jboolean ret = JNI_FALSE; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + jobject jobj = env->CallObjectMethod(m_javaService, mGetSystemInfoMid); + std::string str = jstring2string(env, (jstring)jobj); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + if (!str.empty()) + { + std::map queries = parseQuery(str); + powerInfo.swap(queries); + } +} + +bool CPhoneDevice::GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector& items) +{ + return false; +} + +bool CPhoneDevice::InstallAPP(const std::string& path, unsigned int delayedTime) +{ + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + + jstring jpath = env->NewStringUTF(path.c_str()); + env->CallBooleanMethod(m_javaService, mInstallAppMid, jpath, (jlong)delayedTime); + // env->ReleaseStringUTFChars(jpath, path.c_str()); + env->DeleteLocalRef(jpath); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + return true; +} + +bool CPhoneDevice::Reboot(int resetType) +{ + if (resetType == REBOOT_TYPE_DEVICE) + { + // reboot the device + std::thread t([]() + { + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + GpioControl::reboot(); + }); + t.detach(); + } + else + { + long timeout = 1000; + RestartApp(resetType, timeout); + } + + return true; +} + +void CPhoneDevice::RestartApp(int resetType, long timeout, const std::string& reason) +{ + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + + jstring jreason = NULL; + if (!reason.empty()) + { + jreason = env->NewStringUTF(reason.c_str()); + } + env->CallVoidMethod(m_javaService, mRebootMid, resetType, timeout, jreason); + if (didAttachThread) + { + 
m_vm->DetachCurrentThread(); + } +} + +bool CPhoneDevice::EnableGPS(bool enabled) +{ + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + return false; + } + jboolean jenabled = enabled ? JNI_TRUE : JNI_FALSE; + env->CallVoidMethod(m_javaService, mEnableGpsMid, jenabled); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + return true; +} + +float CPhoneDevice::QueryBattaryVoltage(int timesForAvg, bool* isCharging) +{ + if (timesForAvg <= 0) + { + return 0.0f; + } + + int val = 0; + int totalVals = 0; + float chargingBusVoltage = 0.0f; + for (int idx = 0; idx < timesForAvg; idx++) + { + val = GpioControl::getChargingBusVoltage(); + if (val > 1000) + { + chargingBusVoltage = (float)val / 1000.0; + break; + } + } + + if (isCharging != NULL) + { + *isCharging = chargingBusVoltage > DEFAULT_WARNING_CHARGING_BUS_VOL; + } + + int matched = 0; + for (int idx = 0; idx < timesForAvg; idx++) + { + val = GpioControl::getBatteryVoltage(); // // BatVol + if (val > 0) + { + totalVals += val > BATTARY_VOLTAGE_MAX ? BATTARY_VOLTAGE_MAX : val; + matched++; + } + } + + return (matched > 0) ? 
((float)totalVals / 1000.0 / matched) : 0; +} + +bool CPhoneDevice::RequestPosition() +{ + JNIEnv* env = NULL; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + return false; + } + + jboolean ret = env->CallBooleanMethod(m_javaService, mRequestPositionMid); + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } + + return (ret == JNI_TRUE); +} + +void CPhoneDevice::handleSignal(int sig, siginfo_t *si, void *uc) +{ + TIMER_CONTEXT* context = (TIMER_CONTEXT*)(si->si_value.sival_ptr); + context->device->handleTimerImpl(context); +} + +bool CPhoneDevice::RegisterHandlerForSignal(int sig) +{ + return true; + // Establish handler for timer signal + struct sigaction sa; + sigset_t mask; + + sa.sa_flags = SA_SIGINFO; + sa.sa_sigaction = CPhoneDevice::handleSignal; + sigemptyset(&sa.sa_mask); + if (sigaction(sig, &sa, NULL) == -1) + { + return false; + } + + return true; + // Block timer signal temporarily + + // printf("Blocking signal %d\n", SIG); + sigemptyset(&mask); + sigaddset(&mask, sig); + if (sigprocmask(SIG_SETMASK, &mask, NULL) == -1) + { + return false; + } + + return true; +} + +void CPhoneDevice::handleTimer(union sigval v) +{ + TIMER_CONTEXT* context = (TIMER_CONTEXT*)(v.sival_ptr); + context->device->handleTimerImpl(context); +} + +void CPhoneDevice::handleTimerImpl(CPhoneDevice::TIMER_CONTEXT* context) +{ + context->times++; + if (context->expectedTimes == 0 || context->times <= context->expectedTimes) + { + if (m_listener != NULL) + { + m_listener->OnTimeout(context->uid, context->timerType, context->data, context->times); + } + } +} + +void CPhoneDevice::handleRebootTimer(union sigval v) +{ + CPhoneDevice* pDevice = (CPhoneDevice*)(v.sival_ptr); + // Reboot APP + XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app"); + pDevice->RestartApp(0, 2000, "Camera Can't Close"); +} + +// void CPhoneDevice::handleRebootTimerImpl() +// { +// } + 
+IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, unsigned long times/* = 0*/) +{ + struct sigevent evp = { 0 }; + struct itimerspec ts = { 0 }; + timer_t timer; + int ret; + + TIMER_CONTEXT* context = new TIMER_CONTEXT(); + context->device = this; + context->data = data; + context->timerType = timerType; + context->expectedTimes = times; + context->times = 0; + context->uid = 0; + + evp.sigev_value.sival_ptr = context; + evp.sigev_notify = SIGEV_THREAD; //SIGEV_THREAD_ID; + evp.sigev_notify_function = CPhoneDevice::handleTimer; + // evp.sigev_notify_thread_id = gettid(); + // evp.sigev_notify = SIGEV_SIGNAL; + // evp.sigev_signo = SIGUSR2; + + ret = timer_create(CLOCK_REALTIME, &evp, &timer); + if( ret) + { + int err = errno; + delete context; + return INVALID_TIMER_UID; + } + + context->uid = (unsigned long)timer; + ts.it_value.tv_sec = (timeout / 1000); + ts.it_value.tv_nsec = (timeout % 1000) * 1000; + if (times != 1) + { + ts.it_interval.tv_sec = ts.it_value.tv_sec; + ts.it_interval.tv_nsec = ts.it_value.tv_nsec; + } + + ret = timer_settime(timer, 0, &ts, NULL); + if(ret) + { + timer_delete(timer); + delete context; + return INVALID_TIMER_UID; + } + + m_devLocker.lock(); + mTimers.insert(mTimers.end(), std::pair((IDevice::timer_uid_t)timer, context)); + m_devLocker.unlock(); + return (IDevice::timer_uid_t)timer; +} + +bool CPhoneDevice::UnregisterTimer(IDevice::timer_uid_t uid) +{ + timer_t timer = (timer_t)uid; + int res = timer_delete(timer); + + m_devLocker.lock(); + std::map::iterator it = mTimers.find(uid); + if (it != mTimers.end()) + { + delete it->second; + mTimers.erase(it); + m_devLocker.unlock(); + return true; + } + + m_devLocker.unlock(); + return false; +} + +unsigned long CPhoneDevice::RequestWakelock(unsigned long timeout) +{ + unsigned long wakelockId = m_wakelockIdFeed.fetch_add(1); + std::string name = WAKELOCK_NAME; + name += to_string(wakelockId); + + // 
ALOGI("RequestWakelock=%lld",wakelockId);

    // NOTE(review): template arguments and some tokens in this file were lost in
    // extraction (e.g. bare `vector`); restore from the original file before building.
    jboolean ret = JNI_FALSE;   // NOTE(review): unused local
    JNIEnv* env = NULL;
    bool didAttachThread = false;
    bool res = GetJniEnv(m_vm, &env, didAttachThread);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    jstring jname = env->NewStringUTF(name.c_str());
    jlong jtimeout = (jlong)timeout;

    // Forward the wakelock request to the Java service object.
    env->CallVoidMethod(m_javaService, mRequestWakelockMid, jname, jtimeout);
    // env->ReleaseStringUTFChars(jname, name.c_str());
    env->DeleteLocalRef(jname);

    // Only detach if GetJniEnv attached this native thread.
    if (didAttachThread)
    {
        m_vm->DetachCurrentThread();
    }

    return wakelockId;
}

// Releases a previously acquired wakelock: rebuilds its Java-side name
// (WAKELOCK_NAME + numeric id) and notifies the Java service via JNI.
// Always returns true once a JNI env was obtained.
bool CPhoneDevice::ReleaseWakelock(unsigned long wakelock)
{
    // ALOGI("ReleaseWakelock=%lld", wakelock);
    std::string name = WAKELOCK_NAME;
    name += to_string(wakelock);

    jboolean ret = JNI_FALSE;   // NOTE(review): unused local
    JNIEnv* env = NULL;
    bool didAttachThread = false;
    bool res = GetJniEnv(m_vm, &env, didAttachThread);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return false;
    }
    jstring jname = env->NewStringUTF(name.c_str());

    env->CallVoidMethod(m_javaService, mReleaseWakelockMid, jname);
    env->DeleteLocalRef(jname);
    // env->ReleaseStringUTFChars(jname, name.c_str());
    if (didAttachThread)
    {
        m_vm->DetachCurrentThread();
    }

    return true;
}

// Registers a heartbeat timer with the Java service. Records the start time and
// duration locally, allocates a new timer uid from the atomic feed, and passes
// the timeout (and, when ALIGN_HB_TIMER_TO_PHOTO is defined, the next photo
// timestamp) to Java. Returns the new uid, or 0 when no JNI env is available.
IDevice::timer_uid_t CPhoneDevice::RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto)
{
    mHeartbeatStartTime = time(NULL);
    mHeartbeatDuration = timeout;

    IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);

    jboolean ret = JNI_FALSE;   // NOTE(review): unused local
    JNIEnv* env = NULL;
    bool didAttachThread = false;
    bool res = GetJniEnv(m_vm, &env, didAttachThread);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
#ifdef ALIGN_HB_TIMER_TO_PHOTO
    env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout, (jlong)tsForNextPhoto);
#else
    env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout, 0);
#endif
    if (didAttachThread)
    {
        m_vm->DetachCurrentThread();
    }

    return uid;
}

// Starts a photo/video capture. For photos (mediaType == 0) it opens the NDK
// camera; otherwise it asks the Java service to start recording. Before opening,
// it waits (with an 8-second watchdog timer that fires handleRebootTimer if the
// close thread is stuck) for the previous close thread to finish.
bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector& osds, const std::string& path)
{
    if (photoInfo.width == 0 || photoInfo.height == 0)
    {
        XYLOG(XYLOG_SEVERITY_ERROR, "TP: Invalid Size: (%u-%u) PHOTOID=%u", (unsigned int)photoInfo.width, (unsigned int)photoInfo.height, photoInfo.photoId);
        return false;
    }
    if (m_threadClose.joinable())
    {
        XYLOG(XYLOG_SEVERITY_INFO, "TP: Wait Prev Thread CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
        struct sigevent evp = { 0 };
        struct itimerspec ts = { 0 };
        timer_t timer;
        int ret;

        // Watchdog: if joining the previous close thread hangs, the timer thread
        // calls handleRebootTimer (which restarts the app).
        evp.sigev_value.sival_ptr = this;
        evp.sigev_notify = SIGEV_THREAD; //SIGEV_THREAD_ID;
        evp.sigev_notify_function = CPhoneDevice::handleRebootTimer;
        // evp.sigev_notify_thread_id = gettid();
        // evp.sigev_notify = SIGEV_SIGNAL;
        // evp.sigev_signo = SIGUSR2;

        ret = timer_create(CLOCK_REALTIME, &evp, &timer);
        if( ret == 0)
        {
            ts.it_value.tv_sec = 8; // 8 seconds
            ts.it_value.tv_nsec = 0;
            ret = timer_settime(timer, 0, &ts, NULL);
        }
        m_threadClose.join();
        timer_delete(timer);   // join returned in time; cancel the watchdog
        XYLOG(XYLOG_SEVERITY_INFO, "TP: Wait Prev Thread End CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
    }

    if (mCamera != NULL)
    {
        XYLOG(XYLOG_SEVERITY_INFO, "TP: mCamera ISNOT null CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
        delete mCamera;
        mCamera = NULL;
    }

    XYLOG(XYLOG_SEVERITY_INFO, "TP: CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
    mPhotoInfo = photoInfo;
    mPath = path;
    mOsds = osds;

    // Translate the device photo settings into NDK camera parameters.
    NdkCamera::CAMERA_PARAMS params;
    memset(&params, 0, sizeof(params));

    params.sceneMode = mPhotoInfo.sceneMode;
    params.autoFocus = mPhotoInfo.autoFocus;
    params.autoExposure = mPhotoInfo.autoExposure;
    params.focusTimeout =
mPhotoInfo.focusTimeout * 1000;   // seconds -> milliseconds (presumably; confirm unit against NdkCamera)
    params.exposureTime = mPhotoInfo.exposureTime;
    params.sensitivity = mPhotoInfo.sensitivity;
    params.compensation = mPhotoInfo.compensation;
    params.orientation = mPhotoInfo.orientation;
    params.zoom = mPhotoInfo.zoom;
    params.zoomRatio = mPhotoInfo.zoomRatio;
    params.requestTemplate = mPhotoInfo.requestTemplate;
    params.awbMode = mPhotoInfo.awbMode;
    params.wait3ALocked = mPhotoInfo.wait3ALocked;
    params.burstRawCapture = mPhotoInfo.usingRawFormat;
    params.burstCaptures = mPhotoInfo.burstCaptures;
    // Clamp out-of-range request templates to 2 (a safe default).
    if (params.requestTemplate <= 0 || params.requestTemplate > 5)
    {
        params.requestTemplate = 2;
    }

#if 0
    if (photoInfo.ldrEnabled)
    {
        if (GpioControl::getLightAdc() > 1400)
        {
            params.autoExposure = 0;
            params.exposureTime = 1200000000;
            params.sensitivity = 1200;
        }
    }
#endif

    bool res = false;

    // Power up the camera rail (and OTG for USB cameras) before opening.
    if (photoInfo.usbCamera)
    {
        TurnOnOtg(NULL);
    }
    TurnOnCameraPower(NULL);

    res = true;
    if (mPhotoInfo.mediaType == 0/* && mPhotoInfo.usingRawFormat == 0*/)
    {
        // Photo path: open the NDK camera directly.
        mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
        // mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
        if (mCamera->open(to_string(mPhotoInfo.cameraId)) == 0)
        {
            XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Succeeded to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
        }
        else
        {
            XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Failed to OpenCamera CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
            delete mCamera;
            mCamera = NULL;
            res = false;

            // Undo the power-up performed above on failure.
            TurnOffCameraPower(NULL);
            if (photoInfo.usbCamera)
            {
                TurnOffOtg(NULL);
            }
        }
    }
    else
    {
        // Video path: delegate recording to the Java service.
        JNIEnv* env = NULL;
        bool didAttachThread = false;
        res = GetJniEnv(m_vm, &env, didAttachThread);
        if (!res)
        {
            ALOGE("Failed to get JNI Env");
            return false;
        }

        jstring leftTopOSD = NULL;
        jstring rightTopOSD = NULL;
        jstring rightBottomOSD = NULL;
        jstring leftBottomOSD = NULL;

        // Convert each non-empty OSD string to a Java string by corner.
        for (vector::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it)
        {
            if (it->text.empty())
            {
                continue;
            }
            switch (it->alignment)
            {
            case OSD_ALIGNMENT_TOP_LEFT:
                leftTopOSD = env->NewStringUTF(it->text.c_str());
                break;
            case OSD_ALIGNMENT_TOP_RIGHT:
                rightTopOSD = env->NewStringUTF(it->text.c_str());
                break;
            case OSD_ALIGNMENT_BOTTOM_RIGHT:
                rightBottomOSD = env->NewStringUTF(it->text.c_str());
                break;
            case OSD_ALIGNMENT_BOTTOM_LEFT:
                leftBottomOSD = env->NewStringUTF(it->text.c_str());
                break;
            }
        }

        // orientation field is 1-based; 0 means "unspecified" (-1 to Java).
        int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90;
        jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? JNI_TRUE : JNI_FALSE;
        // NOTE(review): mPhotoInfo.duration is passed twice here — confirm the
        // Java signature really takes duration in both positions.
        env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (unsigned long)mPhotoInfo.photoId,
                            mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation,
                            leftTopOSD, rightTopOSD, rightBottomOSD, leftBottomOSD);

        if (leftTopOSD) env->DeleteLocalRef(leftTopOSD);
        if (rightTopOSD) env->DeleteLocalRef(rightTopOSD);
        if (rightBottomOSD) env->DeleteLocalRef(rightBottomOSD);
        if (leftBottomOSD) env->DeleteLocalRef(leftBottomOSD);

        if (didAttachThread)
        {
            m_vm->DetachCurrentThread();
        }
    }

    return res;
}

// Synchronously closes and destroys the current camera, if any.
bool CPhoneDevice::CloseCamera()
{
    if (mCamera != NULL)
    {
        // Clear the member first so re-entrant calls see no camera.
        auto camera = mCamera;
        mCamera = NULL;

        camera->close();
        delete camera;
    }
    return true;
}

// Worker-thread variant of CloseCamera: closes/destroys the given camera and
// then powers down the camera rail (and OTG when requested). Runs on the
// detached m_threadClose thread started by the capture callbacks.
void CPhoneDevice::CloseCamera2(CPhoneDevice::CPhoneCamera* camera, unsigned int photoId, bool turnOffOtg)
{
    XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Start CloseCamera PHOTOID=%u", photoId);
    // std::this_thread::sleep_for(std::chrono::milliseconds(16));
    if (camera != NULL)
    {
        camera->close();
        delete camera;
    }

    XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power PHOTOID=%u", photoId);
    if (turnOffOtg)
    {
        TurnOffOtg(NULL);
    }
TurnOffCameraPower(NULL);
    XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power PHOTOID=%u", photoId);

    XYLOG(XYLOG_SEVERITY_DEBUG, "TP: CloseCamera PHOTOID=%u", photoId);

}

// Debug helper: dumps an ncnn::Mat to an image file via OpenCV.
void visualize(const char* filename, const ncnn::Mat& m)
{
    cv::Mat a(m.h, m.w, CV_8UC3);
    m.to_pixels(a.data, ncnn::Mat::PIXEL_BGR2RGB);

    cv::imwrite(filename, a);
}

// Draws multi-line text (split on '\n') onto `mat` with the FreeType renderer,
// advancing the baseline by 125% of the measured line height per line.
// NOTE(review): template arguments on cv::Ptr / std::vector were lost in
// extraction; restore from the original file.
void DrawOutlineText(cv::Ptr ft2, cv::Mat& mat, const std::string& str, cv::Point startPoint, int fontSize, cv::Scalar clr, int thickness)
{
    std::vector lines = split(str, "\n");
    int lineHeight = 0;
    cv::Point pt = startPoint;
    cv::Size textSize;
    int baseline = 0;

    for (std::vector::const_iterator it = lines.cbegin(); it != lines.cend(); ++it )
    {
        textSize = ft2->getTextSize(*it, fontSize, thickness, &baseline);
        lineHeight = std::max(fontSize, textSize.height + baseline);

        ft2->putText(mat, *it, pt, fontSize, clr, thickness, cv::LINE_AA, false, true);

        pt.x = startPoint.x;
        pt.y += lineHeight + (lineHeight >> 2); // 125%
    }
}

// Single-shot capture callback: stamps the photo time (with optional "remedy"
// back-dating), hands the camera to a background close thread, then
// post-processes the RGB frame on a detached worker thread.
bool CPhoneDevice::onOneCapture(std::shared_ptr characteristics,
                                std::shared_ptr result,
                                uint32_t ldr, cv::Mat rgb)
{
    time_t takingTime = time(NULL);
    if (mPhotoInfo.remedy != 0)
    {
        // Late (remedied) shots get a synthetic time near the schedule instead
        // of the actual wall-clock time.
        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
        {
            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
        }
    }
    mPhotoInfo.photoTime = takingTime;

    // Move the per-shot state into locals so the next TakePhoto can start.
    vector osds;
    osds.swap(mOsds);
    PHOTO_INFO photoInfo = mPhotoInfo;
    std::string path;
    path.swap(mPath);

    // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);

    acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
    ACameraMetadata_const_entry e = { 0 };
    camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
    if (status == ACAMERA_OK)
    {
        facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
    }

    int sensorOrientation = 0;
    {
        ACameraMetadata_const_entry e = { 0 };
        status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
        if (status == ACAMERA_OK)
        {
            sensorOrientation = (int)e.data.i32[0];
        }
    }

    bool turnOffOtg = (photoInfo.usbCamera != 0);
    CPhoneCamera* pCamera = mCamera;
    mCamera = NULL;

    media_status_t mstatus;   // NOTE(review): unused in this function

    // Close the camera on a background thread; keep it joinable via
    // m_threadClose so the next TakePhoto can wait on it.
    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
    m_threadClose.swap(closeThread);
    if (closeThread.joinable())
    {
        closeThread.detach();
    }

    CPhoneDevice* pThis = this;
    std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, takingTime]()mutable
    {
        // Build the debug OSD line from the capture result, when enabled.
        std::string cameraInfo;
        if (photoInfo.outputDbgInfo != 0)
        {
            NdkCamera::CAPTURE_RESULT captureResult = { 0 };
            NdkCamera::EnumCameraResult(result.get(), captureResult);

            char extimeunit[4] = { 0 };
            unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
            strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
            char str[128] = { 0 };
            snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
                     captureResult.autoExposure, captureResult.autoFocus,
                     extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
                     // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
                     (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
                     captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
                     (uint32_t)captureResult.duration, captureResult.frameDuration);
            cameraInfo = str;
        }

#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
        bool shouldRetry = false;
        if (ldr != ~0)
        {
            if (ldr < MIN_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);

                    // photoInfo.usingRawFormat = 1;
                }
            }
            else if (ldr > MAX_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);
                }

                photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
            }
        }
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO

        // Notify to take next photo
        pThis->TakePhotoCb(1, photoInfo, "", takingTime);

        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
        if (res)
        {
            // TakePhotoCb(2, photoInfo, path, takingTime);
        }
    });

    th.detach();

    return true;
}

// Burst capture callback (HDR+ byte-array variant): consumes the burst frames
// and runs the HDR+ pipeline on a detached worker thread.
bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristics,
                                  std::vector >& results,
                                  uint32_t ldr,
std::vector >& frames)
{
    time_t takingTime = time(NULL);
    if (mPhotoInfo.remedy != 0)
    {
        // Late (remedied) shots get a synthetic time near the schedule.
        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
        {
            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
        }
    }
    mPhotoInfo.photoTime = takingTime;

    // Move per-shot state into locals so the next TakePhoto can start.
    vector osds;
    osds.swap(mOsds);
    PHOTO_INFO photoInfo = mPhotoInfo;
    std::string path;
    path.swap(mPath);

    // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
    // Take ownership of the burst frames; ByteArraysPointer frees them when the
    // worker lambda releases its last shared_ptr reference.
    std::shared_ptr pByteArrays = std::make_shared();
    pByteArrays.get()->byteArrays.swap(frames);

    bool turnOffOtg = (photoInfo.usbCamera != 0);
    CPhoneCamera* pCamera = mCamera;
    mCamera = NULL;

    // Close the camera on a background thread tracked by m_threadClose.
    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
    m_threadClose.swap(closeThread);
    if (closeThread.joinable())
    {
        closeThread.detach();
    }

    CPhoneDevice* pThis = this;
    std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, takingTime]()mutable
    {
        cv::Mat rgb;
        std::string cameraInfo;
        media_status_t mstatus;   // NOTE(review): unused in this lambda

        acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
        ACameraMetadata_const_entry e = { 0 };
        camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
        if (status == ACAMERA_OK)
        {
            facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
        }

        int sensorOrientation = 0;
        {
            ACameraMetadata_const_entry e = { 0 };
            status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
            if (status == ACAMERA_OK)
            {
                sensorOrientation = (int)e.data.i32[0];
            }
        }

        // Build the debug OSD line from the first capture result, when enabled.
        if (photoInfo.outputDbgInfo != 0)
        {
            if (!results.empty())
            {
                NdkCamera::CAPTURE_RESULT captureResult = { 0 };
                NdkCamera::EnumCameraResult(results[0].get(), captureResult);

                char extimeunit[4] = { 0 };
                unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
                strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
                char str[128] = { 0 };
                snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
                         captureResult.autoExposure, captureResult.autoFocus,
                         extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
                         // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
                         (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
                         captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
                         (uint32_t)captureResult.duration, captureResult.frameDuration);
                cameraInfo = str;
            }
        }

#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
        bool shouldRetry = false;
        if (ldr != ~0)
        {
            if (ldr < MIN_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);

                    // photoInfo.usingRawFormat = 1;
                }
            }
            else if (ldr > MAX_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);
                }

                photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
            }
        }
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO

        // Notify to take next photo
        pThis->TakePhotoCb(1, photoInfo, "", takingTime);

        // Run the HDR+ merge over the captured burst, then free the frames.
        XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
        hdrplus::hdrplus_pipeline pipeline;
        std::vector > localFrames;
        localFrames.swap(pByteArrays.get()->byteArrays);
        pipeline.run_pipeline(localFrames, 0, rgb);
        localFrames.clear();

        XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);

        {
            // HDR+ outputs 16-bit; scale down to 8-bit for OSD/JPEG.
            cv::Mat tempPic = convert16bit2_8bit_(rgb);
            rgb = tempPic;
        }

        // Apply the configured orientation (1=normal, 2=90°, 3=180°, 4=270°),
        // with front-facing mirroring where required.
        if (photoInfo.orientation > 0)
        {
            if (photoInfo.orientation == 1)
            {
                if (facing == ACAMERA_LENS_FACING_FRONT)
                {
                    cv::flip(rgb, rgb, 1);
                }
            }
            else if (photoInfo.orientation == 2)
            {
                cv::Mat tempPic;
                cv::transpose(rgb, tempPic);
                cv::flip(tempPic, rgb, 1);
            }
            else if (photoInfo.orientation == 3)
            {
                if (facing == ACAMERA_LENS_FACING_FRONT)
                {
                    flip(rgb, rgb, 0);
                }
                else
                {
                    cv::flip(rgb, rgb, -1);
                }
            }
            else if (photoInfo.orientation == 4)
            {
                cv::Mat tempPic;
                cv::transpose(rgb, tempPic);
                cv::flip(tempPic, rgb, 0);
            }

            XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
        }
        cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);

        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
        if (res)
        {
            // TakePhotoCb(2, photoInfo, path, takingTime);
        }
    });

    th.detach();

    return true;
}

// Burst capture callback (AImage variant): converts RAW bursts to in-memory DNG
// for the HDR+ pipeline, or a single YUV frame to RGB, then post-processes.
bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristics,
                                  std::vector >& results,
                                  uint32_t ldr, std::vector >& frames)
{
    time_t takingTime = time(NULL);
    if (mPhotoInfo.remedy != 0)
    {
        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
        {
            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
        }
    }
mPhotoInfo.photoTime = takingTime; + + vector osds; + osds.swap(mOsds); + PHOTO_INFO photoInfo = mPhotoInfo; + std::string path; + path.swap(mPath); + + // std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId); + + acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; + ACameraMetadata_const_entry e = { 0 }; + camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e); + if (status == ACAMERA_OK) + { + facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; + } + + int sensorOrientation = 0; + { + ACameraMetadata_const_entry e = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e); + if (status == ACAMERA_OK) + { + sensorOrientation = (int)e.data.i32[0]; + } + } + + bool turnOffOtg = (photoInfo.usbCamera != 0); + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + cv::Mat rgb; + std::vector > rawFiles; + media_status_t mstatus; + + if (photoInfo.usingRawFormat != 0) + { + for (int idx = 0; idx < frames.size(); idx++) + { + std::shared_ptr spImage = frames[idx]; + std::shared_ptr spResult = results[idx]; + + hdrplus::MemFile* rawImage = new hdrplus::MemFile(); + rawFiles.push_back(std::shared_ptr(rawImage)); + // rawImage->FromAImage(spImage.get(), characteristics.get(), spResult.get()); + + int32_t width = 0; + int32_t height = 0; + mstatus = AImage_getWidth(spImage.get(), &width); + mstatus = AImage_getHeight(spImage.get(), &height); + + int32_t planeCount = 0; + mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount); + AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount); + + uint8_t *planeData = NULL; + int planeDataLen = 0; + mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen); + ALOGD("Start Converting Dng"); + DngCreator dngCreator(characteristics.get(), spResult.get()); + 
dngCreator.writeInputBuffer(rawImage->content, planeData, planeDataLen, width, height, 0);
            ALOGD("End Converting Dng");
        }
    }
    else
    {
        // Non-RAW path: a single YUV_420_888 frame is converted to RGB here,
        // before the worker thread runs.
        if (results.size() == 1 && frames.size() == 1)
        {
            std::shared_ptr result = results[0];
            std::shared_ptr frame = frames[0];

            int32_t format;
            mstatus = AImage_getFormat(frame.get(), &format);

            if (format == AIMAGE_FORMAT_YUV_420_888)
            {
                int32_t width;
                int32_t height;
                mstatus = AImage_getWidth(frame.get(), &width);
                mstatus = AImage_getHeight(frame.get(), &height);

                int32_t y_pixelStride = 0;
                int32_t u_pixelStride = 0;
                int32_t v_pixelStride = 0;
                AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
                AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
                AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);

                int32_t y_rowStride = 0;
                int32_t u_rowStride = 0;
                int32_t v_rowStride = 0;
                AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
                AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
                AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);

                uint8_t* y_data = 0;
                uint8_t* u_data = 0;
                uint8_t* v_data = 0;
                int y_len = 0;
                int u_len = 0;
                int v_len = 0;
                AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
                AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
                AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);

                // If the planes are already laid out as packed NV21 (V plane
                // immediately after Y, U interleaved one byte after V, no
                // padding), convert in place without copying.
                if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
                {
                    // already nv21
                    ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
                }
                else
                {
                    // construct nv21
                    uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
                    {
                        // Y
                        uint8_t* yptr = nv21;
                        for (int y = 0; y < height; y++)
                        {
                            const uint8_t* y_data_ptr = y_data + y_rowStride * y;
                            for (int x = 0; x < width; x++)
                            {
                                yptr[0] = y_data_ptr[0];
                                yptr++;
                                y_data_ptr += y_pixelStride;
                            }
                        }

                        // UV — NV21 stores V first, then U, interleaved.
                        uint8_t* uvptr = nv21 + width * height;
                        for (int y = 0; y < height / 2; y++)
                        {
                            const uint8_t* v_data_ptr = v_data + v_rowStride * y;
                            const uint8_t* u_data_ptr = u_data + u_rowStride * y;
                            for (int x = 0; x < width / 2; x++)
                            {
                                uvptr[0] = v_data_ptr[0];
                                uvptr[1] = u_data_ptr[0];
                                uvptr += 2;
                                v_data_ptr += v_pixelStride;
                                u_data_ptr += u_pixelStride;
                            }
                        }
                    }

                    ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);

                    delete[] nv21;
                }
            }
        }
    }

    frames.clear();

    // Close the camera on a background thread tracked by m_threadClose.
    std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
    m_threadClose.swap(closeThread);
    if (closeThread.joinable())
    {
        closeThread.detach();
    }

    CPhoneDevice* pThis = this;
    std::thread th([pThis, characteristics, results, photoInfo, osds, path, rgb, rawFiles, facing, sensorOrientation, ldr, takingTime]()mutable
    {
        // Build the debug OSD line from the first capture result, when enabled.
        std::string cameraInfo;
        if (photoInfo.outputDbgInfo != 0)
        {
            if (!results.empty())
            {
                NdkCamera::CAPTURE_RESULT captureResult = { 0 };
                NdkCamera::EnumCameraResult(results[0].get(), captureResult);

                char extimeunit[4] = { 0 };
                unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
                strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
                char str[128] = { 0 };
                snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
                         captureResult.autoExposure, captureResult.autoFocus,
                         extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
                         // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
                         (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
                         captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
                         (uint32_t)captureResult.duration, captureResult.frameDuration);
                cameraInfo = str;
            }
        }

#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
        bool shouldRetry = false;
        if (ldr != ~0)
        {
            if (ldr < MIN_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);

                    // photoInfo.usingRawFormat = 1;
                }
            }
            else if (ldr > MAX_LIGHT_Y)
            {
                if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
                {
                    shouldRetry = true;
                    char presetBuf[16] = {0};
                    snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
                    // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
                    replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
                    XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
                          (uint32_t) captureResult.avgY);
                }

                photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
            }
        }
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO

        // Notify to take next photo
        pThis->TakePhotoCb(1, photoInfo, "", takingTime);

        if (photoInfo.usingRawFormat != 0)
        {
            // RAW path: HDR+ merge the DNGs collected above, then convert to
            // 8-bit, rotate per configuration, and swap channel order for OpenCV.
            XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
            hdrplus::hdrplus_pipeline pipeline;
            pipeline.run_pipeline(rawFiles, 0, rgb);
            rawFiles.clear();

            XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);

            {
                cv::Mat tempPic = convert16bit2_8bit_(rgb);
                rgb = tempPic;
            }

            if (photoInfo.orientation > 0)
            {
                if (photoInfo.orientation == 1)
                {
                    if (facing == ACAMERA_LENS_FACING_FRONT)
                    {
                        cv::flip(rgb, rgb, 1);
                    }
                } else if (photoInfo.orientation == 2)
                {
                    cv::Mat tempPic;
                    cv::transpose(rgb, tempPic);
                    cv::flip(tempPic, rgb, 1);
                }
                else if (photoInfo.orientation == 3)
                {
                    if (facing == ACAMERA_LENS_FACING_FRONT)
                    {
                        flip(rgb, rgb, 0);
                    }
                    else
                    {
                        cv::flip(rgb, rgb, -1);
                    }
                }
                else if (photoInfo.orientation == 4)
                {
                    cv::Mat tempPic;
                    cv::transpose(rgb, tempPic);
                    cv::flip(tempPic, rgb, 0);
                }

                XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
            }
            cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
        }

        bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
        if (res)
        {
            // TakePhotoCb(2, photoInfo, path, takingTime);
        }
    });

    th.detach();

    return true;
}

// Legacy single-image callback: runs AI recognition, draws OSD overlays onto
// `mat`, writes the JPEG, and reports the result via TakePhotoCb.
bool CPhoneDevice::OnImageReady(cv::Mat& mat)
{
    time_t takingTime = time(NULL);
    if (mPhotoInfo.remedy != 0)
    {
        if ((takingTime - mPhotoInfo.scheduleTime) > 30)
        {
            takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
        }
    }
    mPhotoInfo.photoTime = takingTime;
    int baseline = 0;
    cv::Size textSize;
    double height = mat.size().height;
    double width = mat.size().width;
    // double ratio = std::min(height / 1024, width / 1920);
    // Scale OSD sizes relative to a 1024-pixel-high reference frame.
    double ratio = height / 1024.0;
    int thickness = round(1.4 * ratio);
    if (thickness < 1) thickness = 1;
    else if (thickness > 5) thickness = 5;
    cv::Scalar scalarWhite(255, 255, 255); // white
    int fontSize = (int)(28.0 * ratio);
    cv::Point pt;

    // Prefer the system CJK fonts; fall back to the bundled font.
    std::string fontPath;
    if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc"))
    {
        fontPath = "/system/fonts/NotoSansCJK-Regular.ttc";
    }
    else if
(existsFile("/system/fonts/NotoSerifCJK-Regular.ttc"))
    {
        fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc";
    }
    else
    {
        fontPath = m_appPath+ "fonts/Noto.otf";
    }
    cv::Ptr ft2;
    ft2 = cv::ft::createFreeType2();
    ft2->loadFontData(fontPath.c_str(), 0);
    // cv::Rect rc(0, 0, mat.cols, mat.rows);
    // cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED);
    std::vector objs;

    // Run YOLOv5 recognition when the global config and this channel enable it.
    if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (mPhotoInfo.recognization != 0))
    {
        XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled");

        // visualize(ncnnPath.c_str(), in);
#ifdef _DEBUG
        double startTime = ncnn::get_current_time();
#endif // _DEBUG

        bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs);
#ifdef _DEBUG
        double elasped = ncnn::get_current_time() - startTime;
        // __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped);
#endif // _DEBUG
#ifdef _DEBUG
        ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0));
#endif
        if (detected && !objs.empty())
        {
#if 0
            static const char* class_names[] = {
                "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light",
                "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
                "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
                "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
                "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple",
                "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch",
                "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
                "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
                "hair drier", "toothbrush"
            };
#endif

            // Colors are packed 0xBBGGRR in the config; unpack to BGR scalars.
            cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
            cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
            // Minimum detection size as a percentage of the configured frame size.
            float minSizeW = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
            float minSizeH = m_pRecognizationCfg->minSize > 0 ? (mPhotoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;

            // Filter detections (unknown label, disabled item, low confidence,
            // undersized box) and optionally draw box + label for the rest.
            for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend();)
            {
                if (it->label >= m_pRecognizationCfg->items.size())
                {
                    it = objs.erase(it);
                    continue;
                }

                const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label];
                if (item.enabled == 0 || it->prob < item.prob)
                {
                    it = objs.erase(it);
                    continue;
                }

                if (m_pRecognizationCfg->minSize > 0)
                {
                    if (it->w < minSizeW || it->h < minSizeH)
                    {
                        it = objs.erase(it);
                        continue;
                    }
                }

                if ((mPhotoInfo.recognization & 0x2) != 0)
                {
                    cv::Rect rc(it->x, it->y, it->w, it->h);
                    cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness);
                    textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline);
                    textSize.height += baseline;
                    // Place the label above the box when there is room, below it
                    // otherwise, or inside as a last resort.
                    if (it->y > textSize.height)
                    {
                        pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness;
                    }
                    else if (mat.rows - it->y - it->h > textSize.height)
                    {
                        pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness;
                    }
                    else
                    {
                        // Inner
                        pt.y = it->y + 4 + m_pRecognizationCfg->thickness;
                    }
                    if (mat.cols - it->x > textSize.width)
                    {
                        pt.x = it->x;
                    }
                    else
                    {
                        pt.x = it->x + it->w - textSize.width;
                    }

#ifdef OUTPUT_CAMERA_DBG_INFO
                    char buf[128];
                    snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
                             it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
                    XYLOG(XYLOG_SEVERITY_DEBUG, buf);
#endif
                    ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
                }
                ++it;
            }
        }
    }
    else
    {
        XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
    }

// #ifdef OUTPUT_CAMERA_DBG_INFO

    // Draw the semi-transparent debug info strip near the bottom-left corner.
    // NOTE(review): `str` is still empty at this point in this function —
    // confirm against the original file whether a snprintf was lost here.
    if (mCamera != NULL)
    {

        if (mPhotoInfo.outputDbgInfo != 0)
        {
            cv::Scalar scalarRed(0, 0, 255); // red

            char extimeunit[4] = { 0 };
            char str[128] = { 0 };

            int fs = fontSize * 2 / 3;
            textSize = ft2->getTextSize(str, fs, -1, &baseline);
            cv::Point lt(0, mat.rows - fs - 20 * ratio);
            cv::Point lt2(0, lt.y - 2 * ratio);
            cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);

            if (rb.x > (int)width - 1)
            {
                rb.x = (int)width - 1;
            }
            if (rb.y > (int)height - 1)
            {
                rb.y = (int)height - 1;
            }
            // Blend a white rectangle under the text for readability.
            cv::Mat roi = mat(cv::Rect(lt2, rb));
            cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
            double alpha = 0.5;
            cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);

            // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
            ft2->putText(mat, str, lt, fs, scalarRed, -1, cv::LINE_AA, false);
            // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
        }
    }
// #endif // OUTPUT_CAMERA_DBG_INFO

    // Draw each configured OSD string at its corner, offset by (x, y) scaled
    // by the frame ratio.
    for (vector::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it)
    {
        if (it->text.empty())
        {
            continue;
        }

#ifdef _DEBUG
        if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
        {
            int aa = 0;
        }
#endif

        textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline);
        XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline);

        if (it->alignment == OSD_ALIGNMENT_TOP_LEFT)
        {
            pt.x = it->x * ratio;
            pt.y = it->y * ratio;
        }
        else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT)
        {
            pt.x = width - textSize.width - it->x * ratio;
            pt.y= it->y * ratio;
        }
        else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
        {
            pt.x = width - textSize.width - it->x * ratio;
            pt.y = height - it->y * ratio - textSize.height - baseline;
        }
        else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT)
        {
            pt.x = it->x * ratio;
            pt.y = height - it->y * ratio - textSize.height - baseline;
        }

        // cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height);
        // cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2);
        DrawOutlineText(ft2, mat, it->text, pt, fontSize,
scalarWhite, thickness); + } + + std::vector params; + params.push_back(cv::IMWRITE_JPEG_QUALITY); + params.push_back((int)((uint32_t)mPhotoInfo.quality)); + + bool res = false; + std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo)); + +#ifdef OUTPUT_CAMERA_DBG_INFO + + bool shouldRetry = false; +#if 0 + if (mCamera != NULL) { + NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); + + if (captureResult.avgY < MIN_LIGHT_Y) + { + if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + + // mPhotoInfo.usingRawFormat = 1; + } + } + else if (captureResult.avgY > MAX_LIGHT_Y) + { + if (mPhotoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1)) + { + shouldRetry = true; + char presetBuf[16] = {0}; + snprintf(presetBuf, sizeof(presetBuf), "%02X", mPhotoInfo.retries); + // replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(mPhotoInfo.retries) + ".jpg"); + replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_")); + XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it", + (uint32_t) captureResult.avgY); + } + + mPhotoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY)); + } + } +#endif + +#endif // OUTPUT_CAMERA_DBG_INFO + + if (!std::filesystem::exists(std::filesystem::path(fullPath))) + { + bool res = cv::imwrite(fullPath.c_str(), mat, params); + if (!res) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to write photo: %s", fullPath.c_str()); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to write photo: %s", 
fullPath.c_str()); + } +#ifdef OUTPUT_CAMERA_DBG_INFO + if (shouldRetry) + { + TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs); + } + else + { + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); + } +#else + TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs); +#endif + } + else + { + ALOGI("Photo file exists: %s", mPath.c_str()); + } + + return res; +} + +bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat) +{ + int baseline = 0; + cv::Size textSize; + double height = mat.rows; + double width = mat.cols; + // double ratio = std::min(height / 1024, width / 1920); + double ratio = height / 1024.0; + int thickness = round(1.4 * ratio); + if (thickness < 1) thickness = 1; + else if (thickness > 5) thickness = 5; + cv::Scalar scalarWhite(255, 255, 255); // white + int fontSize = (int)(28.0 * ratio); + cv::Point pt; + + std::string fontPath; + if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc")) + { + fontPath = "/system/fonts/NotoSansCJK-Regular.ttc"; + } + else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc")) + { + fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc"; + } + else + { + fontPath = m_appPath+ "fonts/Noto.otf"; + } + cv::Ptr ft2; + ft2 = cv::ft::createFreeType2(); + ft2->loadFontData(fontPath.c_str(), 0); + // cv::Rect rc(0, 0, mat.cols, mat.rows); + // cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED); + std::vector objs; + + if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0)) + { + XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled"); + + // visualize(ncnnPath.c_str(), in); +#ifdef _DEBUG + double startTime = ncnn::get_current_time(); +#endif // _DEBUG + + bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs); +#ifdef _DEBUG + double elasped = 
ncnn::get_current_time() - startTime; + // __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped); +#endif // _DEBUG +#ifdef _DEBUG + ALOGI( "NCNN recognization: %.2fms res=%d", elasped, ((detected && !objs.empty()) ? 1 : 0)); +#endif + if (detected && !objs.empty()) + { + cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16); + cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16); + float minSizeW = m_pRecognizationCfg->minSize > 0 ? (photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0; + float minSizeH = m_pRecognizationCfg->minSize > 0 ? (photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0; + + for (std::vector::const_iterator it = objs.cbegin(); it != objs.cend();) + { + if (it->label >= m_pRecognizationCfg->items.size()) + { + it = objs.erase(it); + continue; + } + + const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label]; + if (item.enabled == 0 || it->prob < item.prob) + { + it = objs.erase(it); + continue; + } + + if (m_pRecognizationCfg->minSize > 0) + { + if (it->w < minSizeW || it->h < minSizeH) + { + it = objs.erase(it); + continue; + } + } + + if ((photoInfo.recognization & 0x2) != 0) + { + cv::Rect rc(it->x, it->y, it->w, it->h); + cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness); + textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline); + textSize.height += baseline; + if (it->y > textSize.height) + { + pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness; + } + else if (mat.rows - it->y - it->h > textSize.height) + { + pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness; + } + else + { + // Inner + pt.y = it->y + 4 + m_pRecognizationCfg->thickness; + } + if (mat.cols - it->x > textSize.width) + { + pt.x = it->x; + 
} + else + { + pt.x = it->x + it->w - textSize.width; + } + +#ifdef OUTPUT_CAMERA_DBG_INFO + char buf[128]; + snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)", + it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height); + XYLOG(XYLOG_SEVERITY_DEBUG, buf); +#endif + ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true); + } + ++it; + } + } + } + else + { + XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled"); + } + +// #ifdef OUTPUT_CAMERA_DBG_INFO + + if (!cameraInfo.empty()) + { + // NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult(); + + if (photoInfo.outputDbgInfo != 0) + { + cv::Scalar scalarRed(0, 0, 255); // red + + int fs = fontSize * 2 / 3; + textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline); + cv::Point lt(0, mat.rows - fs - 20 * ratio); + cv::Point lt2(0, lt.y - 2 * ratio); + cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio); + + if (rb.x > (int)width - 1) + { + rb.x = (int)width - 1; + } + if (rb.y > (int)height - 1) + { + rb.y = (int)height - 1; + } + cv::Mat roi = mat(cv::Rect(lt2, rb)); + cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite); + double alpha = 0.5; + cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi); + + // cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1); + ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false); + + // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); + } + } +// #endif // OUTPUT_CAMERA_DBG_INFO + + for (vector::const_iterator it = osds.cbegin(); it != osds.cend(); ++it) + { + if (it->text.empty()) + { + continue; + } + +#ifdef _DEBUG + if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + int aa = 0; + } +#endif + + textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline); + XYLOG(XYLOG_SEVERITY_DEBUG, "%s font 
Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); + + if (it->alignment == OSD_ALIGNMENT_TOP_LEFT) + { + pt.x = it->x * ratio; + pt.y = it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y= it->y * ratio; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT) + { + pt.x = width - textSize.width - it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT) + { + pt.x = it->x * ratio; + pt.y = height - it->y * ratio - textSize.height - baseline; + } + + // cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height); + // cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2); + DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness); + } + + std::vector params; + params.push_back(cv::IMWRITE_JPEG_QUALITY); + params.push_back((int)((uint32_t)photoInfo.quality)); + + bool res = false; + std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo)); + + if (!std::filesystem::exists(std::filesystem::path(fullPath))) + { +#ifdef _DEBUG + char log[256] = { 0 }; + strcpy(log, fullPath.c_str()); +#endif + bool res = cv::imwrite(fullPath.c_str(), mat, params); + if (!res) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size()); + } + TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size()); + } + + return res; +} + +bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId) +{ + XYLOG(XYLOG_SEVERITY_INFO, "RAW Capture finished: %u RES=%d", photoId, (result ? 
1 : 0)); + if (photoOrVideo) + { + if (result) + { + OnImageReady(mat); + } + else + { + std::vector objs; + TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs); + + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + if (closeThread.joinable()) + { + closeThread.detach(); + } + } + } + + return true; +} + +bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId) +{ + if (photoOrVideo) + { + } + else + { + mPhotoInfo.photoTime = time(NULL); + CPhoneCamera* pCamera = NULL; + + std::vector objs; + std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo); + if (result) + { + std::rename(path, fullPath.c_str()); + } + TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs); + + bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + m_threadClose.swap(closeThread); + } + + return result; +} + +void CPhoneDevice::onError(const std::string& msg) +{ + if (mCamera == NULL) + { + int aa = 0; + return; + } + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Take Photo (IMGID=%u): %s", mPhotoInfo.photoId, msg.c_str()); + + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + TakePhotoCb(0, mPhotoInfo, mPath, 0); + + bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + // closeThread.detach(); + m_threadClose.swap(closeThread); +} + +void CPhoneDevice::onDisconnected(ACameraDevice* device) +{ + if (mCamera == NULL) + { + return; + } + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Take Photo (IMGID=%u) as for Disconnection", mPhotoInfo.photoId); + + CPhoneCamera* pCamera = mCamera; + mCamera = NULL; + + TakePhotoCb(0, mPhotoInfo, mPath, 0); + + 
bool turnOffOtg = (mPhotoInfo.usbCamera != 0); + std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg); + // closeThread.detach(); + m_threadClose.swap(closeThread); +} + +std::string CPhoneDevice::GetFileName() const +{ + return mPath; +} + +std::string CPhoneDevice::GetVersion() const +{ + // FOR OSD + string version = std::to_string(mVersionCode / 100000); + version += "."; + version += std::to_string((mVersionCode % 100000) / 1000); + version += "."; + version += std::to_string(mVersionCode % 1000); + + return version; +} + +void CPhoneDevice::UpdatePosition(double lon, double lat, double radius, time_t ts) +{ + if (m_listener != NULL) + { + if (shouldConvertPosition(lat, lon)) + { + transformPosition(lat, lon); + } + return m_listener->OnPositionDataArrived(lon, lat, radius, ts); + } +} + +void CPhoneDevice::TurnOnCameraPower(JNIEnv* env) +{ + m_powerLocker.lock(); + if (mCameraPowerCount == 0) + { + GpioControl::setCam3V3Enable(true); + } + mCameraPowerCount++; + m_powerLocker.unlock(); +} + +void CPhoneDevice::TurnOffCameraPower(JNIEnv* env) +{ + m_powerLocker.lock(); + if (mCameraPowerCount > 0) + { + mCameraPowerCount--; + if (mCameraPowerCount == 0) + { + GpioControl::setCam3V3Enable(false); + } + } + m_powerLocker.unlock(); +} + +void CPhoneDevice::TurnOnOtg(JNIEnv* env) +{ + m_powerLocker.lock(); + if (mOtgCount == 0) + { + ALOGD("setOtgState 1"); + GpioControl::setOtgState(true); + } + mOtgCount++; + m_powerLocker.unlock(); +} + +void CPhoneDevice::TurnOffOtg(JNIEnv* env) +{ + m_powerLocker.lock(); + if (mOtgCount > 0) + { + mOtgCount--; + if (mOtgCount == 0) + { + ALOGD("setOtgState 0"); + GpioControl::setOtgState(false); + } + } + m_powerLocker.unlock(); +} + +void CPhoneDevice::UpdateSignalLevel(int signalLevel) +{ + m_signalLevel = signalLevel; + m_signalLevelUpdateTime = time(NULL); +} + +void CPhoneDevice::UpdateSimcard(const std::string& simcard) +{ + m_simcard = simcard; +} + +bool 
CPhoneDevice::ProcessRawCapture(bool result, int numberOfCaptures, const std::string& pathsJoinedByTab, bool frontCamera, int rotation, long photoId) +{ + std::vector paths = split(pathsJoinedByTab, "\t"); + + if (paths.empty()) + { + cv::Mat mat; + OnCaptureReady(true, false, mat, (unsigned long)photoId); + return false; + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Start Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + + hdrplus::hdrplus_pipeline pipeline; + cv::Mat mat; + pipeline.run_pipeline(paths, 0, mat); + XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + + mat = convert16bit2_8bit_(mat.clone()); + + if (rotation >= 0) + { + if (rotation == 90) + { + cv::Mat tempPic; + cv::transpose(mat, tempPic); + cv::flip(tempPic, mat, 1); + } + else if (rotation == 180) + { + if (frontCamera) + { + flip(mat, mat, 0); + + } + else + { + cv::flip(mat, mat, -1); + } + } + else if (rotation == 270) + { + cv::Mat tempPic; + cv::transpose(mat, tempPic); + cv::flip(tempPic, mat, 0); + } + + XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + } + cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR); + + XYLOG(XYLOG_SEVERITY_ERROR, "Finish Processing Raw Capture CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId); + +#ifdef _DEBUG + // cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR); + // cv::imwrite("/sdcard/com.xypower.mpapp/tmp/final.jpg", mat); +#endif + + OnCaptureReady(true, result != JNI_FALSE, mat, (unsigned long)photoId); + return true; +} + +int CPhoneDevice::GetIceData(IDevice::ICE_INFO *iceInfo, IDevice::ICE_TAIL *iceTail, SENSOR_PARAM sensorParam) +{ + Collect_sensor_data(); //15s + Data_DEF airt; + //++等值覆冰厚度, 综合悬挂载荷, 不均衡张力差 置0 + iceInfo->equal_icethickness = 0; + iceInfo->tension = 0; + iceInfo->tension_difference = 0; + + GetPullValue(2, &airt); + 
iceInfo->t_sensor_data[0].original_tension = airt.EuValue; + GetPullValue(4, &airt); + iceInfo->t_sensor_data[1].original_tension = airt.EuValue; + GetPullValue(5, &airt); + iceInfo->t_sensor_data[2].original_tension = airt.EuValue; + + + GetAngleValue(6, &airt, 0); + iceInfo->t_sensor_data[0].deflection_angle = airt.EuValue; + GetAngleValue(6, &airt, 1); + iceInfo->t_sensor_data[0].windage_yaw_angle = airt.EuValue; + GetAngleValue(7, &airt, 0); + iceInfo->t_sensor_data[1].deflection_angle = airt.EuValue; + GetAngleValue(7, &airt, 1); + iceInfo->t_sensor_data[1].windage_yaw_angle = airt.EuValue; + GetAngleValue(13, &airt, 0); + iceInfo->t_sensor_data[2].deflection_angle = airt.EuValue; + GetAngleValue(13, &airt, 1); + iceInfo->t_sensor_data[2].windage_yaw_angle = airt.EuValue; + + GetWindSpeedData(&airt); + iceTail->instantaneous_windspeed = airt.EuValue; + GetWindDirectionData(&airt); + iceTail->instantaneous_winddirection = airt.EuValue;//需求无符号整数给出浮点数 + GetAirTempData(&airt); + iceTail->air_temperature = airt.EuValue; + GetHumidityData(&airt); + iceTail->humidity = airt.EuValue;//需求无符号整数给出浮点数 + + return true; +} + + + +int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo) +{ + Collect_sensor_data(); //15s + + Data_DEF airt; + GetWeatherData(&airt, 0); + weatherInfo->air_temperature = airt.EuValue; + + if (airt.AiState == -1) return false; + + GetWeatherData(&airt, 1); + weatherInfo->humidity = airt.EuValue; + GetWeatherData(&airt, 2); + weatherInfo->avg_windspeed_10min = airt.EuValue; + weatherInfo->extreme_windspeed = airt.EuValue; + weatherInfo->standard_windspeed = airt.EuValue; + GetWeatherData(&airt, 3); + weatherInfo->avg_winddirection_10min = airt.EuValue; + GetWeatherData(&airt, 4); + weatherInfo->precipitation = airt.EuValue; + GetWeatherData(&airt, 5); + weatherInfo->air_pressure = airt.EuValue; + GetWeatherData(&airt, 6); + weatherInfo->radiation_intensity = airt.EuValue; + return true; + +} + +#ifdef USING_N938 +bool 
CPhoneDevice::OpenSensors() +{ + GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, true ? 1 : 0); + GpioControl::setInt(CMD_SET_485_EN_STATE, true ? 1 : 0); + int igpio; + GpioControl::setInt(CMD_SET_WTH_POWER, 1); + GpioControl::setInt(CMD_SET_PULL_POWER, 1); + GpioControl::setInt(CMD_SET_ANGLE_POWER, 1); + GpioControl::setInt(CMD_SET_OTHER_POWER, 1); + GpioControl::setInt(CMD_SET_PIC1_POWER, 1); +} + +bool CPhoneDevice::CloseSensors() +{ + return false; +} + +#else +bool CPhoneDevice::OpenSensors() +{ + return false; +} + +bool CPhoneDevice::CloseSensors() +{ + return false; +} + +#endif diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h index 0310d2d2..35e970df 100644 --- a/app/src/main/cpp/PhoneDevice.h +++ b/app/src/main/cpp/PhoneDevice.h @@ -205,7 +205,7 @@ public: virtual bool UpdateSchedules(); virtual bool QuerySystemProperties(map& properties); virtual bool InstallAPP(const std::string& path, unsigned int delayedTime); - virtual bool Reboot(int resetType); + virtual bool Reboot(int resetType, const std::string& reason); virtual bool EnableGPS(bool enabled); virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging); virtual bool RequestPosition(); @@ -293,7 +293,7 @@ protected: void handleTimerImpl(TIMER_CONTEXT* context); void static handleRebootTimer(union sigval v); // void handleRebootTimerImpl(); - void RestartApp(int rebootType, long timeout); + void RestartApp(int rebootType, long timeout, const std::string& reason); int QueryBatteryVoltage(int retries); diff --git a/app/src/main/java/com/xypower/mpapp/MainActivity.java b/app/src/main/java/com/xypower/mpapp/MainActivity.java index ddd3bb23..1a05af38 100644 --- a/app/src/main/java/com/xypower/mpapp/MainActivity.java +++ b/app/src/main/java/com/xypower/mpapp/MainActivity.java @@ -163,6 +163,10 @@ public class MainActivity extends AppCompatActivity { Intent intent = getIntent(); final int noDelay = intent.getIntExtra("noDelay", 0); int rebootFlag = 
intent.getIntExtra("reboot", 0); + String reason = intent.getStringExtra("reason"); + if (!TextUtils.isEmpty(reason)) { + Log.w(TAG, "App Started with reason: " + reason); + } if (rebootFlag == 1) { Log.i(TAG, "After Reboot"); } diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java index e848303c..48a4c350 100644 --- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java +++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java @@ -9,6 +9,7 @@ import android.app.NotificationManager; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; +import android.content.ComponentCallbacks2; import android.content.ComponentName; import android.content.Context; import android.content.Intent; @@ -156,6 +157,42 @@ public class MicroPhotoService extends Service { public MicroPhotoService() { } + + @Override + public void onTrimMemory(int level) { + if (level >= ComponentCallbacks2.TRIM_MEMORY_MODERATE) { + // Clear the caches. Note all pending requests will be removed too. 
+ final Context context = getApplicationContext(); + try { + infoLog("Restart MpApp as for TrimMemory"); + mHander.postDelayed(new Runnable() { + @Override + public void run() { + restartApp(context, MicroPhotoContext.PACKAGE_NAME_MPAPP); + } + }, 1000); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + } + + @Override + public void onLowMemory() { + final Context context = getApplicationContext(); + try { + Intent intent = new Intent(this, MainActivity.class); + int noDelay = 1; + intent.putExtra("noDelay", noDelay); + PendingIntent pi = PendingIntent.getActivity(this,0, intent,0); + AlarmManager alarmManager=(AlarmManager)getSystemService(ALARM_SERVICE); + alarmManager.set(AlarmManager.RTC_WAKEUP,System.currentTimeMillis() + 5000, pi); + infoLog("Restart MpApp after 5s as for LowMemory"); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + @Override public IBinder onBind(Intent intent) { // TODO: Return the communication channel to the service. diff --git a/common/src/main/java/com/xypower/common/MicroPhotoContext.java b/common/src/main/java/com/xypower/common/MicroPhotoContext.java index 1cbad5ee..9246d6d9 100644 --- a/common/src/main/java/com/xypower/common/MicroPhotoContext.java +++ b/common/src/main/java/com/xypower/common/MicroPhotoContext.java @@ -362,7 +362,7 @@ public class MicroPhotoContext { } } - public static void restartMpApp(Context context) { + public static void restartMpApp(Context context, String reason) { /* Context context = MicroPhotoService.this.getApplicationContext(); Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName()); @@ -376,10 +376,10 @@ public class MicroPhotoContext { */ - restartApp(context, PACKAGE_NAME_MPAPP); + restartApp(context, PACKAGE_NAME_MPAPP, reason); } - public static void restartApp(Context context, String packageName) { + public static void restartApp(Context context, String packageName, String reason) { /* Context context = 
MicroPhotoService.this.getApplicationContext(); Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName()); @@ -398,6 +398,9 @@ public class MicroPhotoContext { Intent intent = new Intent(ACTION_RESTART_MP); intent.putExtra("noDelay", 1); + if (!TextUtils.isEmpty(reason)) { + intent.putExtra("reason", reason); + } intent.setPackage(PACKAGE_NAME_MPAPP); context.sendBroadcast(intent); @@ -407,6 +410,9 @@ public class MicroPhotoContext { Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName); if (intent != null) { intent.putExtra("noDelay", 1); + if (!TextUtils.isEmpty(reason)) { + intent.putExtra("reason", reason); + } intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); context.startActivity(intent); } diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/AppMaster.java b/mpmaster/src/main/java/com/xypower/mpmaster/AppMaster.java index 7a6858b8..a52d1210 100644 --- a/mpmaster/src/main/java/com/xypower/mpmaster/AppMaster.java +++ b/mpmaster/src/main/java/com/xypower/mpmaster/AppMaster.java @@ -900,7 +900,7 @@ public class AppMaster { String packageName = jsonObject.optString("packageName", null); if (packageName != null) { - MicroPhotoContext.restartApp(mService.getApplicationContext(), packageName); + MicroPhotoContext.restartApp(mService.getApplicationContext(), packageName, "Config Updated"); } } } catch (Exception ex) { @@ -952,7 +952,7 @@ public class AppMaster { } catch (Exception ex) { } - MicroPhotoContext.restartMpApp(context); + MicroPhotoContext.restartMpApp(context, "CMA Updated"); } }); @@ -978,7 +978,7 @@ public class AppMaster { MicroPhotoContext.saveMpAppConfig(context, appConfig); - MicroPhotoContext.restartMpApp(mService.getApplicationContext()); + MicroPhotoContext.restartMpApp(mService.getApplicationContext(), "HB Duration Updated"); return true; } diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/MainActivity.java b/mpmaster/src/main/java/com/xypower/mpmaster/MainActivity.java index 
2712bae7..7da6d20f 100644 --- a/mpmaster/src/main/java/com/xypower/mpmaster/MainActivity.java +++ b/mpmaster/src/main/java/com/xypower/mpmaster/MainActivity.java @@ -130,7 +130,7 @@ public class MainActivity extends AppCompatActivity { } break; case R.id.action_reboot_mp:{ - MicroPhotoContext.restartMpApp(getApplicationContext()); + MicroPhotoContext.restartMpApp(getApplicationContext(), "Manual Restart from MpMst"); } break; case R.id.action_reboot_mpmst:{ diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java index 4f65efad..a69a2819 100644 --- a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java +++ b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java @@ -141,6 +141,20 @@ public class MpMasterService extends Service { public MpMasterService() { } + @Override + public void onLowMemory() { + final Context context = getApplicationContext(); + try { + Intent intent = new Intent(this, MainActivity.class); + PendingIntent pi = PendingIntent.getActivity(this,0, intent,0); + AlarmManager alarmManager=(AlarmManager)getSystemService(ALARM_SERVICE); + alarmManager.set(AlarmManager.RTC_WAKEUP,System.currentTimeMillis() + 5000, pi); + logger.info("Restart MpApp after 5s as for LowMemory"); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + @Override public IBinder onBind(Intent intent) { // TODO: Return the communication channel to the service. 
@@ -376,7 +390,7 @@ public class MpMasterService extends Service { if (mPreviousMpHbTime <= ts && ts - mPreviousMpHbTime > mMpHeartbeatDuration * 2) { // MpApp is not running if (ts - mTimeToStartMpApp >= 30000) { - MicroPhotoContext.restartMpApp(context); + MicroPhotoContext.restartMpApp(context, "MpMST Keep Alive Detection"); mTimeToStartMpApp = ts; logger.warning("Restart MpAPP as it is NOT Running Prev MPAPP HB=" + Long.toString((ts - mPreviousMpHbTime) / 1000) + " MPAPP HBDuration=" + Long.toString(mMpHeartbeatDuration)); @@ -578,7 +592,7 @@ public class MpMasterService extends Service { int restart = intent.getIntExtra("restart", 0); mService.logger.info("Update Config Fired ACTION=" + action + " restart=" + restart); if (restart != 0) { - MicroPhotoContext.restartApp(context, context.getPackageName()); + MicroPhotoContext.restartApp(context, context.getPackageName(), "Config Updated"); } else { mService.loadConfig(); mService.registerHeartbeatTimer(); @@ -1023,7 +1037,7 @@ public class MpMasterService extends Service { th.start(); } - public void reboot(final int rebootType) { + public void reboot(final int rebootType, String reason) { Runnable runnable = new Runnable() { @Override @@ -1031,7 +1045,7 @@ public class MpMasterService extends Service { if (rebootType == 0) { logger.warning("Recv REBOOT MpMst APP cmd"); Context context = MpMasterService.this.getApplicationContext(); - MicroPhotoContext.restartApp(context, context.getPackageName()); + MicroPhotoContext.restartApp(context, context.getPackageName(), reason); } else { logger.warning("Recv RESET cmd"); @@ -1167,7 +1181,7 @@ public class MpMasterService extends Service { } copyAssetsDir(context, "mpapp", destPath); - MicroPhotoContext.restartMpApp(context); + MicroPhotoContext.restartMpApp(context, "FIRST Config Init"); } }; diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/UpdateReceiver.java b/mpmaster/src/main/java/com/xypower/mpmaster/UpdateReceiver.java index 9fd79613..a51f5eb4 100644 --- 
a/mpmaster/src/main/java/com/xypower/mpmaster/UpdateReceiver.java +++ b/mpmaster/src/main/java/com/xypower/mpmaster/UpdateReceiver.java @@ -31,7 +31,7 @@ public class UpdateReceiver extends BroadcastReceiver { MpMasterService.resetVersions(context); if (packageName.equals("package:" + targetPackageName)) { // SysApi.enableApp(context, targetPackageName); - restartAPP(context, targetPackageName); + restartAPP(context, targetPackageName, "App Upgraded"); } } else if (action.equals(Intent.ACTION_PACKAGE_ADDED)) {// Install broadcast Log.e(TAG, "onReceive:Installed and Start the App:" + targetPackageName); @@ -39,7 +39,7 @@ public class UpdateReceiver extends BroadcastReceiver { if (packageName.equals("package:" + targetPackageName)) { /*SystemUtil.reBootDevice();*/ // SysApi.enableApp(context, targetPackageName); - restartAPP(context, targetPackageName); + restartAPP(context, targetPackageName, "App Installed"); } } else if (action.equals(Intent.ACTION_PACKAGE_REMOVED)) { // Uninstall // Logger.e(TAG, "onReceive:uninstall" + packageName); @@ -52,7 +52,7 @@ public class UpdateReceiver extends BroadcastReceiver { MpMasterService.resetVersions(context); if (packageName.equals("package:" + targetPackageName)) { // SysApi.enableApp(context, targetPackageName); - tryToRestartApp(context, targetPackageName); + tryToRestartApp(context, targetPackageName, "App Upgraded"); } } else if (action.equals(Intent.ACTION_PACKAGE_ADDED)) {// Install broadcast Log.e(TAG, "onReceive:Installed and Start the App:" + targetPackageName); @@ -60,36 +60,39 @@ public class UpdateReceiver extends BroadcastReceiver { if (packageName.equals("package:" + targetPackageName)) { /*SystemUtil.reBootDevice();*/ // SysApi.enableApp(context, targetPackageName); - tryToRestartApp(context, targetPackageName); + tryToRestartApp(context, targetPackageName, "App Installed"); } } else if (action.equals(Intent.ACTION_PACKAGE_REMOVED)) { // Uninstall // Logger.e(TAG, "onReceive:uninstall" + packageName); } } - 
private void tryToRestartApp(final Context context, final String targetPackageName) { + private void tryToRestartApp(final Context context, final String targetPackageName, String reason) { Handler handler = new Handler(); handler.postDelayed(new Runnable() { @Override public void run() { if (TextUtils.equals(targetPackageName, APP_PACKAGE_MPAPP)) { - startMpApp(context); + startMpApp(context, reason); } else { - restartAPP(context, targetPackageName); + restartAPP(context, targetPackageName, reason); } } }, 10000); } - public static void restartAPP(Context context, String packageName) { + public static void restartAPP(Context context, String packageName, String reason) { Intent intent = context.getPackageManager() .getLaunchIntentForPackage(packageName); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); + if (!TextUtils.isEmpty(reason)) { + intent.putExtra("reason", reason); + } context.startActivity(intent); // ActManager.getAppManager().finishAllActivity(); } - public void startMpApp(final Context context) { + private void startMpApp(final Context context, String reason) { try { if (MicroPhotoContext.isAppAlive(context, MicroPhotoContext.PACKAGE_NAME_MPAPP)) { @@ -107,7 +110,7 @@ public class UpdateReceiver extends BroadcastReceiver { if ((ts - modifiedTimeOfDb) > 12 * 1000) { // greater than 12 seconds // logger.warning("Start MpAPP as it is NOT running"); - MicroPhotoContext.restartMpApp(context); + MicroPhotoContext.restartMpApp(context, reason); } } catch (Exception ex) {