diff --git a/app/build.gradle b/app/build.gradle index f0014d4f..331168ac 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -5,7 +5,7 @@ plugins { // 10,00,000 major-minor-build def AppMajorVersion = 1 def AppMinorVersion = 3 -def AppBuildNumber = 4 +def AppBuildNumber = 196 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber @@ -24,15 +24,9 @@ android { defaultConfig { applicationId "com.xypower.mpapp" - if (com.android.build.OutputFile.ABI.equalsIgnoreCase('arm64-v8a')) { - minSdk COMPILE_MIN_SDK_VERSION as int - //noinspection ExpiredTargetSdkVersion - targetSdk TARGET_SDK_VERSION as int - } else { - minSdk COMPILE_MIN_SDK_VERSION_N938 as int - //noinspection ExpiredTargetSdkVersion - targetSdk TARGET_SDK_VERSION_N938 as int - } + minSdk COMPILE_MIN_SDK_VERSION as int + //noinspection ExpiredTargetSdkVersion + targetSdk TARGET_SDK_VERSION as int versionCode AppVersionCode versionName AppVersionName @@ -88,6 +82,7 @@ android { enable isReleaseTask reset() include "armeabi-v7a", "arm64-v8a" + // include "arm64-v8a" universalApk false } } @@ -103,7 +98,7 @@ android { def abi = output.getFilter(com.android.build.OutputFile.ABI) if (abi == null) abi = "all" if (abi.contains("v7a")) prevFileName = "N938" - def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk" + def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk" outputFileName = fileName } } @@ -131,10 +126,10 @@ android { dependencies { - implementation 'androidx.legacy:legacy-support-v4:1.0.0' - implementation 'androidx.legacy:legacy-support-v13:1.0.0' // implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" implementation 'androidx.appcompat:appcompat:1.0.0' + // implementation "androidx.core:core:1.10.0" // 使用最新版本 + implementation 'androidx.fragment:fragment:1.3.6' implementation 'androidx.constraintlayout:constraintlayout:2.1.4' implementation 'com.google.android.material:material:1.8.0' implementation project(path: ':common') diff --git a/app/libs/arm64-v8a/libavcodec.so b/app/libs/arm64-v8a/libavcodec.so new file mode 100644 index 00000000..42246066 Binary files /dev/null and b/app/libs/arm64-v8a/libavcodec.so differ diff --git a/app/libs/arm64-v8a/libavdevice.so b/app/libs/arm64-v8a/libavdevice.so new file mode 100644 index 00000000..b9e8a65b Binary files /dev/null and b/app/libs/arm64-v8a/libavdevice.so differ diff --git a/app/libs/arm64-v8a/libavfilter.so b/app/libs/arm64-v8a/libavfilter.so new file mode 100644 index 00000000..bd1c4e5c Binary files /dev/null and b/app/libs/arm64-v8a/libavfilter.so differ diff --git a/app/libs/arm64-v8a/libavformat.so b/app/libs/arm64-v8a/libavformat.so new file mode 100644 index 00000000..07f6c139 Binary files /dev/null and b/app/libs/arm64-v8a/libavformat.so differ diff --git a/app/libs/arm64-v8a/libavutil.so b/app/libs/arm64-v8a/libavutil.so new file mode 100644 index 00000000..485ea360 Binary files /dev/null and b/app/libs/arm64-v8a/libavutil.so differ diff --git a/app/libs/arm64-v8a/libswresample.so b/app/libs/arm64-v8a/libswresample.so new file mode 100644 index 00000000..de0f2b25 Binary files /dev/null and b/app/libs/arm64-v8a/libswresample.so differ diff --git a/app/libs/arm64-v8a/libswscale.so b/app/libs/arm64-v8a/libswscale.so new file mode 100644 index 
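Aside on the two hunks above: with AppMajorVersion = 1, AppMinorVersion = 3 and AppBuildNumber = 196, the scheme versionCode = major*100000 + minor*1000 + build gives 1*100000 + 3*1000 + 196 = 103196 and versionName "1.3.196", so the build number can grow to 999 before it would spill into the minor-version digits. The libav*, libsw* and libx264 .so files added below are prebuilt FFmpeg/x264 shared libraries for both armeabi-v7a and arm64-v8a; the CMakeLists.txt changes later in this patch link against them (avcodec avfilter avformat avutil swresample swscale x264) behind the new USING_FFMPEG define, and JNI_OnLoad/JNI_OnUnload call avformat_network_init()/avformat_network_deinit().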
00000000..1301fac8 Binary files /dev/null and b/app/libs/arm64-v8a/libswscale.so differ diff --git a/app/libs/arm64-v8a/libx264.so b/app/libs/arm64-v8a/libx264.so new file mode 100644 index 00000000..15f9d75b Binary files /dev/null and b/app/libs/arm64-v8a/libx264.so differ diff --git a/app/libs/armeabi-v7a/libavcodec.so b/app/libs/armeabi-v7a/libavcodec.so new file mode 100644 index 00000000..e0e7a188 Binary files /dev/null and b/app/libs/armeabi-v7a/libavcodec.so differ diff --git a/app/libs/armeabi-v7a/libavdevice.so b/app/libs/armeabi-v7a/libavdevice.so new file mode 100644 index 00000000..304f7564 Binary files /dev/null and b/app/libs/armeabi-v7a/libavdevice.so differ diff --git a/app/libs/armeabi-v7a/libavfilter.so b/app/libs/armeabi-v7a/libavfilter.so new file mode 100644 index 00000000..d0d5dc13 Binary files /dev/null and b/app/libs/armeabi-v7a/libavfilter.so differ diff --git a/app/libs/armeabi-v7a/libavformat.so b/app/libs/armeabi-v7a/libavformat.so new file mode 100644 index 00000000..5e4c9f22 Binary files /dev/null and b/app/libs/armeabi-v7a/libavformat.so differ diff --git a/app/libs/armeabi-v7a/libavutil.so b/app/libs/armeabi-v7a/libavutil.so new file mode 100644 index 00000000..e15b72cb Binary files /dev/null and b/app/libs/armeabi-v7a/libavutil.so differ diff --git a/app/libs/armeabi-v7a/libswresample.so b/app/libs/armeabi-v7a/libswresample.so new file mode 100644 index 00000000..50f860cd Binary files /dev/null and b/app/libs/armeabi-v7a/libswresample.so differ diff --git a/app/libs/armeabi-v7a/libswscale.so b/app/libs/armeabi-v7a/libswscale.so new file mode 100644 index 00000000..0663239a Binary files /dev/null and b/app/libs/armeabi-v7a/libswscale.so differ diff --git a/app/libs/armeabi-v7a/libx264.so b/app/libs/armeabi-v7a/libx264.so new file mode 100644 index 00000000..139305c5 Binary files /dev/null and b/app/libs/armeabi-v7a/libx264.so differ diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index f95f9c1d..0a55b15e 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -1,6 +1,8 @@ + xmlns:tools="http://schemas.android.com/tools" + android:sharedUserId="com.xypower.mp" + tools:ignore="Deprecated"> @@ -10,9 +12,10 @@ - + @@ -55,6 +58,7 @@ + @@ -63,14 +67,23 @@ tools:ignore="ProtectedPermissions" /> + tools:ignore="ProtectedPermissions" /> + + + + + + + + + + - - + android:exported="true" > + - - - - - - - - - + - + + + + + diff --git a/app/src/main/assets/eth.sh b/app/src/main/assets/eth.sh new file mode 100644 index 00000000..009279de --- /dev/null +++ b/app/src/main/assets/eth.sh @@ -0,0 +1,227 @@ +#!/system/bin/sh + +# ============================================== +# Configuration parameters - modify as needed +# ============================================== +ETH_IP="192.168.68.91" # Ethernet IP address +ETH_NETMASK="24" # Subnet mask (CIDR format) +ETH_NETWORK="192.168.68.0" # Network address +ETH_BROADCAST="192.168.68.255" # Broadcast address +ETH_GATEWAY="192.168.68.1" # Default gateway +ROUTE_TABLE="20" # Routing table number +MAX_INIT_WAIT=150 # Maximum seconds to wait for ethernet interface +MAX_UP_WAIT=10 # Maximum seconds to wait for interface to come UP +MAX_ROUTE_WAIT=5 # Maximum seconds to wait for routing rules + +# For debugging only - comment out in production +# set -x + +ANDROID_VERSION=$(getprop ro.build.version.release 2>/dev/null | cut -d '.' 
-f1)
+
+# Record script start time
+SCRIPT_START=$(date +%s)
+
+# Cleanup function - handles unexpected interruptions
+cleanup() {
+    echo "Script interrupted, cleaning up..." >&2
+    # Add additional cleanup code here if needed
+    exit 1
+}
+trap cleanup INT TERM
+
+# Get script directory for finding tools like ethtool
+SCRIPT_PATH="$0"
+# Ensure path is absolute
+case "$SCRIPT_PATH" in
+    /*) ;; # Already absolute path
+    *) SCRIPT_PATH="$PWD/$SCRIPT_PATH" ;;
+esac
+SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
+echo "Script directory detected as: $SCRIPT_DIR"
+
+# Only configure rp_filter for eth0 interface
+echo 0 > /proc/sys/net/ipv4/conf/eth0/rp_filter 2>/dev/null || true
+
+# Wait for eth0 interface to appear
+WAITED=0
+while [ $WAITED -lt $MAX_INIT_WAIT ]; do
+    if [ -d "/sys/class/net/eth0" ]; then
+        echo "eth0 found after $WAITED seconds"
+        break
+    fi
+    echo "Wait eth0... ($WAITED/$MAX_INIT_WAIT)"
+    sleep 0.1
+    WAITED=$((WAITED+1))
+done
+
+# Check if eth0 exists
+if ! [ -d "/sys/class/net/eth0" ]; then
+    echo "Error: eth0 not exists" >&2
+    exit 1
+fi
+
+# Check physical connection status
+if [ -f "/sys/class/net/eth0/carrier" ]; then
+    CARRIER=$(cat /sys/class/net/eth0/carrier)
+    echo "Physical connection status: $CARRIER (1=connected, 0=disconnected)"
+    if [ "$CARRIER" != "1" ]; then
+        echo "Warning: Ethernet physical connection may have issues, please check the cable" >&2
+    fi
+fi
+
+# Clear previous configuration
+/system/bin/ip link set eth0 down
+/system/bin/ip addr flush dev eth0
+/system/bin/ip route flush dev eth0
+/system/bin/ip route flush table $ROUTE_TABLE
+/system/bin/ip rule del to $ETH_NETWORK/$ETH_NETMASK 2>/dev/null || true
+
+# Configure physical layer with ethtool (while interface is DOWN)
+if [ -x "$SCRIPT_DIR/ethtool" ]; then
+    echo "Using ethtool from script directory: $SCRIPT_DIR/ethtool"
+    "$SCRIPT_DIR/ethtool" -s eth0 speed 10 duplex full autoneg off
+# Try alternative path next
+elif [ -x "/data/data/com.xypower.mpapp/files/ethtool" ]; then
+    echo "Configuring eth0 to 10Mbps full duplex..."
+    /data/data/com.xypower.mpapp/files/ethtool -s eth0 speed 10 duplex full autoneg off
+else
+    echo "Warning: ethtool not found, falling back to sysfs configuration" >&2
+    # Try sysfs configuration as fallback
+    if [ -f "/sys/class/net/eth0/speed" ]; then
+        echo "off" > /sys/class/net/eth0/autoneg 2>/dev/null || true
+        echo "10" > /sys/class/net/eth0/speed 2>/dev/null || true
+        echo "full" > /sys/class/net/eth0/duplex 2>/dev/null || true
+    fi
+fi
+
+# ====================================================
+# MTK Android 9 IP configuration with loss prevention
+# ====================================================
+
+# Configure IP address first while interface is DOWN
+echo "Setting IP address while interface is DOWN..."
+/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
+PRE_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
+echo "IP configuration before UP: $PRE_UP_IP (1=configured, 0=missing)"
+
+# Enable interface and wait for UP
+echo "Bringing up interface..."
+/system/bin/ip link set eth0 up
+if [ "$ANDROID_VERSION" = "9" ]; then
+    sleep 3
+else
+    # Use standard configuration for other devices
+    sleep 1
+fi
+
+# Check if IP was lost after interface UP (common issue on MTK devices)
+POST_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
+echo "IP configuration after UP: $POST_UP_IP (1=retained, 0=lost)"
+
+# IP address lost detection and recovery
+if [ "$PRE_UP_IP" = "1" ] && [ "$POST_UP_IP" = "0" ]; then
+    echo "Warning: IP address was lost after bringing interface up - MTK issue detected"
+    echo "Reapplying IP configuration..."
+    /system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
+
+    # Check if reapplied configuration worked
+    FIXED_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
+    echo "IP reapplication result: $FIXED_IP (1=success, 0=still missing)"
+
+    # If standard method fails, try MTK-specific approaches
+    if [ "$FIXED_IP" = "0" ]; then
+        echo "Standard IP configuration failed, trying MTK-specific methods"
+
+        # Try ifconfig if available (works better on some MTK devices)
+        if command -v ifconfig >/dev/null 2>&1; then
+            echo "Using ifconfig method..."
+            ifconfig eth0 $ETH_IP netmask 255.255.255.0 up
+            sleep 1
+        fi
+
+        # Try Android's netd service if available
+        if [ -x "/system/bin/ndc" ]; then
+            echo "Using MTK netd service..."
+            /system/bin/ndc network interface setcfg eth0 $ETH_IP 255.255.255.0 up
+            sleep 1
+        fi
+    fi
+fi
+
+# Use loop to wait for interface UP instead of fixed sleep
+WAITED=0
+while [ $WAITED -lt $MAX_UP_WAIT ]; do
+    # Check both link status and IP configuration
+    IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
+    IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
+
+    if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "1" ]; then
+        echo "Interface is UP with correct IP after $WAITED seconds"
+        break
+    fi
+
+    echo "Waiting for interface UP with IP... ($WAITED/$MAX_UP_WAIT)"
+
+    # If interface is UP but IP is missing, reapply IP
+    if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "0" ]; then
+        echo "Interface UP but IP missing, reapplying IP..."
+        /system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
+    fi
+
+    sleep 0.5
+    WAITED=$((WAITED+1))
+done
+
+# Final status check
+FINAL_IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
+FINAL_IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
+
+if [ "$FINAL_IF_STATUS" != "1" ] || [ "$FINAL_IP_STATUS" != "1" ]; then
+    echo "Warning: Failed to achieve stable interface state with IP" >&2
+    echo "Final interface status: $FINAL_IF_STATUS (1=UP, 0=DOWN)"
+    echo "Final IP status: $FINAL_IP_STATUS (1=configured, 0=missing)"
+    /system/bin/ip addr show eth0
+else
+    echo "Successfully configured eth0 with IP $ETH_IP"
+fi
+
+# First add to main routing table
+/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link
+
+# Then add to specified routing table
+/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link table $ROUTE_TABLE
+ADD_ROUTE_STATUS=$?
+ +if [ $ADD_ROUTE_STATUS -eq 0 ]; then + echo "Add route successfully" +else + echo "Failed to add route: $ADD_ROUTE_STATUS" >&2 +fi + +# Only clear ARP and neighbor cache for eth0 +/system/bin/ip neigh flush dev eth0 + +# Add routing rules - only flush cache once after rule is added +/system/bin/ip rule add from all to $ETH_NETWORK/$ETH_NETMASK lookup $ROUTE_TABLE prio 1000 +/system/bin/ip route flush cache dev eth0 + +# Only enable forwarding for eth0 interface +echo 1 > /proc/sys/net/ipv4/conf/eth0/forwarding 2>/dev/null || true + +# Wait for routing rules to take effect - using loop check instead of fixed wait +WAITED=0 +while [ $WAITED -lt $MAX_ROUTE_WAIT ]; do + if /system/bin/ip rule | grep -q "$ETH_NETWORK/$ETH_NETMASK"; then + echo "Routing rules are now effective after $WAITED seconds" + break + fi + echo "Waiting for routing rules to take effect... ($WAITED/$MAX_ROUTE_WAIT)" + sleep 0.5 + WAITED=$((WAITED+1)) +done + +# Display execution time +SCRIPT_END=$(date +%s) +TOTAL_TIME=$((SCRIPT_END - SCRIPT_START)) +echo "Total script execution time: $TOTAL_TIME seconds" +exit 0 diff --git a/app/src/main/assets/ethtool b/app/src/main/assets/ethtool new file mode 100644 index 00000000..c28aa430 Binary files /dev/null and b/app/src/main/assets/ethtool differ diff --git a/app/src/main/assets/ethtool-v7a b/app/src/main/assets/ethtool-v7a new file mode 100644 index 00000000..6a5ba4dc Binary files /dev/null and b/app/src/main/assets/ethtool-v7a differ diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt index 393acd36..6ca10a0b 100644 --- a/app/src/main/cpp/CMakeLists.txt +++ b/app/src/main/cpp/CMakeLists.txt @@ -14,6 +14,27 @@ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ffunction-sections -fdata-sections -Wformat set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}") # SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections") + +add_definitions(-DUSING_ETHERNET) + +if(ANDROID_ABI STREQUAL "armeabi-v7a") + add_definitions(-DUSING_N938) +elseif(ANDROID_ABI STREQUAL "arm64-v8a") + # add_definitions(-DUSING_N938) + # add_definitions(-DUSING_PTZ) +endif() + +# OUTPUT_DBG_INFO: 输出调试相关信息 +add_definitions(-DOUTPUT_DBG_INFO) +# OUTPUT_SOCKET_DBG_INFO Depends ON OUTPUT_DBG_INFO +# TerminalService.cpp +# add_definitions(-DOUTPUT_SOCKET_DBG_INFO) +# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO +# Database.cpp +# add_definitions(-DOUTPUT_DB_DBG_INFO) + +add_definitions(-DUSING_FFMPEG) + IF (CMAKE_BUILD_TYPE STREQUAL Debug) ADD_DEFINITIONS(-D_DEBUG) ELSE() @@ -36,23 +57,14 @@ add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c # add_definitions(-DUSING_NRSEC_VPN) # add_definitions(-DUSING_CERT) # add_definitions(-DUSING_DOWSE) -# OUTPUT_CAMERA_DBG_INFO: 照片上打印CARERA相关信息 -# add_definitions(-DOUTPUT_CAMERA_DBG_INFO) + add_definitions(-DALIGN_HB_TIMER_TO_PHOTO) add_definitions(-DENABLE_3V3_ALWAYS) add_definitions(-DCURL_STATICLIB) add_definitions(-DUSING_HDRPLUS) -add_definitions(-DUSING_EXEC_HDRP=1) -set(USING_EXEC_HDRP 1) - - -if(ANDROID_ABI STREQUAL "armeabi-v7a") - add_definitions(-DUSING_N938) -elseif(ANDROID_ABI STREQUAL "arm64-v8a") - # add_definitions(-DUSING_N938) - add_definitions(-DUSING_PLZ) -endif() +add_definitions(-DUSING_EXEC_HDRP=0) +#set(USING_EXEC_HDRP 1) # include_directories(${OpenCV_DIR}/include) # add_library( lib_opencv SHARED IMPORTED ) @@ -83,6 +95,8 @@ set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn) find_package(ncnn REQUIRED) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/breakpad) + 
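Taken as a whole, the eth.sh script added above assigns eth0 a static address (192.168.68.91/24). It polls for the interface to appear (150 iterations at 0.1 s, roughly 15 s despite the "seconds" wording in the messages), optionally forces 10 Mbps full duplex with the bundled ethtool binary (also added to assets), sets the address while the link is still down, and re-applies it after link-up because some MTK Android 9 devices drop the address at that point. The route to 192.168.68.0/24 is then installed both in the main table and in table 20, and the ip rule ... lookup 20 prio 1000 entry steers traffic for that subnet through table 20; ADD_ROUTE_STATUS only records the exit code of the second ip route add so the following check can log success or failure.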
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include) @@ -136,9 +150,19 @@ include_directories(hdrplus2/${ANDROID_ABI}) include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include) -SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX) +SET(ZLMEDIAKIT_LIBS "") +SET(STREAMING_SRCS "") -SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline) +add_definitions(-DDISABLE_RTTI) +# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLMediaKit ) +# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ ) +# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit) + +SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp ) + +#SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX) + +#SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline) SET(HDRPLUS_SOURCES hdrplus/src/align.cpp @@ -156,6 +180,7 @@ SET(HDRPLUS2_SOURCES hdrplus2/src/InputSource.cpp hdrplus2/src/LibRaw2DngConverter.cpp hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp) +SET(HDRPLUS2_SOURCES ) SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR}) @@ -165,11 +190,6 @@ SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR}) SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp) SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include) -SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) -SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) - -SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad) - SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2) SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype) @@ -177,66 +197,12 @@ SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype) # SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp) include_directories(${YAMC_INC_DIR}) -include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include) include_directories(${ASIO_ROOT}/include) -add_library( # Sets the name of the library. - sqlite3 - - # Sets the library as a shared library. - STATIC - - # Provides a relative path to your source file(s). 
- ${SQLITE_SRC_DIR}/sqlite3.c - ) - -INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR}) - -file(GLOB BREAKPAD_SOURCES_COMMON - native-lib.cpp - ${BREAKPAD_ROOT}/client/linux/crash_generation/crash_generation_client.cc - ${BREAKPAD_ROOT}/client/linux/dump_writer_common/thread_info.cc - ${BREAKPAD_ROOT}/client/linux/dump_writer_common/ucontext_reader.cc - ${BREAKPAD_ROOT}/client/linux/handler/exception_handler.cc - ${BREAKPAD_ROOT}/client/linux/handler/minidump_descriptor.cc - ${BREAKPAD_ROOT}/client/linux/log/log.cc - ${BREAKPAD_ROOT}/client/linux/microdump_writer/microdump_writer.cc - ${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_dumper.cc - ${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_ptrace_dumper.cc - ${BREAKPAD_ROOT}/client/linux/minidump_writer/minidump_writer.cc - ${BREAKPAD_ROOT}/client/linux/minidump_writer/pe_file.cc - ${BREAKPAD_ROOT}/client/minidump_file_writer.cc - ${BREAKPAD_ROOT}/common/convert_UTF.cc - ${BREAKPAD_ROOT}/common/md5.cc - ${BREAKPAD_ROOT}/common/string_conversion.cc - ${BREAKPAD_ROOT}/common/linux/elfutils.cc - ${BREAKPAD_ROOT}/common/linux/file_id.cc - ${BREAKPAD_ROOT}/common/linux/guid_creator.cc - ${BREAKPAD_ROOT}/common/linux/linux_libc_support.cc - ${BREAKPAD_ROOT}/common/linux/memory_mapped_file.cc - ${BREAKPAD_ROOT}/common/linux/safe_readlink.cc - ) - -file(GLOB BREAKPAD_ASM_SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S) -set_property(SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S PROPERTY LANGUAGE C) -# set_source_files_properties(${BREAKPAD_ASM_SOURCE} PROPERTIES LANGUAGE C) - -# Creates and names a library, sets it as either STATIC -# or SHARED, and provides the relative paths to its source code. -# You can define multiple libraries, and CMake builds them for you. -# Gradle automatically packages shared libraries with your APK. - -add_library( # Sets the name of the library. - breakpad - - # Sets the library as a shared library. - STATIC - - # Provides a relative path to your source file(s). - ${BREAKPAD_SOURCES_COMMON} - ${BREAKPAD_ASM_SOURCE} - ) - +# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) +# SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) +# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c ) +# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR}) INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR}) @@ -330,10 +296,8 @@ include_directories(${TERM_CORE_ROOT}) add_library( # Sets the name of the library. jsoncpp - # Sets the library as a shared library. STATIC - # Provides a relative path to your source file(s). ${JSONCPP_SOURCES} ) @@ -375,6 +339,7 @@ add_library( # Sets the name of the library. GPIOControl.cpp MicroPhoto.cpp PhoneDevice.cpp + PtzController.cpp # PhoneDevice2.cpp Camera.cpp Camera2Reader.cpp @@ -388,6 +353,12 @@ add_library( # Sets the name of the library. ncnn/yolov5ncnn.cpp netcamera/httpclient.cpp + netcamera/VendorCtrl.cpp + netcamera/YuShiCtrl.cpp + netcamera/HangYuCtrl.cpp + netcamera/HikonCtrl.cpp + + ${STREAMING_SRCS} #serial/WeatherComm.cpp @@ -407,9 +378,11 @@ add_library( # Sets the name of the library. ${TERM_CORE_ROOT}/SpecData_I1_JS.cpp ${TERM_CORE_ROOT}/SpecData_I1_HN.cpp ${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp + ${TERM_CORE_ROOT}/SpecData_I1_HEN_TY.cpp ${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp ${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp ${TERM_CORE_ROOT}/SpecData_I1_NX.cpp + ${TERM_CORE_ROOT}/SpecData_I1_SX_ZY.cpp ${TERM_CORE_ROOT}/SpecData_XY.cpp ${TERM_CORE_ROOT}/SpecData_ZJ.cpp ${TERM_CORE_ROOT}/SpecData_NW.cpp @@ -427,14 +400,18 @@ add_library( # Sets the name of the library. 
${TERM_CORE_ROOT}/Client/Terminal_AH.cpp ${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp ${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp + ${TERM_CORE_ROOT}/Client/Terminal_HEN_TY.cpp ${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp ${TERM_CORE_ROOT}/Client/Terminal_JS.cpp ${TERM_CORE_ROOT}/Client/Terminal_NX.cpp + ${TERM_CORE_ROOT}/Client/Terminal_SX_ZY.cpp ${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp ${TERM_CORE_ROOT}/Client/Terminal_NW.cpp + ${TERM_CORE_ROOT}/Client/DataController.cpp ${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp ${TERM_CORE_ROOT}/Client/Database.cpp - ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp + # ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp + ${TERM_CORE_ROOT}/Client/DataController.cpp ) @@ -457,20 +434,15 @@ find_library( # Sets the name of the path variable. target_link_libraries( # Specifies the target library. ${PROJECT_NAME} - jsoncpp - freetype - # breakpad - + breakpad # Links the target library to the log library # included in the NDK. + avcodec avfilter avformat avutil swresample swscale x264 ${log-lib} - android camera2ndk mediandk z curl - - ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} - + ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} ${ZLMEDIAKIT_LIBS} ) # set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all") diff --git a/app/src/main/cpp/DngCreator.cpp b/app/src/main/cpp/DngCreator.cpp index 0c1727fb..12d31d5a 100644 --- a/app/src/main/cpp/DngCreator.cpp +++ b/app/src/main/cpp/DngCreator.cpp @@ -2509,8 +2509,8 @@ void DngCreator::writeInputStream(std::vector& outStream, uint64_t uOffset = static_cast(offset); ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " - "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth, - uHeight, rowStride, pixStride, offset); + "rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth, + uHeight, rowStride, pixStride, (int64_t)offset); ByteVectorOutput out(outStream); // std::vector& out = outStream; @@ -2578,8 +2578,8 @@ void DngCreator::writeInputBuffer(std::vector& outStream, uint64_t uOffset = static_cast(offset); ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " - "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth, - uHeight, rowStride, pixStride, offset); + "rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth, + uHeight, rowStride, pixStride, (int64_t)offset); ByteVectorOutput out(outStream); // std::vector& out = outStream; diff --git a/app/src/main/cpp/GPIOControl.cpp b/app/src/main/cpp/GPIOControl.cpp index 1a85feb6..de85c330 100644 --- a/app/src/main/cpp/GPIOControl.cpp +++ b/app/src/main/cpp/GPIOControl.cpp @@ -23,50 +23,108 @@ #define IOT_PARAM_READ 0xAF std::mutex GpioControl::m_locker; -CSemaphore GpioControl::m_semaphore; +std::mutex GpioControl::m_gpioLocker; std::vector GpioControl::m_items; -std::thread GpioControl::m_thread; -bool GpioControl::m_exitSignal = false; bool GpioControl::m_cameraPowerStatus = false; +#define ENABLE_GPIO_TRACING + +#ifdef ENABLE_GPIO_TRACING +class GpioDebugLogger +{ +public: + GpioDebugLogger(int cmd, int value) + { + m_startTime = GetMicroTimeStamp(); + m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_val." 
+ std::to_string(value); + + CreateEmptyFile(m_path + ".enter"); + } + + GpioDebugLogger(int cmd) + { + m_startTime = GetMicroTimeStamp(); + m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_get"; + + CreateEmptyFile(m_path + ".enter"); + } + + ~GpioDebugLogger() + { + uint64_t ts = (GetMicroTimeStamp() - m_startTime); + if (ts > 1000) + { + CreateEmptyFile(m_path + ".leave." + std::to_string(ts)); + } + else + { + std::string path = m_path + ".enter"; + std::remove(path.c_str()); + } + } + +private: + std::string m_path; + uint64_t m_startTime; +}; +#endif + size_t GpioControl::turnOnImpl(const IOT_PARAM& param) { + size_t oldRef = 0; size_t references = 1; std::vector::iterator it; int res = 0; int fd = -1; time_t now = time(NULL); - fd = open(GPIO_NODE_MP, O_RDONLY); - if( fd > 0 ) +// check res??? + for (it = m_items.begin(); it != m_items.end(); ++it) { - res = ioctl(fd, IOT_PARAM_WRITE, ¶m); - close(fd); - // check res??? - for (it = m_items.begin(); it != m_items.end(); ++it) + if (it->cmd == param.cmd) { - if (it->cmd == param.cmd) - { - it->references++; - // it->closeTime = 0; - references = it->references; - if(it->openTime == 0) - it->openTime = now; - SetCamerastatus(it->cmd, true); - break; - } + oldRef = it->references; + it->references++; + // it->closeTime = 0; + references = it->references; + if(it->openTime == 0) + it->openTime = now; + SetCamerastatus(it->cmd, true); + break; } - if (it == m_items.end()) + } + if (it == m_items.end()) + { + oldRef = 0; + ITEM item = {param.cmd, references, now}; + m_items.push_back(item); + SetCamerastatus(param.cmd, true); + } + + if (oldRef == 0/* || param.cmd != CMD_SET_3V3_PWR_EN*/) + { +#ifdef ENABLE_GPIO_TRACING + GpioDebugLogger logger(param.cmd, param.value); +#endif + m_gpioLocker.lock(); + fd = open(GPIO_NODE_MP, O_RDONLY); + if( fd > 0 ) { - ITEM item = {param.cmd, references, 0, 0, now}; - m_items.push_back(item); - SetCamerastatus(param.cmd, true); + res = ioctl(fd, IOT_PARAM_WRITE, ¶m); + close(fd); +#ifdef OUTPUT_DBG_INFO + // int realVal = getInt(param.cmd); + // XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result/*, realVal*/); + XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result); +#endif } - } + m_gpioLocker.unlock(); #ifdef _DEBUG - ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references); + ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references); #endif - std::this_thread::sleep_for(std::chrono::milliseconds(100)); + std::this_thread::sleep_for(std::chrono::milliseconds(100)); + } + return references; } @@ -76,19 +134,30 @@ void GpioControl::setInt(int cmd, int value) // param.cmd = cmd; // param.value = value; +#ifdef ENABLE_GPIO_TRACING + GpioDebugLogger logger(cmd, value); +#endif + m_gpioLocker.lock(); int fd = open(GPIO_NODE_MP, O_RDONLY); if (fd > 0) { int res = ioctl(fd, IOT_PARAM_WRITE, ¶m); -#ifdef _DEBUG - ALOGI("setInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); -#endif close(fd); +#ifdef OUTPUT_DBG_INFO + // int realVal = getInt(param.cmd); + // XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, value, param.result/*, realVal*/); + XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, value, param.result); +#endif } + m_gpioLocker.unlock(); } int GpioControl::getInt(int 
cmd) { +#ifdef ENABLE_GPIO_TRACING + GpioDebugLogger logger(cmd); +#endif + m_gpioLocker.lock(); int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) @@ -100,29 +169,34 @@ int GpioControl::getInt(int cmd) ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result); #endif close(fd); + m_gpioLocker.unlock(); return param.value; } + m_gpioLocker.unlock(); return -1; } void GpioControl::setLong(int cmd, long value) { - int fd = open(GPIO_NODE_MP, O_RDONLY); IOT_PARAM param; param.cmd = cmd; param.value2 = value; // LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2); + m_gpioLocker.lock(); + int fd = open(GPIO_NODE_MP, O_RDONLY); if( fd > 0 ) { ioctl(fd, IOT_PARAM_WRITE, ¶m); // LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result); close(fd); } + m_gpioLocker.unlock(); } long GpioControl::getLong(int cmd) { + m_gpioLocker.lock(); int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) @@ -132,32 +206,37 @@ long GpioControl::getLong(int cmd) ioctl(fd, IOT_PARAM_READ, ¶m); // LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result); close(fd); + m_gpioLocker.unlock(); return param.value2; } + m_gpioLocker.unlock(); return -1; } void GpioControl::setString(int cmd, const std::string& value) { IOT_PARAM param; - int fd = open(GPIO_NODE_MP, O_RDONLY); - int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size(); - param.cmd = cmd; memset(param.str, 0, MAX_STRING_LEN); + int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size(); memcpy(param.str, value.c_str(), len); // LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str); + + m_gpioLocker.lock(); + int fd = open(GPIO_NODE_MP, O_RDONLY); if( fd > 0 ) { ioctl(fd, IOT_PARAM_WRITE, ¶m); // LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result); close(fd); } + m_gpioLocker.unlock(); return; } std::string GpioControl::getString(int cmd) { + m_gpioLocker.lock(); int fd = open(GPIO_NODE_MP, O_RDONLY); // LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd); if( fd > 0 ) @@ -167,8 +246,10 @@ std::string GpioControl::getString(int cmd) ioctl(fd, IOT_PARAM_READ, ¶m); // LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result); close(fd); + m_gpioLocker.unlock(); return std::string(param.str); } + m_gpioLocker.unlock(); return ""; } @@ -196,6 +277,10 @@ size_t GpioControl::TurnOn(const std::vector& cmds) m_locker.lock(); for (it = cmds.cbegin(); it != cmds.cend(); ++it) { + if (*it == 0) + { + continue; + } param.cmd = *it; turnOnImpl(param); } @@ -214,14 +299,17 @@ size_t GpioControl::TurnOffImmediately(int cmd) { if (it->cmd == cmd) { - ref = it->references; - it->closeCmds++; - it->closeTime = ts; + if (it->references > 0) + { + it->references = 0; + SetCamerastatus(cmd, false); + setInt(it->cmd, 0); + it->openTime = 0; + } break; } } m_locker.unlock(); - m_semaphore.release(); #ifdef _DEBUG ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref); #endif @@ -237,22 +325,36 @@ size_t GpioControl::TurnOff(int cmd, uint32_t delayedCloseTime/* = 0*/) } size_t ref = 0; std::vector::iterator it; + + if (delayedCloseTime > 0) + { + std::shared_ptr powerCtrl = std::make_shared(cmd); + std::thread th([delayedCloseTime, powerCtrl]() mutable { + std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime)); + powerCtrl.reset(); + }); + th.detach(); + } + m_locker.lock(); for (it = 
m_items.begin(); it != m_items.end(); ++it) { if (it->cmd == cmd) { - ref = it->references; - it->closeCmds++; - if (ts > it->closeTime) + if (it->references > 0) { - it->closeTime = ts; + it->references--; + if (it->references == 0) + { + SetCamerastatus(cmd, false); + setInt(it->cmd, 0); + it->openTime = 0; + } } break; } } m_locker.unlock(); - m_semaphore.release(); #ifdef _DEBUG ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref); #endif @@ -268,6 +370,17 @@ size_t GpioControl::TurnOff(const std::vector& cmds, uint32_t delayedCloseT } std::vector::iterator it; std::vector::const_reverse_iterator itCmd; + + if (delayedCloseTime > 0) + { + std::shared_ptr powerCtrl = std::make_shared(cmds); + std::thread th([delayedCloseTime, powerCtrl]() mutable { + std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime)); + powerCtrl.reset(); + }); + th.detach(); + } + m_locker.lock(); // turnOnImpl(param); for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd) @@ -276,25 +389,41 @@ size_t GpioControl::TurnOff(const std::vector& cmds, uint32_t delayedCloseT { if (it->cmd == *itCmd) { - it->closeCmds++; - if (ts > it->closeTime) + if (it->references > 0) { - it->closeTime = ts; + it->references--; + if (it->references == 0) + { + SetCamerastatus(it->cmd, false); + setInt(it->cmd, 0); + it->openTime = 0; + } } break; } } } m_locker.unlock(); - m_semaphore.release(); return 0; } size_t GpioControl::TurnOff(const std::vector >& cmds) { - time_t ts = time(NULL); - time_t ts2; + for (auto itCmd = cmds.cbegin(); itCmd != cmds.end(); ++itCmd) + { + if (itCmd->second > 0) + { + uint32_t delayedCloseTime = itCmd->second; + std::shared_ptr powerCtrl = std::make_shared(itCmd->first); + std::thread th([delayedCloseTime, powerCtrl]() mutable { + std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime)); + powerCtrl.reset(); + }); + th.detach(); + } + } + std::vector::iterator it; std::vector >::const_iterator itCmd; m_locker.lock(); @@ -304,14 +433,14 @@ size_t GpioControl::TurnOff(const std::vector >& cmds) { if (it->cmd == itCmd->first) { - - it->closeCmds++; - if (itCmd->second != 0) + if (it->references > 0) { - ts2 = itCmd->second + ts; - if (ts2 > it->closeTime) + it->references--; + if (it->references == 0) { - it->closeTime = ts2; + SetCamerastatus(it->cmd, false); + setInt(it->cmd, 0); + it->openTime = 0; } } break; @@ -319,7 +448,6 @@ size_t GpioControl::TurnOff(const std::vector >& cmds) } } m_locker.unlock(); - m_semaphore.release(); return 0; } @@ -329,7 +457,7 @@ bool GpioControl::SetCamerastatus(int cmd, bool status) if(cmd == CMD_SET_PIC1_POWER) m_cameraPowerStatus = status; #endif -#ifdef USING_PLZ +#ifdef USING_PTZ if(cmd == CMD_SET_PTZ_PWR_ENABLE) { m_cameraPowerStatus = status; @@ -345,18 +473,18 @@ bool GpioControl::GetCamerastatus() bool GpioControl::GetSelftestStatus(time_t wait_time) { - int cmd; + int cmd = 0; #ifdef USING_N938 cmd = CMD_SET_PIC1_POWER; #endif -#ifdef USING_PLZ +#ifdef USING_PTZ cmd = CMD_SET_PTZ_PWR_ENABLE; #endif time_t now = time(NULL); std::vector::iterator it; for (it = m_items.begin(); it != m_items.end(); ++it) { - if (it->cmd == cmd && it->openTime!=0 && (now - it->openTime >= wait_time)) + if (it->cmd == cmd && it->references > 0 && it->openTime!=0 && (now - it->openTime >= wait_time)) { return true;//自检完成 } @@ -365,115 +493,26 @@ bool GpioControl::GetSelftestStatus(time_t wait_time) } -void GpioControl::PowerControlThreadProc() +time_t GpioControl::GetSelfTestRemain(time_t wait_time) { - time_t ts = 0; - std::vector::iterator it; - 
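The TurnOff(..., delayedCloseTime) overloads above no longer queue work for the removed background thread: when a delayed close is requested, an extra reference on the rail is taken by constructing another PowerControl, the shared_ptr is handed to a detached thread, and that reference is only dropped when the sleep expires, so the count reaches zero (and setInt(cmd, 0) runs) after the delay. A minimal standalone sketch of the idiom — the names here are illustrative, not taken from the project:

    #include <chrono>
    #include <cstdint>
    #include <memory>
    #include <thread>

    // Stand-in for PowerControl: acquiring it "turns on", destroying it "turns off".
    struct RailGuard {
        ~RailGuard() { /* the real code would call setInt(cmd, 0) here */ }
    };

    // Keep the guard alive in a detached thread; when the sleep ends the last
    // shared_ptr owner is released, ~RailGuard() runs and the rail powers down late.
    void releaseAfter(std::shared_ptr<RailGuard> guard, uint32_t delaySeconds)
    {
        std::thread([delaySeconds, guard]() mutable {
            std::this_thread::sleep_for(std::chrono::seconds(delaySeconds));
            guard.reset();
        }).detach();
    }

Compared with the removed PowerControlThreadProc/semaphore loop, this trades one long-lived worker for a short-lived sleeping thread per deferred close.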
std::vector items; - time_t minDelayTime = 0; - time_t delayTime = 0; - int fd = -1; - int res = -1; - m_cameraPowerStatus = 0; - - while(1) - { - // Check if there is close cmd - ts = time(NULL); - - minDelayTime = std::numeric_limits::max(); - - m_locker.lock(); - for (it = m_items.begin(); it != m_items.end(); ++it) - { - if (it->references == 0 && it->closeCmds == 0 && it->closeTime == 0) - { -#ifdef _DEBUG - ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds); -#endif - continue; - } - - if (it->closeCmds > 0) - { - if (it->references <= it->closeCmds) - { - it->references = 0; - } - else - { - it->references -= it->closeCmds; - if(it->references < 0) - it->references = 0; - } - it->closeCmds = 0; - } - - if (it->references == 0) - { - // Should turn off the power - if ((it->closeTime == 0) || (it->closeTime <= ts)) - { - // close it directly - setInt(it->cmd, 0); - it->closeTime = 0; - it->openTime = 0; -#ifdef _DEBUG - ALOGI("PWR THREAD DO TurnOff cmd=%d", it->cmd); + int cmd = 0; +#ifdef USING_N938 + cmd = CMD_SET_PIC1_POWER; #endif - SetCamerastatus(it->cmd, false); - } - else - { - // Check Time - delayTime = ts - it->closeTime; - if (delayTime < minDelayTime) - { - minDelayTime = delayTime; - } - } - } -#ifdef _DEBUG - ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds); +#ifdef USING_PTZ + cmd = CMD_SET_PTZ_PWR_ENABLE; #endif - } - m_locker.unlock(); - - if (minDelayTime < std::numeric_limits::max()) - { - m_semaphore.try_acquire_for(std::chrono::seconds(1)); - } - else - { - m_semaphore.acquire(); - } - - if (m_exitSignal) + time_t now = time(NULL); + std::vector::iterator it; + for (it = m_items.begin(); it != m_items.end(); ++it) + { + if (it->cmd == cmd && it->references > 0) { - break; + time_t remaintime = (now - it->openTime); + remaintime = (wait_time > remaintime) ? (wait_time - remaintime) : 0; + return remaintime;//自检完成 } } + return 0; } -bool GpioControl::Startup() -{ - // if (m_thread.) 
- m_exitSignal = false; - m_thread = std::thread(PowerControlThreadProc); -#ifdef _DEBUG - pthread_t nativeHandle = m_thread.native_handle(); - pthread_setname_np(nativeHandle, "gpioclose"); -#endif - return true; -} - -void GpioControl::Stop() -{ - // Notify - m_exitSignal = true; - m_semaphore.release(); - m_thread.detach(); -} - - - diff --git a/app/src/main/cpp/GPIOControl.h b/app/src/main/cpp/GPIOControl.h index c66baec4..c84c92b5 100644 --- a/app/src/main/cpp/GPIOControl.h +++ b/app/src/main/cpp/GPIOControl.h @@ -13,10 +13,11 @@ #include #include +#include #ifndef USING_N938 -#ifndef USING_PLZ // MicroPhoto +#ifndef USING_PTZ // MicroPhoto #define CMD_GET_LIGHT_ADC 101 #define CMD_SET_LIGHT_ADC 102 @@ -43,6 +44,7 @@ #define CMD_SET_PWM_BEE_STATE 126 // Removed #define CMD_SET_ALM_MODE 128 // Removed #define CMD_SET_SYSTEM_RESET 202 +#define CMD_SET_SYSTEM_RESET2 203 #define CMD_SET_485_EN_STATE 131 #define CMD_SET_12V_EN_STATE 133 #if 1 @@ -50,15 +52,28 @@ #define CMD_SET_3V3_PWR_EN 132 #endif -#else // defined(USING_PLZ) +#define CMD_GET_CAMERA_STATUS 310 +#define CMD_SET_MADA_INIT_STATUS 312 +#define CMD_SET_MADA_CLOSE_STATUS 313 +#define CMD_SET_MADA_REG 314 +#define CMD_GET_MADA_REG 315 + +#define CMD_SET_INIT_STATUS 401 + +#define CMD_SET_5V_PWR_ENABLE 517 +#define CMD_SET_NEW_OTG_STATE 507 + +#else // defined(USING_PTZ) #define CMD_SET_OTG_STATE 107 #define CMD_GET_OTG_STATE 108 #define CMD_SET_SPI_POWER 129 +#define CMD_SET_MADA_MOVE_STATUS 311 #define CMD_SET_12V_EN_STATE 0 // TO BE ADDED #define CMD_SET_SYSTEM_RESET 202 +#define CMD_SET_SYSTEM_RESET2 203 #define CMD_GET_LIGHT_ADC 101 #define CMD_SET_LIGHT_ADC 102 #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112 @@ -67,9 +82,9 @@ #define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED #define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED -#define CMD_SET_485_ENABLE 512 -#define CMD_SET_3V3_PWR_EN 516 -#define CMD_SET_5V_PWR_ENABLE 517 +#define CMD_SET_485_ENABLE 131 +#define CMD_SET_3V3_PWR_EN 132 +// #define CMD_SET_5V_PWR_ENABLE 517 #define CMD_SET_SENSOR_ENABLE 504 #define CMD_SET_SENSOR_PWR_ENABLE 505 #define CMD_SET_SENSOR2_ENABLE 506 @@ -96,20 +111,31 @@ #define CMD_SET_LIGHT1_RESISTOR_ENABLE 524 #define CMD_SET_100M_RESET 526 -#endif // USING_PLZ +#define CMD_GET_CAMERA_STATUS 310 + +#define CMD_SET_MADA_MOVE_STATUS 311 +#define CMD_SET_MADA_INIT_STATUS 312 +#define CMD_SET_MADA_CLOSE_STATUS 313 +#define CMD_SET_MADA_REG 314 +#define CMD_GET_MADA_REG 315 + +#define CMD_SET_INIT_STATUS 401 + +#endif // USING_PTZ #else // defined(USING_N938) #define CMD_SET_SYSTEM_RESET 202 +#define CMD_SET_SYSTEM_RESET2 203 #define CMD_SET_485_EN1 302 -#define CMD_SET_3V3_PWR_EN 360 +#define CMD_SET_3V3_PWR_EN 132 #define CMD_SET_UART0_EN 361 #define CMD_SET_485_EN0 301 #define CMD_SET_NETWORK_POWER_EN 362 #define CMD_SET_485_EN3 304 #define CMD_SET_485_EN2 303 #define CMD_SET_SPI_POWER 129 -#define CMD_SET_5V_EN 363 +// #define CMD_SET_5V_EN 363 #define CMD_SDCARD_DETECT_EN 364 #define CMD_SET_PIC1_POWER 494 #define CMD_SET_OTHER_POWER 493 @@ -130,6 +156,8 @@ #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112 #define CMD_GET_BAT_BUS_VOLTAGE_STATE 117 +#define CMD_SET_INIT_STATUS 0 // 401 + #endif // USING_N938 @@ -152,19 +180,16 @@ public: { int cmd; size_t references; - size_t closeCmds; - time_t closeTime; time_t openTime; }; private: static std::mutex m_locker; - static CSemaphore m_semaphore; static std::vector m_items; - static bool m_exitSignal; - static std::thread m_thread; static bool m_cameraPowerStatus; + static std::mutex m_gpioLocker; + 
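Aside on the platform macros in this header: the CMakeLists.txt hunk earlier in the patch now defines USING_N938 only for armeabi-v7a and leaves both USING_N938 and USING_PTZ commented out for arm64-v8a, so the arm64 build compiles the plain MicroPhoto branch of these CMD_* tables; the former USING_PLZ spelling is renamed USING_PTZ throughout the header and GPIOControl.cpp.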
protected: static size_t turnOnImpl(const IOT_PARAM& param); static size_t turnOffImpl(const IOT_PARAM& param); @@ -181,11 +206,7 @@ public: static bool SetCamerastatus(int cmd, bool status); static bool GetCamerastatus(); static bool GetSelftestStatus(time_t wait_time); - - static void PowerControlThreadProc(); - - static bool Startup(); - static void Stop(); + static time_t GetSelfTestRemain(time_t wait_time); public: static void setInt(int cmd, int value); @@ -211,7 +232,7 @@ public: static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime) { -#ifdef USING_PLZ +#ifdef USING_PTZ enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime); #else enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime); @@ -220,7 +241,7 @@ public: static void setCam3V3Enable(bool enabled) { -#ifdef USING_PLZ +#ifdef USING_PTZ enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN); #else enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN); @@ -230,7 +251,7 @@ public: static void setBeeOn(bool z) { #ifndef USING_N938 -#ifndef USING_PLZ +#ifndef USING_PTZ z ? TurnOn(CMD_SET_PWM_BEE_STATE) : TurnOff(CMD_SET_PWM_BEE_STATE); #endif #endif @@ -238,7 +259,7 @@ public: static void setJidianqiState(bool z) { #ifndef USING_N938 -#ifndef USING_PLZ +#ifndef USING_PTZ z ? TurnOn(CMD_SET_ALM_MODE) : TurnOff(CMD_SET_ALM_MODE); #endif #endif @@ -255,7 +276,7 @@ public: static void setRS485Enable(bool z, uint32_t delayedCloseTime) { #ifndef USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE, delayedCloseTime); #else z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime); @@ -273,7 +294,7 @@ public: static void setRS485Enable(bool z) { #ifndef USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE); #else z ? 
TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE); @@ -293,10 +314,15 @@ public: setInt(CMD_SET_SYSTEM_RESET, 1); } + static void reboot2() + { + setInt(CMD_SET_SYSTEM_RESET2, 1); + } + static void setLightAdc(int i) { #ifndef USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i); #else setInt(CMD_SET_LIGHT_ADC, i); @@ -307,7 +333,7 @@ public: static int getLightAdc() { #ifndef USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE); #else return getInt(CMD_GET_LIGHT_ADC); @@ -338,11 +364,7 @@ public: #endif static int getChargingBusVoltage() { -#ifndef USING_N938 return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE); -#else - return -1; -#endif } #if 0 @@ -440,6 +462,12 @@ public: m_cmds.resize(1, cmd1); TurnOn(); } + PowerControl(const std::vector& cmds) : m_delayCloseTime(0) + { + m_cmds = cmds; + TurnOn(); + } + PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime) { m_cmds.resize(1, cmd1); @@ -498,14 +526,52 @@ public: TurnOn(); } + PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime) + { + m_cmds.resize(8, cmd1); + m_cmds[1] = cmd2; + m_cmds[2] = cmd3; + m_cmds[3] = cmd4; + m_cmds[4] = cmd5; + m_cmds[5] = cmd6; + m_cmds[6] = cmd7; + m_cmds[7] = cmd8; + TurnOn(); + } + virtual ~PowerControl() { GpioControl::TurnOff(m_cmds, m_delayCloseTime); +#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO) + std::string status = GetStatus(); + XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s, DelayCloseTime=%u", status.c_str(), m_delayCloseTime); +#endif + } + +#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO) + std::string GetStatus() + { + std::string result; + for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it) + { + if (*it == 0) + { + continue; + } + result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " "; + } + + return result; } +#endif // #if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO) protected: void TurnOn() { +#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO) + // std::string status = GetStatus(); + // XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str()); +#endif GpioControl::TurnOn(m_cmds); } @@ -521,11 +587,11 @@ public: #ifdef USING_N938 PowerControl(0, closeDelayTime) #else // USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime) -#else // USING_PLZ +#else // USING_PTZ PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime) -#endif // USING_PLZ +#endif // USING_PTZ #endif // USING_N938 { } @@ -536,13 +602,14 @@ class NetCameraPowerCtrl : public PowerControl public: NetCameraPowerCtrl(uint32_t closeDelayTime) : #ifdef USING_N938 - PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) + PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) #else // USING_N938 -#ifdef USING_PLZ - PowerControl(CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime) -#else // USING_PLZ - PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime) -#endif // USING_PLZ +#ifdef USING_PTZ + PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime) +#else // USING_PTZ + // MicroPhoto + PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime) +#endif // USING_PTZ #endif // USING_N938 { } @@ -553,13 +620,32 @@ class PlzCameraPowerCtrl : public PowerControl 
public: PlzCameraPowerCtrl(uint32_t closeDelayTime) : #ifdef USING_N938 - PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) + PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) #else // USING_N938 -#ifdef USING_PLZ - PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_485_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime) -#else // USING_PLZ +#ifdef USING_PTZ + PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime) +#else // USING_PTZ PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime) -#endif // USING_PLZ +#endif // USING_PTZ +#endif // USING_N938 + { + } +}; + +class EthernetPowerCtrl : public PowerControl +{ +public: + EthernetPowerCtrl(uint32_t closeDelayTime) : +#ifdef USING_N938 + PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime) +#else // USING_N938 +#ifdef USING_PTZ + // PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime) + PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, closeDelayTime) +#else // USING_PTZ + // Micro Photo + PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime) +#endif // USING_PTZ #endif // USING_N938 { } @@ -572,11 +658,11 @@ public: #ifdef USING_N938 PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) #else // USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime) -#else // USING_PLZ +#else // USING_PTZ PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime) -#endif // USING_PLZ +#endif // USING_PTZ #endif // USING_N938 { } @@ -589,11 +675,11 @@ public: #ifdef USING_N938 PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime) #else // USING_N938 -#ifdef USING_PLZ +#ifdef USING_PTZ PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime) -#else // USING_PLZ +#else // USING_PTZ PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime) -#endif // USING_PLZ +#endif // USING_PTZ #endif // USING_N938 { } diff --git a/app/src/main/cpp/MicroPhoto.cpp b/app/src/main/cpp/MicroPhoto.cpp index a5b03ead..58b3cae0 100644 --- a/app/src/main/cpp/MicroPhoto.cpp +++ b/app/src/main/cpp/MicroPhoto.cpp @@ -1,6 +1,7 @@ #include #include #include +#include #include #include #include "PhoneDevice.h" @@ -19,8 +20,18 @@ #endif #ifdef USING_BREAK_PAD -#include "client/linux/handler/exception_handler.h" -#include "client/linux/handler/minidump_descriptor.h" +#include +#include +#endif + +#ifdef USING_MQTT +#include +#endif + +#ifdef USING_FFMPEG +extern "C" { +#include +} #endif #include @@ -30,6 +41,7 @@ #include "GPIOControl.h" #ifdef USING_BREAK_PAD +static google_breakpad::ExceptionHandler* g_breakpad_handler = nullptr; bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor, void* context, bool succeeded) { @@ -161,11 +173,27 @@ void Runner::RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned 
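The PowerControl subclasses above (NetCameraPowerCtrl, PlzCameraPowerCtrl, the new EthernetPowerCtrl, and so on) are scope-based: the constructor calls GpioControl::TurnOn for the platform-specific command list and the destructor issues the matching TurnOff with the stored delay. A hypothetical caller, only to show the intended usage pattern:

    #include <cstdint>
    #include "GPIOControl.h"   // assumed location of the declarations above

    // Illustrative only: the power rails follow the lifetime of the local object.
    static void captureFromNetCamera()
    {
        NetCameraPowerCtrl power(2);   // constructor: TurnOn() on the camera-related rails
        // ... bring up the link and fetch an image here ...
    }                                  // destructor: TurnOff(cmds, 2) -> rails drop ~2 s later

requestPowerControl/releasePowerControl further down expose the same objects to Java as an opaque jlong, so the Java side decides when that destructor runs.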
pTerminal->RequestCapture(channel, preset, type, scheduleTime); } +#include +#include + +#if 0 +void sighandler(int sig) { + __android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "Caught signal %d", sig); + + exit(1); +} +#endif + jint JNI_OnLoad(JavaVM* vm, void* reserved) { JNIEnv* env = NULL; jint result = -1; + // 在 JNI_OnLoad 或其他初始化函数中注册 +#if 0 + signal(SIGSEGV, sighandler); +#endif + #if defined(JNI_VERSION_1_6) if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK) { @@ -191,8 +219,15 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved) } #ifdef USING_BREAK_PAD - google_breakpad::MinidumpDescriptor descriptor("/sdcard/Android/data/com.xypower.mpapp/files/logs/"); - google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1); + google_breakpad::MinidumpDescriptor descriptor("/sdcard/com.xypower.mpapp/logs/"); + g_breakpad_handler = new google_breakpad::ExceptionHandler( + descriptor, + nullptr, // Filter callback + DumpCallback, // Minidump callback + nullptr, // Context + true, // Install handlers + -1 // Server FD (not used) + ); #endif #if 0 @@ -227,9 +262,40 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved) curl_global_init(CURL_GLOBAL_ALL); +#ifdef USING_MQTT + mosquitto_lib_init(); +#endif + +#ifdef USING_FFMPEG + // av_register_all(); + avformat_network_init(); +#endif + return result; } +JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved) +{ +#ifdef USING_MQTT + mosquitto_lib_cleanup(); +#endif + + curl_global_cleanup(); + +#ifdef USING_FFMPEG + // av_register_all(); + avformat_network_deinit(); +#endif + +#ifdef USING_BREAKPAD + // Clean up breakpad handler + if (g_breakpad_handler) { + delete g_breakpad_handler; + g_breakpad_handler = nullptr; + } +#endif +} + bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread) { didAttachThread = false; @@ -265,12 +331,10 @@ Java_com_xypower_mpapp_MainActivity_takePhoto( unsigned char id = (unsigned char)channel - 1; Camera2Reader *camera = new Camera2Reader(id); - const char *pathStr = env->GetStringUTFChars(path, 0); - const char *fileNameStr = env->GetStringUTFChars(fileName, 0); + std::string pathStr = jstring2string(env, path); + std::string fileNameStr = jstring2string(env, fileName); - camera->Open(pathStr, fileNameStr); - env->ReleaseStringUTFChars(fileName, fileNameStr); - env->ReleaseStringUTFChars(path, pathStr); + camera->Open(pathStr.c_str(), fileNameStr.c_str()); camera->start(); @@ -301,13 +365,12 @@ Java_com_xypower_mpapp_MicroPhotoService_init( jstring modelName = env->NewStringUTF(model); env->SetObjectField(pThis, fieldId, modelName); - bool udpOrTcp = (networkProtocol != 0); // 0: tcp - const char *appPathStr = appPath == NULL ? NULL : env->GetStringUTFChars(appPath, 0); - const char *ipStr = ip == NULL ? NULL : env->GetStringUTFChars(ip, 0); - const char *cmdidStr = cmdid == NULL ? NULL : env->GetStringUTFChars(cmdid, 0); - const char *simcardStr = simcard == NULL ? NULL : env->GetStringUTFChars(simcard, 0); - const char *tfCardPathStr = tfCardPath == NULL ? NULL : env->GetStringUTFChars(tfCardPath, 0); - const char *nativeLibraryDirStr = nativeLibraryDir == NULL ? 
NULL : env->GetStringUTFChars(nativeLibraryDir, 0); + std::string appPathStr = jstring2string(env, appPath); + std::string ipStr = jstring2string(env, ip); + std::string cmdidStr = jstring2string(env, cmdid); + std::string simcardStr = jstring2string(env, simcard); + std::string tfCardPathStr = jstring2string(env, tfCardPath); + std::string nativeLibraryDirStr = jstring2string(env, nativeLibraryDir); JavaVM* vm = NULL; jint ret = env->GetJavaVM(&vm); @@ -318,14 +381,14 @@ Java_com_xypower_mpapp_MicroPhotoService_init( CTerminal* pTerminal = NewTerminal(protocol); - CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), NETID_UNSET, versionCode, MakeString(nativeLibraryDirStr)); + CPhoneDevice* device = new CPhoneDevice(vm, pThis, appPathStr, (uint64_t)netHandle, versionCode, nativeLibraryDirStr); device->SetListener(pTerminal); device->UpdateSignalLevel(signalLevel); device->SetBuildTime(buildTime / 1000); - device->UpdateSimcard(MakeString(simcardStr)); - device->UpdateTfCardPath(MakeString(tfCardPathStr)); + device->UpdateSimcard(simcardStr); + device->UpdateTfCardPath(tfCardPathStr); - pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData); + pTerminal->InitServerInfo(appPathStr, cmdidStr, ipStr, port, networkProtocol, encryptData); // pTerminal->SetPacketSize(1 * 1024); // 1K #if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN) pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", ""); @@ -337,12 +400,6 @@ Java_com_xypower_mpapp_MicroPhotoService_init( #ifdef _DEBUG ALOGD("Finish Startup"); #endif - if (appPathStr != NULL) env->ReleaseStringUTFChars(appPath, appPathStr); - if (ipStr != NULL) env->ReleaseStringUTFChars(ip, ipStr); - if (cmdidStr != NULL) env->ReleaseStringUTFChars(cmdid, cmdidStr); - if (simcardStr != NULL) env->ReleaseStringUTFChars(simcard, simcardStr); - if (tfCardPathStr != NULL) env->ReleaseStringUTFChars(tfCardPath, tfCardPathStr); - if (nativeLibraryDirStr != NULL) env->ReleaseStringUTFChars(nativeLibraryDir, nativeLibraryDirStr); if (!res) { @@ -356,7 +413,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init( extern "C" JNIEXPORT jboolean JNICALL Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto( JNIEnv* env, - jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jboolean photoOrVideo) { + jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jstring url, jint mediaType) { if (channel < 0 || channel > 0xFFFF) { @@ -368,24 +425,44 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto( return JNI_FALSE; } - unsigned char type = photoOrVideo ? 
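The JNI functions above now convert arguments with jstring2string instead of pairing GetStringUTFChars with ReleaseStringUTFChars by hand. The helper itself is not part of this patch; a typical implementation with that shape (an assumption about its behavior, not the project's actual code) is:

    #include <jni.h>
    #include <string>

    // Hypothetical helper matching the calls above; tolerates a null jstring,
    // which matters because callers no longer do the NULL checks themselves.
    static std::string jstring2string(JNIEnv* env, jstring js)
    {
        if (js == nullptr) return std::string();
        const char* utf = env->GetStringUTFChars(js, nullptr);
        if (utf == nullptr) return std::string();
        std::string s(utf);
        env->ReleaseStringUTFChars(js, utf);
        return s;
    }

Copying into std::string up front removes the leak risk of a missed Release call on early-return paths.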
0 : 1; + uint8_t type = (uint8_t)mediaType; // std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true); // th.detach(); - if (channel < 0x100) + if (channel == 0x200) { - pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true); + // Heartbeat + } - else + else if (channel >= 0x100) { uint32_t packetType = channel; packetType &= 0xFF; pTerminal->RequestSampling(packetType, (uint64_t)scheduleTime, 0); } + else + { + if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO) + { + pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true); + } + else if (mediaType == XY_MEDIA_TYPE_STREAM) + { + // virtual bool StartStream(unsigned char channel, unsigned char preset, const std::string& url, uint32_t* photoId = NULL); + // virtual bool StopStream(unsigned char channel, unsigned char preset, uint32_t photoId); + uint32_t photoId = 0; + std::string urlStr = jstring2string(env, url); + pTerminal->StartStream(channel, preset, urlStr, &photoId); + } + else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF) + { + pTerminal->StopStream(channel, preset, 0); + } + + } return JNI_TRUE; } - extern "C" JNIEXPORT jlong JNICALL Java_com_xypower_mpapp_MicroPhotoService_takePhoto( JNIEnv* env, @@ -429,11 +506,8 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto( osds[2].text = cfg.osd.rightBottom; osds[3].text = cfg.osd.leftBottom; - const char* pathStr = env->GetStringUTFChars(path, 0); - - device->TakePhoto(photoInfo, osds, MakeString(pathStr)); - - env->ReleaseStringUTFChars(path, pathStr); + std::string pathStr = jstring2string(env, path); + device->TakePhoto(photoInfo, osds, pathStr); // device->TurnOffCameraPower(NULL); // if (photoInfo.usbCamera) @@ -459,7 +533,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat( JNIEnv* env, jobject pThis, - jlong handler, jint signalLevel) { + jlong handler, jint signalLevel, jboolean scheduled) { CTerminal* pTerminal = reinterpret_cast(handler); if (pTerminal == NULL) @@ -473,7 +547,25 @@ Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat( device->UpdateSignalLevel(signalLevel); } - pTerminal->SendHeartbeat(); + pTerminal->SendHeartbeat(scheduled != JNI_FALSE); + +#ifdef OUTPUT_DBG_INFO +#if 0 + std::thread t([]() + { + time_t ts = time(NULL); + int ldr = GpioControl::getLightAdc(); + + char buf[64] = { 0 }; + snprintf(buf, sizeof(buf), "%s %d\r\n", FormatLocalDateTime(ts).c_str(), ldr); + + appendFile("/sdcard/com.xypower.mpapp/tmp/ldr.txt", (const unsigned char* )buf, strlen(buf)); + + + }); + t.detach(); +#endif +#endif return JNI_TRUE; } @@ -667,9 +759,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture( JNIEnv* env, jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) { - const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0); - std::string paramPathStr = MakeString(pParamPathStr); - env->ReleaseStringUTFChars(paramPath, pParamPathStr); + std::string paramPathStr = jstring2string(env, paramPath); const char* pBinPathStr = env->GetStringUTFChars(binPath, 0); std::string binPathStr = MakeString(pBinPathStr); @@ -877,19 +967,21 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished( extern "C" JNIEXPORT jboolean JNICALL Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs( JNIEnv* env, - jobject pThis, jlong handler) { + jobject 
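The notifyToTakePhoto dispatch above overloads the channel argument: 0x200 is reserved for heartbeat and triggers nothing here, values of 0x100 and above are sampling requests whose low byte selects the packet type (channel 0x103 would request sampling packet type 3), and real channels below 0x100 are routed by mediaType — photo and video go to RequestCapture, XY_MEDIA_TYPE_STREAM starts a stream to the supplied url via StartStream, and XY_MEDIA_TYPE_STREAM_OFF stops it via StopStream.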
pThis, jlong handler, jint channelToClean) { CTerminal* pTerminal = reinterpret_cast(handler); if (pTerminal == NULL) { return JNI_FALSE; } - - bool res = pTerminal->LoadAppConfigs(); + if (channelToClean != -1) + { + pTerminal->CleanCaptureSchedules((uint32_t)((int)channelToClean)); + } + bool res = pTerminal->LoadAppConfigs(true); return res ? JNI_TRUE : JNI_FALSE; } - extern "C" JNIEXPORT jboolean JNICALL Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto( JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) { @@ -936,9 +1028,18 @@ Java_com_xypower_mpapp_MicroPhotoService_infoLog( return; } - const char *msgStr = env->GetStringUTFChars(msg, 0); - XYLOG(XYLOG_SEVERITY_INFO, msgStr); - env->ReleaseStringUTFChars(msg, msgStr); + std::string str = jstring2string(env, msg); + XYLOG(XYLOG_SEVERITY_INFO, str.c_str()); +} + +extern "C" JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_usingEthernet( + JNIEnv* env, jclass cls) { +#ifdef USING_ETHERNET + return JNI_TRUE; +#else + return JNI_FALSE; +#endif } extern "C" JNIEXPORT void JNICALL @@ -1336,8 +1437,142 @@ Java_com_xypower_mpapp_MicroPhotoService_updateEhernet( CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice(); if (device != NULL) { - device->UpdateEthernet(static_cast(networkHandle), available != JNI_FALSE); + bool changed = false; + device->UpdateNetwork(static_cast(networkHandle), available != JNI_FALSE, false, changed); + if (changed) + { + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changed and Check socket connection"); + pTerminal->ResetNetwork(); + } + else + { + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changing Not Cause Socket Disconnection"); + } } + return JNI_TRUE; +} + +extern "C" JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_updateActiveNetwork( + JNIEnv* env, jobject pThis, jlong handle, jlong networkHandle, jboolean available) { + + CTerminal* pTerminal = reinterpret_cast(handle); + if (pTerminal == NULL) + { + return JNI_FALSE; + } + + CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice(); + if (device != NULL) + { + bool changed = false; + device->UpdateNetwork(static_cast(networkHandle), available != JNI_FALSE, true, changed); + } + + return JNI_TRUE; +} + + +extern "C" JNIEXPORT jlong JNICALL +Java_com_xypower_mpapp_MicroPhotoService_requestPowerControl( + JNIEnv* env, jclass cls, jint type) { + if (type == 1) // Net + { + NetCameraPowerCtrl* powerControl = new NetCameraPowerCtrl(2); + return reinterpret_cast(powerControl); + } + + return 0L; +} + +extern "C" JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_releasePowerControl( + JNIEnv* env, jclass cls, jlong powerControlHandle) { + + PowerControl* powerControl = reinterpret_cast(powerControlHandle); + delete powerControl; + + return JNI_TRUE; +} + +extern "C" +JNIEXPORT jint JNICALL +Java_com_xypower_mpapp_MicroPhotoService_getCustomAppId(JNIEnv *env, jobject thiz) { +#ifdef USING_N938 + return 2; +#elif defined(USING_PTZ) + return 1; +#else + return 0; +#endif +} + +extern "C" JNIEXPORT void JNICALL +Java_com_xypower_mpapp_MicroPhotoService_sendCameraCtrl( + JNIEnv* env, jobject pThis, jlong handle, jint channel, jint preset, jint cmd) { + + CTerminal* pTerminal = reinterpret_cast(handle); + if (pTerminal == NULL) + { + return; + } + + pTerminal->SendCameraCtrl(channel, preset, cmd); +} + + +extern "C" JNIEXPORT void JNICALL +Java_com_xypower_mpapp_MicroPhotoService_notifyTimeUpdated( + JNIEnv* env, jobject pThis, jlong handle) { + + CTerminal* pTerminal = 
reinterpret_cast(handle); + if (pTerminal == NULL) + { + return; + } + + std::thread t([pTerminal]() + { + pTerminal->OnTimeUpdated(); + }); + t.detach(); +} + +extern "C" +JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_sendBasicInfo(JNIEnv *env, jobject thiz, jlong handler) { + // TODO: implement sendBasicInfo() + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return JNI_FALSE; + } + pTerminal->SendBasicInfo(); + return JNI_TRUE; +} +extern "C" +JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_sendWorkStatus(JNIEnv *env, jobject thiz, jlong handler) { + // TODO: implement sendWorkStatus() + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return JNI_FALSE; + } + pTerminal->SendWorkStatus(); + return JNI_TRUE; +} +extern "C" +JNIEXPORT jboolean JNICALL +Java_com_xypower_mpapp_MicroPhotoService_sendFault(JNIEnv *env, jobject thiz, jlong handler, jstring faultCode, jstring faultInfo) { + // TODO: implement sendFault() + CTerminal* pTerminal = reinterpret_cast(handler); + if (pTerminal == NULL) + { + return JNI_FALSE; + } + std::string faultInfoStr = jstring2string(env, faultInfo); + pTerminal->SendFaultInfo(faultInfoStr); return JNI_TRUE; } \ No newline at end of file diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp index 19c07fbe..63b2bb67 100644 --- a/app/src/main/cpp/PhoneDevice.cpp +++ b/app/src/main/cpp/PhoneDevice.cpp @@ -9,6 +9,14 @@ #include "CvText.h" #include "PositionHelper.h" #include "DngCreator.h" +#include "media/Streaming.h" + +#include "netcamera/VendorCtrl.h" +#include "netcamera/YuShiCtrl.h" +#include "netcamera/HangYuCtrl.h" +#include "netcamera/HikonCtrl.h" + +#include "media/RTSPRecorder.h" #include #include @@ -25,6 +33,7 @@ #include #include #include +#include #ifdef USING_HDRPLUS #include @@ -39,15 +48,14 @@ #include namespace fs = std::filesystem; -#if 0 -#define CMD_SET_485_EN_STATE 131 -#define CMD_SET_CAM_3V3_EN_STATE 132 -#define CMD_SET_12V_EN_STATE 133 +#ifdef NDEBUG +#define MAX_NETCAMERA_RETRIES 128 +#else +#define MAX_NETCAMERA_RETRIES 32 #endif extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread); - bool makeHdr(vector& times, std::vector& paths, cv::Mat& rgb) { // Read images and exposure times @@ -296,7 +304,7 @@ CPhoneDevice::CPhoneCamera::~CPhoneCamera() m_dev = NULL; } -bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb) +bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat rgb) { if (m_dev != NULL) { @@ -450,6 +458,7 @@ void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) if (!(status == AMEDIA_OK && planeCount == 1)) { // LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount); + AImage_delete(image); return; } @@ -461,6 +470,7 @@ void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader) if (file && data && len) { fwrite(data, 1, len, file); + fdatasync(fileno(file)); fclose(file); } else @@ -478,8 +488,8 @@ int32_t CPhoneDevice::CJpegCamera::getOutputFormat() const return AIMAGE_FORMAT_JPEG; } -CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode, const std::string& nativeLibDir) - : mVersionCode(versionCode), m_nativeLibraryDir(nativeLibDir), m_network(NULL), m_netHandle(NETWORK_UNSPECIFIED) +CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir) + : 
mVersionCode(versionCode), m_nativeLibraryDir(nativeLibDir), m_network(NULL), m_defNetHandle(activeNetHandle), m_ethnetHandle(NETWORK_UNSPECIFIED), m_ethernetFailures(0) { mCamera = NULL; m_listener = NULL; @@ -490,21 +500,26 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa m_javaService = NULL; m_appPath = appPath; - mNetId = netId; - m_signalLevel = 0; m_signalLevelUpdateTime = time(NULL); mBuildTime = 0; - m_cameraStatus = false; - m_sensorsStatus = false; m_lastTime = 0; m_shouldStopWaiting = false; m_collecting = false; localDelayTime = GetMicroTimeStamp(); - RegisterHandlerForSignal(SIGUSR2); + // Reset all powers + char propValue[PROP_VALUE_MAX] = { 0 }; + int propRes = __system_property_get("ro.custom.ota.version", propValue); + if (propRes > 0) + { + if (!startsWith(propValue, "xinying_WP_")) + { + GpioControl::setInt(CMD_SET_INIT_STATUS, 1); + } + } - GpioControl::Startup(); + RegisterHandlerForSignal(SIGUSR2); LoadNetworkInfo(); @@ -529,7 +544,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa mRequestWakelockMid = env->GetMethodID(classService, "requestWakelock", "(Ljava/lang/String;J)V"); mReleaseWakelockMid = env->GetMethodID(classService, "releaseWakelock", "(Ljava/lang/String;)V"); - mGetSystemInfoMid = env->GetMethodID(classService, "getSystemInfo", "()Ljava/lang/String;"); + mGetFlowInfoMid = env->GetMethodID(classService, "getFlowInfo", "()Ljava/lang/String;"); mInstallAppMid = env->GetMethodID(classService, "installApp", "(Ljava/lang/String;J)Z"); mRebootMid = env->GetMethodID(classService, "reboot", "(IJLjava/lang/String;)V"); mEnableGpsMid = env->GetMethodID(classService, "enableGps", "(Z)V"); @@ -537,7 +552,8 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa mExecHdrplusMid = env->GetMethodID(classService, "execHdrplus", "(IILjava/lang/String;Ljava/lang/String;)I"); - mSetStaticIpMid = env->GetMethodID(classService, "setStaticNetwork", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); + mSetStaticIpMid = env->GetMethodID(classService, "setStaticNetwork", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V"); + mExecuteCmdMid = env->GetMethodID(classService, "executeCommand", "(Ljava/lang/String;)I"); mConvertDngToPngMid = env->GetMethodID(classService, "convertDngToPng", "(Ljava/lang/String;Ljava/lang/String;)V"); @@ -559,18 +575,23 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa GpioControl::setCam3V3Enable(true); GpioControl::setSpiPower(true); #endif + + m_ptzController = new PtzController(this); + m_ptzController->Startup(); } CPhoneDevice::~CPhoneDevice() { + std::map timers; m_devLocker.lock(); - for (auto it = mTimers.begin(); it != mTimers.end(); ++it) + mTimers.swap(timers); + m_devLocker.unlock(); + for (auto it = timers.begin(); it != timers.end(); ++it) { timer_delete((timer_t)it->first); delete it->second; } - mTimers.clear(); - m_devLocker.unlock(); + timers.clear(); JNIEnv* env = NULL; bool didAttachThread = false; @@ -600,8 +621,6 @@ CPhoneDevice::~CPhoneDevice() delete m_network; m_network = NULL; } - - GpioControl::Stop(); } void CPhoneDevice::SetListener(IListener* listener) @@ -623,6 +642,7 @@ void CPhoneDevice::SetRecognizationCfg(const IDevice::CFG_RECOGNIZATION* pRecogn } else { +#ifndef ISING_N938 XYLOG(XYLOG_SEVERITY_INFO, "AI Enabled and will Init NCNN"); ncnn_init(); mAIInitialized = true; @@ -635,6 +655,7 @@ void 
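// Notes on this hunk: the constructor now re-initialises the power rails
// (CMD_SET_INIT_STATUS) only when ro.custom.ota.version does not start with
// "xinying_WP_", and the destructor swaps the timer map out under the lock so
// that timer_delete() runs outside the critical section. Separately, the new
// "#ifndef ISING_N938" guard around the NCNN initialisation looks like it may
// be a typo for USING_N938 (the macro used elsewhere in these files); if so,
// the guard is effectively always true.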
CPhoneDevice::SetRecognizationCfg(const IDevice::CFG_RECOGNIZATION* pRecogn { XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Init NCNN"); } +#endif // #ifndef ISING_N938 } } else @@ -647,6 +668,24 @@ void CPhoneDevice::SetRecognizationCfg(const IDevice::CFG_RECOGNIZATION* pRecogn bool CPhoneDevice::BindNetwork(int sock) { +#ifdef USING_ETHERNET +#if 0 + m_devLocker.lock(); + net_handle_t defNetHandle = m_defNetHandle; + m_devLocker.unlock(); + if (defNetHandle != NETWORK_UNSPECIFIED) + { + int res = android_setsocknetwork(defNetHandle, sock); + if (res == -1) + { + int errcode = errno; + printf("android_setsocknetwork errno=%d", errcode); + } + return res == 0; + } +#endif +#endif // USING_ETHERNET + return true; } @@ -785,7 +824,7 @@ bool CPhoneDevice::SelfTest(std::string& result) } result += "4G信号强度:"; - result += std::to_string(m_signalLevel); + result += std::to_string(m_signalLevel & 0xFF); result += ITEM_SEP; result += "网络接口:"; @@ -863,6 +902,7 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro std::map powerInfo; int res = 0; int bv = -1; + std::map flowInfo; for (std::map::iterator it = properties.begin(); it != properties.end(); ++it) { @@ -991,7 +1031,14 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro { continue; } - snprintf(str, sizeof(str), "%.1f", (val / 1000.0)); + if (val < 3000) + { + strcpy(str, "0"); + } + else + { + snprintf(str, sizeof(str), "%.1f", (val / 1000.0)); + } it->second = std::string(str); break; } @@ -1020,14 +1067,30 @@ bool CPhoneDevice::QuerySystemProperties(std::map& pro } else if ((it->first == (PROP_SIGNAL_4G)) || (it->first == (PROP_SIGNAL_2G)) || (it->first == (PROP_SIGNAL_LEVEL))) { - it->second = std::to_string(m_signalLevel); + it->second = std::to_string((m_signalLevel & 0xFF)); + } + else if (it->first == (PROP_SIGNAL_STRENGTH)) + { + it->second = std::to_string((m_signalLevel >> 8)); + } + else if ((it->first == (PROP_MOBILE_FLOW_TX)) || (it->first == (PROP_MOBILE_FLOW_RX))|| (it->first == (PROP_MOBILE_FLOW_TODAY_TX))|| (it->first == (PROP_MOBILE_FLOW_TODAY_RX))) + { + if (flowInfo.empty()) + { + QueryFlowInfo(flowInfo); + } + auto it2 = flowInfo.find(it->first); + if (it2 != flowInfo.cend()) + { + it->second = it2->second; + } } /* else if (startsWith(it->first, PROP_JAVA_PREFIX)) { if (powerInfo.empty()) { - QueryPowerInfo(powerInfo); + QueryFlowInfo(powerInfo); } auto it2 = powerInfo.find(it->first); if (it2 != powerInfo.cend()) @@ -1074,7 +1137,7 @@ std::string CPhoneDevice::QueryCpuTemperature() return ""; } -void CPhoneDevice::QueryPowerInfo(std::map& powerInfo) +void CPhoneDevice::QueryFlowInfo(std::map& flowInfo) { JNIEnv* env = NULL; jboolean ret = JNI_FALSE; @@ -1084,7 +1147,7 @@ void CPhoneDevice::QueryPowerInfo(std::map& powerInfo) { ALOGE("Failed to get JNI Env"); } - jobject jobj = env->CallObjectMethod(m_javaService, mGetSystemInfoMid); + jobject jobj = env->CallObjectMethod(m_javaService, mGetFlowInfoMid); std::string str = jstring2string(env, (jstring)jobj); if (didAttachThread) { @@ -1094,7 +1157,7 @@ void CPhoneDevice::QueryPowerInfo(std::map& powerInfo) if (!str.empty()) { std::map queries = parseQuery(str); - powerInfo.swap(queries); + flowInfo.swap(queries); } } @@ -1125,10 +1188,13 @@ bool CPhoneDevice::InstallAPP(const std::string& path, unsigned int delayedTime) return true; } -bool CPhoneDevice::Reboot(int resetType, bool manually, const std::string& reason) +bool CPhoneDevice::Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout/* = 1000*/) { if (resetType == REBOOT_TYPE_DEVICE) 
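// BindNetwork() above currently returns true without binding; its disabled
// block follows the NDK pattern of pinning a socket to a specific network via
// android_setsocknetwork() from <android/multinetwork.h> (API 23+, linked
// against libandroid). The standalone sketch below shows that pattern for
// reference only and is not part of the patch.
#if 0
#include <android/multinetwork.h>
#include <cerrno>

// Returns true when the socket was successfully bound to the given handle.
static bool BindSocketToNetwork(net_handle_t network, int sock)
{
    if (network == NETWORK_UNSPECIFIED)
    {
        return false; // no usable network handle to bind to
    }
    if (android_setsocknetwork(network, sock) != 0)
    {
        int err = errno; // inspect or log as needed
        (void)err;
        return false;
    }
    return true;
}
#endif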
{ + std::string fileName = (manually ? "0_" : "1_") + std::to_string(GetMicroTimeStamp()); + fileName = m_appPath + (APP_PATH_TMP DIR_SEP_STR "reboot_") + fileName + std::string(".txt"); + writeFile(fileName, (const unsigned char*)reason.c_str(), reason.size()); // reboot the device if (!manually) { @@ -1136,21 +1202,24 @@ bool CPhoneDevice::Reboot(int resetType, bool manually, const std::string& reaso time_t ts = time(NULL); if ((ts - rebootTime) < 1800) { - XYLOG(XYLOG_SEVERITY_WARNING, "Frequent REBOOT DEV Cancelled Prev RBT Time=%lld", (int64_t)rebootTime); + XYLOG(XYLOG_SEVERITY_INFO, "Frequent REBOOT DEV Cancelled Prev RBT Time=%lld", (int64_t)rebootTime); return false; } } - std::thread t([]() - { - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); - GpioControl::reboot(); - }); - t.detach(); + XYLOG(XYLOG_SEVERITY_WARNING_EX, "REBOOT DEV manually=%d After %ums Reason:%s", manually ? 1 : 0, timeout, reason.c_str()); + // if (manually) + { +#ifdef USING_N938 + + GpioControl::reboot(); +#else + RestartApp(resetType, timeout, reason); +#endif + } } else { - long timeout = 1000; - RestartApp(resetType, timeout, reason); + RestartApp(resetType, (long)timeout, reason); } return true; @@ -1172,6 +1241,8 @@ void CPhoneDevice::RestartApp(int resetType, long timeout, const std::string& re jreason = env->NewStringUTF(reason.c_str()); } env->CallVoidMethod(m_javaService, mRebootMid, resetType, (jlong)timeout, jreason); + if (jreason != NULL) env->DeleteLocalRef(jreason); + if (didAttachThread) { m_vm->DetachCurrentThread(); @@ -1198,43 +1269,67 @@ bool CPhoneDevice::EnableGPS(bool enabled) return true; } -float CPhoneDevice::QueryBattaryVoltage(int timesForAvg, bool* isCharging) +int CPhoneDevice::QueryBattaryVoltage(int timesForAvg, int* chargingVoltage) { if (timesForAvg <= 0) { - return 0.0f; + timesForAvg = 1; } int val = 0; int totalVals = 0; - float chargingBusVoltage = 0.0f; - for (int idx = 0; idx < timesForAvg; idx++) + int cv = 0; + int idx = 0; + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BusVoltage"); + int matched = 0; + for (idx = 0; idx < timesForAvg; idx++) { + // auto now = std::chrono::system_clock::now(); val = GpioControl::getChargingBusVoltage(); + // auto now2 = std::chrono::system_clock::now(); + // auto duration = std::chrono::duration_cast(now2 - now).count(); + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BusVoltage val=%d, time=%lld", val, static_cast(duration)); if (val > 1000) { - chargingBusVoltage = (float)val / 1000.0; + cv += val; + matched++; break; } } + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BusVoltage end"); - if (isCharging != NULL) + if (chargingVoltage != NULL) { - *isCharging = chargingBusVoltage > DEFAULT_WARNING_CHARGING_BUS_VOL; + *chargingVoltage = matched > 0 ? (cv / matched) : 0; } - - int matched = 0; + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BatteryVoltage"); + matched = 0; for (int idx = 0; idx < timesForAvg; idx++) { + // auto now3 = std::chrono::system_clock::now(); val = GpioControl::getBatteryVoltage(); // // BatVol + // auto now4 = std::chrono::system_clock::now(); + // auto duration = std::chrono::duration_cast(now4 - now3).count(); + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BatteryVoltage val=%d, time=%lld", val, static_cast(duration)); if (val > 0) { - totalVals += val > BATTARY_VOLTAGE_MAX ? BATTARY_VOLTAGE_MAX : val; + totalVals += val; matched++; } } + // XYLOG(XYLOG_SEVERITY_DEBUG, "WorkStatus BatteryVoltage end"); + return (matched > 0) ? 
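// Unit note for QueryBattaryVoltage() above: it now returns the averaged raw
// reading as an integer in millivolts (the previous version divided by 1000
// and returned volts as a float), and the former bool* isCharging output has
// become an averaged charging-bus voltage in millivolts, leaving the
// "is it charging" decision to the caller.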
(totalVals / matched) : 0; +} + +uint32_t CPhoneDevice::QueryLdr() +{ + int val = GpioControl::getLightAdc(); + if (val > 0) + { + return static_cast(val); + } - return (matched > 0) ? ((float)totalVals / 1000.0 / matched) : 0; + return 0; } bool CPhoneDevice::RequestPosition() @@ -1315,16 +1410,21 @@ void CPhoneDevice::handleTimerImpl(CPhoneDevice::TIMER_CONTEXT* context) void CPhoneDevice::handleRebootTimer(union sigval v) { +#ifdef OUTPUT_DBG_INFO + const char *path = "/sdcard/com.xypower.mpapp/tmp/closeThreadReboot.txt"; + FILE* file = fopen(path, "w"); + if (file) { + fprintf(file, "Restarting app due to: Camera Can't Close\n"); + fclose(file); + } +#endif CPhoneDevice* pDevice = (CPhoneDevice*)(v.sival_ptr); + const IDevice::PHOTO_INFO& photoInfo = pDevice->mPhotoInfo; // Reboot APP - XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app"); - pDevice->RestartApp(0, 2000, "Camera Can't Close"); + XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app CH=%u PR=%X", photoInfo.channel, photoInfo.preset); + pDevice->RestartApp(REBOOT_TYPE_APP, 30000, "Camera Cant Close"); } -// void CPhoneDevice::handleRebootTimerImpl() -// { -// } - IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times/* = 0*/) { struct sigevent evp = { 0 }; @@ -1358,7 +1458,7 @@ IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigne context->uid = (uint64_t)timer; ts.it_value.tv_sec = (timeout / 1000); ts.it_value.tv_nsec = (timeout % 1000) * 1000; - if (times != 1) + if (times > 1) { ts.it_interval.tv_sec = ts.it_value.tv_sec; ts.it_interval.tv_nsec = ts.it_value.tv_nsec; @@ -1380,20 +1480,26 @@ IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigne bool CPhoneDevice::UnregisterTimer(IDevice::timer_uid_t uid) { - timer_t timer = (timer_t)uid; - int res = timer_delete(timer); - + TIMER_CONTEXT *timerContext = NULL; + std::map::iterator it; m_devLocker.lock(); - std::map::iterator it = mTimers.find(uid); + it = mTimers.find(uid); if (it != mTimers.end()) { - delete it->second; + timerContext = it->second; mTimers.erase(it); - m_devLocker.unlock(); + } + m_devLocker.unlock(); + if (timerContext != NULL) + { + ALOGI("UnregisterTimer: %lld", uid); + timer_t timer = (timer_t)uid; + int res = timer_delete(timer); + delete timerContext; + return true; } - m_devLocker.unlock(); return false; } @@ -1446,7 +1552,10 @@ bool CPhoneDevice::ReleaseWakelock(uint64_t wakelock) } jstring jname = env->NewStringUTF(name.c_str()); - env->CallVoidMethod(m_javaService, mReleaseWakelockMid, jname); + if (!env->IsSameObject(m_javaService, NULL)) + { + env->CallVoidMethod(m_javaService, mReleaseWakelockMid, jname); + } env->DeleteLocalRef(jname); // env->ReleaseStringUTFChars(jname, name.c_str()); if (didAttachThread) @@ -1486,159 +1595,354 @@ IDevice::timer_uid_t CPhoneDevice::RegisterHeartbeat(unsigned int timerType, uns return uid; } -bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector& osds, std::shared_ptr powerCtrlPtr) +bool CPhoneDevice::TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector& osds, std::shared_ptr powerCtrlPtr) { // AutoEnv autoEnv(pThis->m_vm); time_t ts = time(NULL); uint32_t waitTime = localPhotoInfo.selfTestingTime; +#if 0 if(!GpioControl::GetSelftestStatus(waitTime)) { - waitTime = (waitTime != 0) ? 
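// The RegisterTimer()/UnregisterTimer() hunks above are built on POSIX
// per-process timers: it_value arms the first expiry, a non-zero it_interval
// makes the timer periodic, and the patch narrows the periodic case from
// "times != 1" to "times > 1" so the default times == 0 no longer produces a
// repeating timer. A self-contained sketch of the pattern follows for
// orientation only; the helper name and the clock choice are illustrative and
// not part of the patch. Note that converting the millisecond remainder into
// tv_nsec needs a factor of 1,000,000, which is worth double-checking in the
// surrounding code.
#if 0
#include <csignal>
#include <ctime>
#include <cstdint>

static void OnTimerFired(union sigval v)
{
    (void)v.sival_ptr; // the registered context would be unpacked here
}

static bool CreateMsTimer(uint32_t timeoutMs, bool periodic, void* data, timer_t* outTimer)
{
    struct sigevent evp = { 0 };
    evp.sigev_notify = SIGEV_THREAD;            // callback runs on its own thread
    evp.sigev_value.sival_ptr = data;
    evp.sigev_notify_function = OnTimerFired;

    if (timer_create(CLOCK_MONOTONIC, &evp, outTimer) != 0)
    {
        return false;
    }

    struct itimerspec ts = { 0 };
    ts.it_value.tv_sec = timeoutMs / 1000;
    ts.it_value.tv_nsec = (long)(timeoutMs % 1000) * 1000000L; // ms -> ns
    if (periodic)
    {
        ts.it_interval = ts.it_value;           // zero interval => one-shot
    }
    if (timer_settime(*outTimer, 0, &ts, NULL) != 0)
    {
        timer_delete(*outTimer);
        return false;
    }
    return true;
}
#endif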
(waitTime * 1024) : 10240; + m_isSelfTesting.store(true); + waitTime = (waitTime != 0) ? (waitTime * 1000) : 10000; std::this_thread::sleep_for(std::chrono::milliseconds(waitTime)); + m_isSelfTesting.store(false); } +#endif + + std::shared_ptr ethernetPowerCtrl = std::make_shared(localPhotoInfo.closeDelayTime); + + uint64_t startTime = GetMicroTimeStamp(); SetStaticIp(); - std::this_thread::sleep_for(std::chrono::milliseconds(256)); + uint64_t ethDuration = GetMicroTimeStamp() - startTime; + if (ethDuration < 5000) + { + std::this_thread::sleep_for(std::chrono::milliseconds(5000 - ethDuration)); + } + net_handle_t netHandle = GetEthnetHandle(); + + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON Init Time=%u NetHandle=%lld", (uint32_t)ethDuration, netHandle); + + std::string ip = GetIpStr(localPhotoInfo.ip); + std::unique_ptr vendorCtrl(MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip, localPhotoInfo.userName, localPhotoInfo.password, netHandle, false)); + + NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 }; + if (!vendorCtrl) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId); + TakePhotoCb(0, localPhotoInfo, "", 0); + return false; + } - net_handle_t netHandle = GetNetHandle(); + strcpy(netPhotoInfo.outputPath, path.c_str()); + + std::vector img; + cv::Mat rgb; - if (netHandle == 0) + int netCaptureResult = 0; + std::string lastError; + int idx = 0; + vendorCtrl->SetResolution(localPhotoInfo.channel, localPhotoInfo.cameraId, localPhotoInfo.width, localPhotoInfo.height); + for (; idx < MAX_NETCAMERA_RETRIES; idx++) { - // Wait about 10s - for (int idx = 0; idx < 84; idx++) + netHandle = GetEthnetHandle(); + vendorCtrl->UpdateNetHandle(netHandle); + netPhotoInfo.netHandle = netHandle; + + XYLOG(XYLOG_SEVERITY_INFO, "NetCapture %d NetHandle=%lld PHOTOID=%u", idx, netHandle, localPhotoInfo.photoId); + +#if 0 + if(localPhotoInfo.vendor == 3) { - std::this_thread::sleep_for(std::chrono::milliseconds(128)); - netHandle = GetNetHandle(); + UniviewResolutionSet(netPhotoInfo, localPhotoInfo.cameraId,localPhotoInfo.resolution); + } +#endif + img.clear(); + netCaptureResult = vendorCtrl->TakePhoto(localPhotoInfo.cameraId, img); - if (netHandle != 0) + if (netCaptureResult && !img.empty()) + { + if(img.size() <= 1000) { + lastError = ByteArrayToString(&img[0], img.size()); + } + rgb = cv::imdecode(cv::Mat(img), cv::IMREAD_COLOR); + if (!rgb.empty()) + { + XYLOG(XYLOG_SEVERITY_INFO, "NetCapture Succeeded PHOTOID=%u Img Size=%u Retries=%d", localPhotoInfo.photoId, (uint32_t)img.size(), idx); break; } } + if (vendorCtrl->IsTimeout()) + { + idx += 9; + } + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); } - if (netHandle == 0) + // if (netCaptureResult) + // { + // ShutdownEthernet(); + //} + ethernetPowerCtrl.reset(); + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF"); + + if (netCaptureResult) { - // timeout - XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId); - TakePhotoCb(0, localPhotoInfo, "", 0); - return false; + m_ethernetFailures = 0; } else { - XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId); + m_ethernetFailures++; + if (m_ethernetFailures > 3) + { + time_t rebootTime = 
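// Shape of the net-camera capture loop above: up to MAX_NETCAMERA_RETRIES
// attempts (128 in release builds, 32 otherwise), the Ethernet handle is
// refreshed and handed to the vendor controller on every attempt, success
// requires a payload that cv::imdecode() can turn into a non-empty image, a
// payload of 1000 bytes or less is kept as the last error text, and a
// vendor-reported timeout advances the counter by ten so an unreachable
// camera does not burn the whole retry budget one second at a time. After the
// loop, consecutive failures are counted in m_ethernetFailures, and more than
// three of them trigger a device reboot, throttled to once per 1800 seconds.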
GetRebootTime(); + if (ts - rebootTime > 1800) + { + Reboot(REBOOT_TYPE_DEVICE, true, "Ethernet Not Existing"); + } + } } - NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 }; - if (localPhotoInfo.vendor == 1) + + if (!rgb.empty()) { - // Hai Kang - netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST; - snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?"); + time_t takingTime = ts; + if (localPhotoInfo.remedy != 0) + { + time_t scheduleTime = localPhotoInfo.scheduleTime; + if (scheduleTime == 0) + { + scheduleTime = localPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) + { + takingTime = scheduleTime + localPhotoInfo.channel * 2; + } + } + + // Notify to take next photo + TakePhotoCb(1, localPhotoInfo, "", takingTime); + +#ifdef _DEBUG + // cv::imwrite("/sdcard/com.xypower.mpapp/tmp/netimg2.jpg", rgb); +#endif + netCaptureResult = PostProcessPhoto(localPhotoInfo, osds, path, "", rgb, takingTime); } - else if (localPhotoInfo.vendor == 2) + else { - // Hang Yu - strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi"); + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u Retries=%d LastErr:%s", + (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId, idx, lastError.c_str()); + TakePhotoCb(0, localPhotoInfo, "", 0); } - else if (localPhotoInfo.vendor == 3) + + return true; +} + + +bool CPhoneDevice::TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector& osds, std::shared_ptr powerCtrlPtr) +{ + // AutoEnv autoEnv(pThis->m_vm); + time_t ts = time(NULL); + uint32_t waitTime = localPhotoInfo.selfTestingTime; + if(!GpioControl::GetSelftestStatus(waitTime)) { - // Yu Shi - netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST; - int streamSid = 0; // should put into config - snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", (uint32_t)localPhotoInfo.cameraId, streamSid); + m_isSelfTesting.store(true); + waitTime = (waitTime != 0) ? 
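// The remedy handling that now appears in each capture path (net camera,
// serial camera, and the onOneCapture/onBurstCapture callbacks below) follows
// a single rule: when remedy is enabled and the shot completes more than 30
// seconds after the scheduled moment, the recorded photo time is snapped back
// to the schedule (falling back to requestTime when scheduleTime is 0) plus an
// offset of channel * 2 seconds, so different channels do not end up with an
// identical remedied timestamp.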
(waitTime * 1000) : 10000; + std::this_thread::sleep_for(std::chrono::milliseconds(waitTime)); + m_isSelfTesting.store(false); } - else if (localPhotoInfo.vendor == 5) + + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON"); + std::shared_ptr ethernetPowerCtrl = std::make_shared(1); + + uint64_t startTime = GetMicroTimeStamp(); + SetStaticIp(); + uint64_t ethDuration = GetMicroTimeStamp() - startTime; + if (ethDuration < 5000) { - // Hang Yu - New - netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC; - // http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080 - // http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg - // http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg - snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)localPhotoInfo.cameraId); + std::this_thread::sleep_for(std::chrono::milliseconds(5000 - ethDuration)); } - else + + net_handle_t netHandle = GetEthnetHandle(); + std::string ip = GetIpStr(localPhotoInfo.ip); + std::unique_ptr vendorCtrl(MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip, localPhotoInfo.userName, localPhotoInfo.password, netHandle, false)); + if (!vendorCtrl) { XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId); TakePhotoCb(0, localPhotoInfo, "", 0); return false; } - struct in_addr addr; - addr.s_addr = localPhotoInfo.ip; - strcpy(netPhotoInfo.ip, inet_ntoa(addr)); - strcpy(netPhotoInfo.outputPath, path.c_str()); - if (!localPhotoInfo.userName.empty()) + if(localPhotoInfo.vendor == 5) { - size_t len = std::min(sizeof(netPhotoInfo.userName) - 1, localPhotoInfo.userName.size()); - strncpy(netPhotoInfo.userName, localPhotoInfo.userName.c_str(), len); + vendorCtrl->SetOsd(localPhotoInfo.cameraId, osds[0].text, 0); + vendorCtrl->SetResolution(localPhotoInfo.cameraId, 1, localPhotoInfo.width, localPhotoInfo.height); } - if (!localPhotoInfo.password.empty()) + + std::string streamingUrl = vendorCtrl->GetStreamingUrl(localPhotoInfo.cameraId); + + if (streamingUrl.empty()) { - size_t len = std::min(sizeof(netPhotoInfo.password) - 1, localPhotoInfo.password.size()); - strncpy(netPhotoInfo.password, localPhotoInfo.password.c_str(), len); + XYLOG(XYLOG_SEVERITY_ERROR, "Invalid Streaming URL CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId); + TakePhotoCb(0, localPhotoInfo, "", 0); + return false; } + // strcpy(netPhotoInfo.outputPath, path.c_str()); + // strcpy(netPhotoInfo.interface, "eth0"); - std::vector img; + time_t photoTime = time(NULL); + std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4"; + // RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000); + // dumper.start(); + XYLOG(XYLOG_SEVERITY_DEBUG, "Start Recording CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId); - bool netCaptureResult = false; - for (int idx = 0; idx < 3; idx++) + if (vendorCtrl->HasAuthOnStreaming()) { - netHandle = GetNetHandle(); - netPhotoInfo.netHandle = netHandle; + dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, localPhotoInfo.userName, localPhotoInfo.password, GetEthnetHandle()); + } + else + { + 
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, "", "", GetEthnetHandle()); + } - XYLOG(XYLOG_SEVERITY_INFO, "NetCapture %d NetHandle=%lld PHOTOID=%u", idx, netHandle, localPhotoInfo.photoId); + XYLOG(XYLOG_SEVERITY_DEBUG, "Stop Recording CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId); - if(localPhotoInfo.vendor == 3) + ethernetPowerCtrl.reset(); + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF"); + + std::string fullPath = endsWith(mPath, ".mp4") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo, photoTime)); + + if (existsFile(tmpFile)) + { + std::rename(tmpFile.c_str(), fullPath.c_str()); + TakePhotoCb(3, localPhotoInfo, fullPath, photoTime); + } + else + { + TakePhotoCb(0, localPhotoInfo, "", 0); + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, + localPhotoInfo.photoId, ip.c_str(), streamingUrl.c_str()); + } + // Notify to take next photo + // TakePhotoCb(1, localPhotoInfo, "", takingTime); + + // XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, + // localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url); + // TakePhotoCb(0, localPhotoInfo, "", 0); + + return true; +} + +bool CPhoneDevice::StartPushStreaming(const IDevice::PHOTO_INFO& photoInfo, const std::string& url, const std::vector& osds, std::shared_ptr powerCtrlPtr) +{ + if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM) + { + time_t ts = time(NULL); + uint32_t waitTime = photoInfo.selfTestingTime; + if(!GpioControl::GetSelftestStatus(waitTime)) { - UniviewResolutionSet(netPhotoInfo, localPhotoInfo.cameraId,localPhotoInfo.resolution); + m_isSelfTesting.store(true); + waitTime = (waitTime != 0) ? 
(waitTime * 1000) : 10000; + std::this_thread::sleep_for(std::chrono::milliseconds(waitTime)); + m_isSelfTesting.store(false); } - img.clear(); - netCaptureResult = requestCapture(localPhotoInfo.channel, localPhotoInfo.preset, netPhotoInfo, img); - if (netCaptureResult) + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON"); + std::shared_ptr ethernetPowerCtrl = std::make_shared(1); + // std::shared_ptr ethernetPowerCtrl; + + uint64_t startTime = GetMicroTimeStamp(); + SetStaticIp(); + uint64_t ethDuration = GetMicroTimeStamp() - startTime; + if (ethDuration < 5000) { - XYLOG(XYLOG_SEVERITY_INFO, "NetCapture Succeeded PHOTOID=%u Img Size=%u", localPhotoInfo.photoId, (uint32_t)img.size()); - break; + std::this_thread::sleep_for(std::chrono::milliseconds(5000 - ethDuration)); } - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); - } - cv::Mat rgb; - if (netCaptureResult && !img.empty()) - { - rgb = cv::imdecode(cv::Mat(img), cv::IMREAD_COLOR); - } - if (!rgb.empty()) - { - time_t takingTime = ts; - if(localPhotoInfo.scheduleTime != 0) + net_handle_t netHandle = GetEthnetHandle(); + std::map::iterator it = m_streamings.find(photoInfo.channel); + if (it != m_streamings.end()) { - takingTime = localPhotoInfo.scheduleTime; + it->second.stream->stop(); + it->second.stream.reset(); + it->second.powerCtrl.reset(); + it->second.ethernetPowerCtrl.reset(); + m_streamings.erase(it); } - if (localPhotoInfo.remedy != 0) + + std::string ip = GetIpStr(photoInfo.ip); + std::unique_ptr vendorCtrl (MakeVendorCtrl(photoInfo.vendor, photoInfo.channel, ip, photoInfo.userName, photoInfo.password, netHandle, true)); + if (!vendorCtrl) { - if ((takingTime - localPhotoInfo.scheduleTime) > 30) - { - takingTime = localPhotoInfo.scheduleTime + localPhotoInfo.channel * 2; - } + XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.vendor, (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); + TakePhotoCb(0, photoInfo, "", 0); + return false; } - localPhotoInfo.photoTime = takingTime; + std::string streamingUrl = vendorCtrl->GetStreamingUrl(photoInfo.cameraId); - // Notify to take next photo - TakePhotoCb(1, localPhotoInfo, "", takingTime); + if (streamingUrl.empty()) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Invalid Streaming URL CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); + TakePhotoCb(0, photoInfo, "", 0); + return false; + } -#ifdef _DEBUG - // cv::imwrite("/sdcard/com.xypower.mpapp/tmp/netimg2.jpg", rgb); + RtspForwarder* forwarder = new RtspForwarder(streamingUrl, url); + + bool res = false; + if (vendorCtrl->HasAuthOnStreaming()) + { + forwarder->setAuth(photoInfo.userName, photoInfo.password); + } + + STREAMING_CONTEXT ctx; + ctx.stream = std::shared_ptr((Streaming*)forwarder); + ctx.powerCtrl = powerCtrlPtr; + ctx.ethernetPowerCtrl = ethernetPowerCtrl; + m_streamings[photoInfo.channel] = ctx; + + // Optional: Set callback to process video frames +#if 0 + forwarder->setFrameCallback([](uint8_t* data, int linesize, int width, int height) { + // Process frame data here + // Example: Add OSD overlay + }); +#endif + + XYLOG(XYLOG_SEVERITY_INFO, "Start Streaming CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); + // Start forwarding + res = forwarder->start(); +#if 0 + // Initialize with RTSP input and RTMP output + if (!res) + { + XYLOG(XYLOG_SEVERITY_ERROR, "TP: Failed to open stream: %s (%u/%02X) PHOTOID=%u", 
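// The push-streaming path above keeps one STREAMING_CONTEXT per channel in
// m_streamings: it holds shared_ptr references to the RtspForwarder and to
// both power controllers, so camera and Ethernet power stay up for as long as
// the forwarder is running. Starting a stream first tears down any previous
// context registered for the channel, and XY_MEDIA_TYPE_STREAM_OFF releases
// the same context, which is what finally allows the power controllers to
// switch the rails off.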
streamingUrl.c_str(), (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); + delete forwarder; + return -1; + } #endif - netCaptureResult = PostProcessPhoto(localPhotoInfo, osds, path, "", rgb); + // Wait for user input to stop + // std::cout << "Press Enter to stop streaming..." << std::endl; + // std::cin.get(); + + // forwarder.stop(); } - else + else if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF) { - XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, - localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url); - TakePhotoCb(0, localPhotoInfo, "", 0); + XYLOG(XYLOG_SEVERITY_INFO, "Stop Streaming CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId); + + auto it = m_streamings.find(photoInfo.channel); + if (it != m_streamings.end()) + { + it->second.stream->stop(); + it->second.stream.reset(); + it->second.powerCtrl.reset(); + it->second.ethernetPowerCtrl.reset(); + m_streamings.erase(it); + } } return true; @@ -1696,11 +2000,11 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< if (photoInfo.cameraType == CAM_TYPE_MIPI) { - powerCtrlPtr = std::shared_ptr(new CameraPowerCtrl(mPhotoInfo.closeDelayTime)); + powerCtrlPtr = std::shared_ptr(new CameraPowerCtrl(0)); } else if (photoInfo.cameraType == CAM_TYPE_USB) { - powerCtrlPtr = std::shared_ptr(new UsbCameraPowerCtrl(mPhotoInfo.closeDelayTime)); + powerCtrlPtr = std::shared_ptr(new UsbCameraPowerCtrl(0)); } else if (photoInfo.cameraType == CAM_TYPE_NET) { @@ -1715,10 +2019,13 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< } else if (photoInfo.cameraType == CAM_TYPE_PLZ) { +#if 0 + XYLOG(XYLOG_SEVERITY_DEBUG, "PTZ PWR turned ON"); if(mPhotoInfo.scheduleTime == 0) powerCtrlPtr = std::shared_ptr(new PlzCameraPowerCtrl(mPhotoInfo.closeDelayTime)); else powerCtrlPtr = std::shared_ptr(new PlzCameraPowerCtrl(2)); +#endif } res = true; @@ -1742,6 +2049,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< params.wait3ALocked = mPhotoInfo.wait3ALocked; params.customHdr = mPhotoInfo.customHdr; params.hdrStep = mPhotoInfo.hdrStep; + params.minFps = mPhotoInfo.minFps; params.burstRawCapture = mPhotoInfo.usingRawFormat; params.burstCaptures = mPhotoInfo.burstCaptures; if (params.requestTemplate <= 0 || params.requestTemplate > 5) @@ -1812,8 +2120,17 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< osds.swap(mOsds); IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo; - std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() mutable + std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() { + uint32_t waitTime = localPhotoInfo.selfTestingTime; + XYLOG(XYLOG_SEVERITY_INFO, "Camera is SelfTesting Time=%u s", waitTime); + waitTime = (waitTime <= 5 ) ? 
0 : ((waitTime - 5) * 1000); + if (waitTime > 0) + { + std::this_thread::sleep_for(std::chrono::milliseconds(waitTime)); + } + XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting Finished"); + pThis->TakePhotoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr); }); @@ -1829,7 +2146,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< vector osds; osds.swap(mOsds); - std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial]() mutable + std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial]() { time_t ts = time(NULL); if(localPhotoInfo.scheduleTime != 0) @@ -1843,7 +2160,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< std::this_thread::sleep_for(std::chrono::seconds(5)); } - CameraPhotoCmd(ts, localPhotoInfo.channel, 0, localPhotoInfo.resolution, localPhotoInfo.preset, param.serfile, param.baud, param.addr); + CameraPhotoCmd(ts, localPhotoInfo.channel, TAKE_PHOTO, localPhotoInfo.resolution, localPhotoInfo.preset, param.serfile, param.baud, param.addr); XYLOG(XYLOG_SEVERITY_INFO, "Taking photo over"); if(localPhotoInfo.scheduleTime == 0) { @@ -1857,9 +2174,14 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< time_t takingTime = ts; if (localPhotoInfo.remedy != 0) { - if ((takingTime - localPhotoInfo.scheduleTime) > 30) + time_t scheduleTime = localPhotoInfo.scheduleTime; + if (scheduleTime == 0) { - takingTime = localPhotoInfo.scheduleTime + localPhotoInfo.channel * 2; + scheduleTime = localPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) + { + takingTime = scheduleTime + localPhotoInfo.channel * 2; } } IMAGE_DEF photo = { 0 }; @@ -1868,13 +2190,12 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< if(photo.state == 5) { XYLOG(XYLOG_SEVERITY_INFO,"Get Serials Photo, PhotoID = %s", photo.photoname); - localPhotoInfo.photoTime = takingTime; cv::Mat img = cv::imread(photo.photoname, cv::IMREAD_COLOR); if (!img.empty()) { int result = std::remove(photo.photoname); pThis->TakePhotoCb(1, localPhotoInfo, "", takingTime, objects); - pThis->PostProcessPhoto(localPhotoInfo, osds, path, "", img); + pThis->PostProcessPhoto(localPhotoInfo, osds, path, "", img, takingTime); } }else @@ -1890,6 +2211,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< } else if (mPhotoInfo.mediaType == 0 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ)) { + m_ptzController->AddPhotoCommand(mPhotoInfo, mPath, mOsds); +#if 0 uint64_t wid_serial = RequestWakelock(0); CPhoneDevice* pThis = this; IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo; @@ -1898,14 +2221,17 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< vector osds; osds.swap(mOsds); - std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable + std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() { uint32_t waitTime = localPhotoInfo.selfTestingTime; if(!GpioControl::GetSelftestStatus(waitTime)) { - XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting, selfTestingtime=%u", waitTime); - waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240; - std::this_thread::sleep_for(std::chrono::milliseconds(waitTime)); + pThis->m_isSelfTesting.store(true); + time_t remaintime = GpioControl::GetSelfTestRemain(waitTime); + XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime); + remaintime = (remaintime != 0) ? 
(remaintime * 1000) : 10000; + std::this_thread::sleep_for(std::chrono::milliseconds(remaintime)); + pThis->m_isSelfTesting.store(false); XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over"); } @@ -1913,16 +2239,110 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< { XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset); CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr); - std::this_thread::sleep_for(std::chrono::seconds(5)); + std::this_thread::sleep_for(std::chrono::seconds(15)); } pThis->TakePhotoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr); pThis->ReleaseWakelock(wid_serial); }); + t.detach(); +#endif + } + else if ((mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF) && (mPhotoInfo.cameraType == CAM_TYPE_NET || mPhotoInfo.cameraType == CAM_TYPE_PLZ)) + { + XYLOG(XYLOG_SEVERITY_INFO, "Start TP(Streaming) CH=%u PR=%X PHOTOID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.preset, mPhotoInfo.photoId); + + // Start Thread + CPhoneDevice* pThis = this; + + vector osds; + osds.swap(mOsds); + IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo; + + if(mPhotoInfo.cameraType == CAM_TYPE_PLZ) + { + pThis->TakePhotoCb(1, localPhotoInfo, "", 0); + m_ptzController->AddPhotoCommand(localPhotoInfo, path, osds); + }else + { + std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() + { + pThis->TakePhotoCb(1, localPhotoInfo, "", 0); + pThis->StartPushStreaming(localPhotoInfo, path, osds, powerCtrlPtr); + }); + + t.detach(); + } + } + else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ)) + { + m_ptzController->AddPhotoCommand(mPhotoInfo, mPath, mOsds); +#if 0 + uint64_t wid_serial = RequestWakelock(0); + CPhoneDevice* pThis = this; + IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo; + IDevice::SerialsPhotoParam param = { "", 0, 0 }; + GetPhotoSerialsParamCb(param); + vector osds; + osds.swap(mOsds); + std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() + { + uint32_t waitTime = localPhotoInfo.selfTestingTime; + if(!GpioControl::GetSelftestStatus(waitTime)) + { + pThis->m_isSelfTesting.store(true); + time_t remaintime = GpioControl::GetSelfTestRemain(waitTime); + XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime); + remaintime = (remaintime != 0) ? 
(remaintime * 1000) : 10000; + std::this_thread::sleep_for(std::chrono::milliseconds(remaintime)); + pThis->m_isSelfTesting.store(false); + XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over"); + } + + if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF) + { + XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset); + CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr); + std::this_thread::sleep_for(std::chrono::seconds(10)); + } + + pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr); + pThis->ReleaseWakelock(wid_serial); + }); + + t.detach(); +#endif + } + else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_NET)) + { + uint64_t wid_serial = RequestWakelock(0); + CPhoneDevice* pThis = this; + IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo; + vector osds; + osds.swap(mOsds); + + std::thread t([localPhotoInfo, pThis, path, osds, wid_serial, powerCtrlPtr]() + { + uint32_t waitTime = localPhotoInfo.selfTestingTime; + if(!GpioControl::GetSelftestStatus(waitTime)) + { + pThis->m_isSelfTesting.store(true); + time_t remaintime = GpioControl::GetSelfTestRemain(waitTime); + XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime); + remaintime = (remaintime != 0) ? (remaintime * 1000) : 10000; + std::this_thread::sleep_for(std::chrono::milliseconds(remaintime)); + pThis->m_isSelfTesting.store(false); + XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over"); + } + + pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr); + pThis->ReleaseWakelock(wid_serial); + }); + t.detach(); } - else if (mPhotoInfo.usingSysCamera == 1) + else if (mPhotoInfo.cameraType == CAM_TYPE_SYSTEM) { JNIEnv* env = NULL; bool didAttachThread = false; @@ -1983,7 +2403,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector< } } - int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1) * 90; + //为修复宁夏短视频拍照翻转的bug 临时修改方法 + int orientation = mPhotoInfo.orientation == 0 ? -1 : (mPhotoInfo.orientation - 1); jboolean photoOrVideo = mPhotoInfo.mediaType == 0 ? 
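// (English reading of the Chinese note above: temporary workaround for a bug
// in the Ningxia deployment where short-video photos came out rotated; the
// orientation passed to startRecording is now (orientation - 1) rather than
// (orientation - 1) * 90 degrees.)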
JNI_TRUE : JNI_FALSE; env->CallVoidMethod(m_javaService, mStartRecordingMid, photoOrVideo, mPhotoInfo.cameraId, (uint64_t)mPhotoInfo.photoId, mPhotoInfo.duration, mPhotoInfo.width, mPhotoInfo.height, mPhotoInfo.duration, orientation, @@ -2007,25 +2428,20 @@ bool CPhoneDevice::OpenPTZSensors(uint32_t sec) { uint64_t wid = RequestWakelock(0); unsigned long long time_now = GetMicroTimeStamp(); - { - std::lock_guard lock(m_cameraLocker); - if(time_now < localDelayTime-1000) - { - m_sensorsStatus = true; - m_cameraStatus = true; - OpenSensors(MAIN_POWER_OPEN); - OpenSensors(CAMERA_SENSOR_OPEN); - }else if (!m_cameraStatus && !m_sensorsStatus) - { - m_sensorsStatus = true; - OpenSensors(MAIN_POWER_OPEN); - OpenSensors(CAMERA_SENSOR_OPEN); - } + OpenSensors(MAIN_POWER_OPEN); + OpenSensors(CAMERA_SENSOR_OPEN); + + if (m_isSelfTesting.load() || (GpioControl::GetCamerastatus() && GpioControl::GetSelftestStatus(sec))) + { + ReleaseWakelock(wid); + return true; } - if(m_sensorsStatus && !m_cameraStatus) + + if(GpioControl::GetCamerastatus() && !GpioControl::GetSelftestStatus(sec)) { + m_isSelfTesting.store(true); XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting, selfTestingtime=%u", sec); auto start = std::chrono::steady_clock::now(); while (std::chrono::steady_clock::now() - start < std::chrono::seconds(sec)) @@ -2034,62 +2450,45 @@ bool CPhoneDevice::OpenPTZSensors(uint32_t sec) { CloseSensors(CAMERA_SENSOR_OPEN, 0); CloseSensors(MAIN_POWER_OPEN, 0); - m_cameraStatus = false; - m_sensorsStatus = false; m_shouldStopWaiting.store(false); + m_isSelfTesting.store(false); ReleaseWakelock(wid); return false; } std::this_thread::sleep_for(std::chrono::milliseconds(200)); } + m_isSelfTesting.store(false); + m_shouldStopWaiting.store(false); + unsigned long long time_over = GetMicroTimeStamp(); + XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over, selfTestingtime=%u", (time_over - time_now)/1000); } -// if(m_sensorsStatus && !m_cameraStatus) -// std::this_thread::sleep_for(std::chrono::seconds(sec)); - - { - std::lock_guard lock(m_cameraLocker); -// std::unique_lock lock(m_cameraLocker); - if (!m_cameraStatus && m_sensorsStatus) - { - m_cameraStatus = true; - unsigned long long time_over = GetMicroTimeStamp(); - XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over, selfTestingtime=%u", (time_over - time_now)/1000); - } - } ReleaseWakelock(wid); + return true; - return m_cameraStatus; } bool CPhoneDevice::ClosePTZSensors(uint32_t delayedCloseTime) { - localDelayTime = GetMicroTimeStamp() + delayedCloseTime*1000; - - if(m_sensorsStatus && !m_cameraStatus) + if(m_isSelfTesting.load()) { - localDelayTime = GetMicroTimeStamp(); m_shouldStopWaiting.store(true); - } - else + }else { - std::lock_guard lock(m_cameraLocker); -// std::unique_lock lock(m_cameraLocker); CloseSensors(CAMERA_SENSOR_OPEN, delayedCloseTime); CloseSensors(MAIN_POWER_OPEN, delayedCloseTime); - m_cameraStatus = false; - m_sensorsStatus = false; } + return true; } -bool CPhoneDevice::GetPTZSensorsStatus() +bool CPhoneDevice::GetPTZSensorsStatus(time_t waittime) { - return m_sensorsStatus; + return GpioControl::GetSelftestStatus(waittime); } bool CPhoneDevice::GetCameraStatus() { - return m_cameraStatus; + return GpioControl::GetCamerastatus(); } bool CPhoneDevice::CloseCamera() @@ -2101,6 +2500,8 @@ bool CPhoneDevice::CloseCamera() camera->close(); delete camera; + + m_powerCtrlPtr.reset(); } return true; } @@ -2108,34 +2509,29 @@ bool CPhoneDevice::CloseCamera() void CPhoneDevice::CloseCamera2(CPhoneDevice::CPhoneCamera* camera, unsigned 
int photoId, unsigned char cameraType) { XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Start CloseCamera PHOTOID=%u", photoId); - // std::this_thread::sleep_for(std::chrono::milliseconds(16)); + if (camera != NULL) { camera->close(); delete camera; } - XYLOG(XYLOG_SEVERITY_DEBUG, "TP: Will Turn Off Power PHOTOID=%u", photoId); { - std::shared_ptr empty; - empty.swap(m_powerCtrlPtr); + auto powerCtrl = m_powerCtrlPtr; + m_powerCtrlPtr.reset(); + std::this_thread::sleep_for(std::chrono::seconds(1)); } - XYLOG(XYLOG_SEVERITY_DEBUG, "TP: End Turn Off Power PHOTOID=%u", photoId); XYLOG(XYLOG_SEVERITY_DEBUG, "TP: CloseCamera PHOTOID=%u", photoId); } -void visualize(const char* filename, const ncnn::Mat& m) -{ - cv::Mat a(m.h, m.w, CV_8UC3); - m.to_pixels(a.data, ncnn::Mat::PIXEL_BGR2RGB); - - cv::imwrite(filename, a); -} - void DrawOutlineText(cv::Ptr ft2, cv::Mat& mat, const std::string& str, cv::Point startPoint, int fontSize, cv::Scalar clr, int thickness) { + if (mat.empty()) + { + return; + } std::vector lines = split(str, "\n"); int lineHeight = 0; cv::Point pt = startPoint; @@ -2164,12 +2560,16 @@ bool CPhoneDevice::onOneCapture(std::shared_ptr characteristics time_t takingTime = time(NULL); if (mPhotoInfo.remedy != 0) { - if ((takingTime - mPhotoInfo.scheduleTime) > 30) + time_t scheduleTime = mPhotoInfo.scheduleTime; + if (scheduleTime == 0) { - takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + scheduleTime = mPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) + { + takingTime = scheduleTime + mPhotoInfo.channel * 2; } } - mPhotoInfo.photoTime = takingTime; vector osds; osds.swap(mOsds); @@ -2210,30 +2610,25 @@ bool CPhoneDevice::onOneCapture(std::shared_ptr characteristics closeThread.detach(); } + if (rgb.empty()) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); + TakePhotoCb(0, photoInfo, "", takingTime); + return true; + } + CPhoneDevice* pThis = this; - std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, duration, takingTime]()mutable - { - std::string cameraInfo; - if (photoInfo.outputDbgInfo != 0) - { - NdkCamera::CAPTURE_RESULT captureResult = { 0 }; - NdkCamera::EnumCameraResult(result.get(), captureResult); - - char extimeunit[4] = { 0 }; - unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); - strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); - char str[128] = { 0 }; - snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", - captureResult.autoExposure, captureResult.autoFocus, - extime, extimeunit, captureResult.compensation, captureResult.sensitivity, - // isnan(captureResult.FocusDistance) ? 
0 : captureResult.FocusDistance, - (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, - captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, - duration, captureResult.frameDuration); - cameraInfo = str; - } + std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, duration, takingTime]() + { + std::string cameraInfo; + if (photoInfo.outputDbgInfo != 0) + { + cameraInfo = BuildCaptureResultInfo(result.get(), ldr, duration, false); + } + + XYLOG(XYLOG_SEVERITY_DEBUG, "Photo Result CH=%u PR=%u IMGID=%u %s", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, (uint32_t)photoInfo.photoId, cameraInfo.c_str()); -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO #if 0 bool shouldRetry = false; if (ldr != ~0) @@ -2270,18 +2665,21 @@ bool CPhoneDevice::onOneCapture(std::shared_ptr characteristics } } #endif // 0 -#endif // OUTPUT_CAMERA_DBG_INFO +#endif // OUTPUT_DBG_INFO - // Notify to take next photo - XYLOG(XYLOG_SEVERITY_INFO, "TP: Notofy to Take Next CUR Info: CH=%u PR=%u PHOTOID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, (uint32_t)photoInfo.photoId); - pThis->TakePhotoCb(1, photoInfo, "", takingTime); + // Notify to take next photo + XYLOG(XYLOG_SEVERITY_INFO, "TP: Notofy to Take Next CUR Info: CH=%u PR=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, (uint32_t)photoInfo.photoId); + pThis->TakePhotoCb(1, photoInfo, "", takingTime); - bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb); - if (res) - { - // TakePhotoCb(2, photoInfo, path, takingTime); - } - }); + bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb, takingTime); + if (res) + { + // TakePhotoCb(2, photoInfo, path, takingTime); + ALOGW("Current allocated matrices %u", (uint32_t)rgb.total()); + } + + + }); th.detach(); @@ -2292,12 +2690,18 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi std::vector >& results, uint32_t ldr, uint32_t duration, std::vector >& frames) { +#if 0 time_t takingTime = time(NULL); if (mPhotoInfo.remedy != 0) { - if ((takingTime - mPhotoInfo.scheduleTime) > 30) + time_t scheduleTime = mPhotoInfo.scheduleTime; + if (scheduleTime == 0) { - takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + scheduleTime = mPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) + { + takingTime = scheduleTime + mPhotoInfo.channel * 2; } } mPhotoInfo.photoTime = takingTime; @@ -2324,7 +2728,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi } CPhoneDevice* pThis = this; - std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, duration, takingTime]()mutable + std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, duration, takingTime]() { cv::Mat rgb; std::string cameraInfo; @@ -2352,25 +2756,11 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi { if (!results.empty()) { - NdkCamera::CAPTURE_RESULT captureResult = { 0 }; - NdkCamera::EnumCameraResult(results[0].get(), captureResult); - - char extimeunit[4] = { 0 }; - unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); - strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? 
"ms" : "μs"); - char str[128] = { 0 }; - snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld BURST", - captureResult.autoExposure, captureResult.autoFocus, - extime, extimeunit, captureResult.compensation, captureResult.sensitivity, - // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance, - (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, - captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, - duration, captureResult.frameDuration); - cameraInfo = str; + cameraInfo = BuildCaptureResultInfo(results[0].get(), ldr, duration, true); } } -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO #if 0 bool shouldRetry = false; if (ldr != ~0) @@ -2407,7 +2797,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi } } #endif // 0 -#endif // OUTPUT_CAMERA_DBG_INFO +#endif // OUTPUT_DBG_INFO // Notify to take next photo pThis->TakePhotoCb(1, photoInfo, "", takingTime); @@ -2497,7 +2887,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi std::vector > localFrames; localFrames.swap(pByteArrays.get()->byteArrays); - if (photoInfo.customHdr) + if (false /*photoInfo.customHdr*/) { std::vector imagePaths; std::vector exposureTimes; @@ -2648,18 +3038,48 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi } cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR); #endif // USING_EXEC_HDRP - bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb); - if (res) + + if (rgb.empty()) { - // TakePhotoCb(2, photoInfo, path, takingTime); + XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); + pThis->TakePhotoCb(0, photoInfo, path, takingTime); + } + else + { + bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb, takingTime); + if (res) + { + // TakePhotoCb(2, photoInfo, path, takingTime); + } } + }); th.detach(); +#endif return true; } +std::string CPhoneDevice::BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst) +{ + NdkCamera::CAPTURE_RESULT captureResult = { 0 }; + NdkCamera::EnumCameraResult(result, captureResult); + + char extimeunit[4] = { 0 }; + unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((captureResult.exposureTime >= 1000) ? ((unsigned int)(captureResult.exposureTime / 1000)) : captureResult.exposureTime); + strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : ((captureResult.exposureTime > 1000) ? "μs" : "ns")); + char str[128] = { 0 }; + snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld %s", + captureResult.autoExposure, captureResult.autoFocus, + extime, extimeunit, captureResult.compensation, captureResult.sensitivity, + // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance, + (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, + captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, + duration, captureResult.frameDuration, burst ? 
"BURST" : ""); + return std::string(str); +} + bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, uint32_t duration, std::vector >& frames) @@ -2667,12 +3087,16 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi time_t takingTime = time(NULL); if (mPhotoInfo.remedy != 0) { - if ((takingTime - mPhotoInfo.scheduleTime) > 30) + time_t scheduleTime = mPhotoInfo.scheduleTime; + if (scheduleTime == 0) + { + scheduleTime = mPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) { - takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + takingTime = scheduleTime + mPhotoInfo.channel * 2; } } - mPhotoInfo.photoTime = takingTime; vector osds; osds.swap(mOsds); @@ -2844,25 +3268,11 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi { if (!results.empty()) { - NdkCamera::CAPTURE_RESULT captureResult = { 0 }; - NdkCamera::EnumCameraResult(results[0].get(), captureResult); - - char extimeunit[4] = { 0 }; - unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000)); - strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs"); - char str[128] = { 0 }; - snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld", - captureResult.autoExposure, captureResult.autoFocus, - extime, extimeunit, captureResult.compensation, captureResult.sensitivity, - // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance, - (unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState, - captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio, - duration, captureResult.frameDuration); - cameraInfo = str; + cameraInfo = BuildCaptureResultInfo(results[0].get(), ldr, duration, false); } } -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO #if 0 bool shouldRetry = false; if (ldr != ~0) @@ -2899,7 +3309,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi } } #endif // 0 -#endif // OUTPUT_CAMERA_DBG_INFO +#endif // OUTPUT_DBG_INFO // Notify to take next photo pThis->TakePhotoCb(1, photoInfo, "", takingTime); @@ -2957,11 +3367,20 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr characteristi cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR); } - bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb); - if (res) + if (rgb.empty()) { - // TakePhotoCb(2, photoInfo, path, takingTime); + XYLOG(XYLOG_SEVERITY_ERROR, "Empty Mat object CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId); + pThis->TakePhotoCb(0, photoInfo, path, takingTime); + } + else + { + bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb, takingTime); + if (res) + { + // TakePhotoCb(2, photoInfo, path, takingTime); + } } + }); th.detach(); @@ -3001,17 +3420,22 @@ int CPhoneDevice::CallExecv(int rotation, int frontCamera, const std::string& ou return exitCode; } -bool CPhoneDevice::OnImageReady(cv::Mat& mat) +bool CPhoneDevice::OnImageReady(cv::Mat mat) { time_t takingTime = time(NULL); if (mPhotoInfo.remedy != 0) { - if ((takingTime - mPhotoInfo.scheduleTime) > 30) + time_t scheduleTime = mPhotoInfo.scheduleTime; + if (scheduleTime == 0) + { + scheduleTime = mPhotoInfo.requestTime; + } + if ((takingTime - scheduleTime) > 30) { - takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2; + takingTime = scheduleTime 
+ mPhotoInfo.channel * 2; } } - mPhotoInfo.photoTime = takingTime; + // mPhotoInfo.photoTime = takingTime; int baseline = 0; cv::Size textSize; double height = mat.size().height; @@ -3135,7 +3559,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) pt.x = it->x + it->w - textSize.width; } -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO char buf[128]; snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)", it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height); @@ -3152,7 +3576,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled"); } -// #ifdef OUTPUT_CAMERA_DBG_INFO +// #ifdef OUTPUT_DBG_INFO if (mCamera != NULL) { @@ -3188,7 +3612,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) // DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1); } } -// #endif // OUTPUT_CAMERA_DBG_INFO +// #endif // OUTPUT_DBG_INFO for (vector::const_iterator it = mOsds.cbegin(); it != mOsds.cend(); ++it) { @@ -3205,7 +3629,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) #endif textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline); - XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); + // XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); if (it->alignment == OSD_ALIGNMENT_TOP_LEFT) { @@ -3238,9 +3662,9 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) params.push_back((int)((uint32_t)mPhotoInfo.quality)); bool res = false; - std::string fullPath = endsWith(mPath, ".jpg") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo)); + std::string fullPath = endsWith(mPath, ".jpg") ? 
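[Editor's note] The same remedy clamp now appears in onOneCapture, both onBurstCapture overloads and OnImageReady: when a remedial shot finishes more than 30 seconds after its scheduled time, the reported time is pulled back to the schedule (falling back to the request time when no schedule exists) plus a small per-channel offset. A hedged sketch of that rule extracted into a helper (the free-function form is illustrative; the diff keeps the logic inline in each method):

    #include <ctime>

    // Sketch of the effective photo-time rule used by the remedy branches above.
    static time_t EffectivePhotoTime(time_t takingTime, time_t scheduleTime,
                                     time_t requestTime, unsigned char channel,
                                     bool remedy)
    {
        if (!remedy)
            return takingTime;
        time_t base = (scheduleTime != 0) ? scheduleTime : requestTime;
        if ((takingTime - base) > 30)
            return base + (time_t)channel * 2;   // stagger channels by 2 seconds
        return takingTime;
    }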
mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo, takingTime)); -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO bool shouldRetry = false; #if 0 @@ -3280,9 +3704,18 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) } #endif -#endif // OUTPUT_CAMERA_DBG_INFO +#endif // OUTPUT_DBG_INFO + bool imgExisted = std::filesystem::exists(std::filesystem::path(fullPath)); + if (imgExisted) + { + size_t imgFileSize = getFileSize(fullPath); + if (imgFileSize == 0 || imgFileSize == (size_t)-1) + { + imgExisted = false; + } + } - if (!std::filesystem::exists(std::filesystem::path(fullPath))) + if (!imgExisted) { bool res = cv::imwrite(fullPath.c_str(), mat, params); if (!res) @@ -3293,7 +3726,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) { XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to write photo: %s", fullPath.c_str()); } -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO if (shouldRetry) { TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs); @@ -3314,7 +3747,7 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat) return res; } -bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat) +bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime) { int baseline = 0; cv::Size textSize; @@ -3425,7 +3858,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vectorx + it->w - textSize.width; } -#ifdef OUTPUT_CAMERA_DBG_INFO +#ifdef OUTPUT_DBG_INFO char buf[128]; snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)", it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height); @@ -3442,7 +3875,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector (int)width - 1) { rb.x = (int)width - 1; @@ -3479,7 +3911,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector::const_iterator it = osds.cbegin(); it != osds.cend(); ++it) { @@ -3496,7 +3928,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vectorgetTextSize(it->text, fontSize, thickness, &baseline); - XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); + // XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline); if (it->alignment == OSD_ALIGNMENT_TOP_LEFT) { @@ -3539,24 +3971,91 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector imgContents; + size_t imgFileSize = 0; + bool res = cv::imencode(".jpg", mat, imgContents, params); + if (res) + { + int errcode = 0; + res = writeFile(tmpPath.c_str(), &imgContents[0], imgContents.size(), errcode); + if (res) + { + if (existsFile(tmpPath)) + { + imgFileSize = getFileSize(tmpPath); + if (imgFileSize == 0 || imgFileSize == -1) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Empty File Written: %s errno=%d", tmpPath.c_str() + m_appPath.size(), errcode); + remove(tmpPath.c_str()); + res = false; + } + } + } + else + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s errno=%d", tmpPath.c_str() + m_appPath.size(), errcode); + if (existsFile(tmpPath)) + { + remove(tmpPath.c_str()); + } + } + } + else + { + int errcode = errno; + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Encode Image CH=%u PR=%u IMGID=%u, errno=%d", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, 
(uint32_t)photoInfo.photoId, errcode); + } + // bool res = cv::imwrite(tmpPath.c_str(), mat, params); + if (res/* && imgFileSize > 0*/) + { + res = (rename(tmpPath.c_str(), fullPath.c_str()) == 0); + if (res) + { + imgFileSize = getFileSize(fullPath); + if (imgFileSize == 0 || imgFileSize == -1) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Empty File after rename %s", fullPath.c_str() + m_appPath.size()); + res = false; + } + } + + } +#else bool res = cv::imwrite(fullPath.c_str(), mat, params); + size_t imgFileSize = getFileSize(fullPath); +#endif if (!res) { XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size()); } else { - XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size()); + XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s, FileSize=%u", fullPath.c_str() + m_appPath.size(), (uint32_t)imgFileSize); } - TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs); + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, takingTime, objs); } else { @@ -3566,7 +4065,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector objs; - std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo); + std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo, photoTime); if (result) { std::rename(path, fullPath.c_str()); } - TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs); + TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, photoTime, objs); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType); @@ -3617,16 +4116,16 @@ bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path } else { - mPhotoInfo.photoTime = time(NULL); + time_t photoTime = time(NULL); CPhoneCamera* pCamera = NULL; std::vector objs; - std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo); + std::string fullPath = mPath + CTerminal::BuildPhotoFileName(mPhotoInfo, photoTime); if (result) { std::rename(path, fullPath.c_str()); } - TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs); + TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, photoTime, objs); bool turnOffOtg = (mPhotoInfo.usbCamera != 0); std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, mPhotoInfo.cameraType); @@ -3708,6 +4207,7 @@ void CPhoneDevice::UpdateSignalLevel(int signalLevel) { m_signalLevel = signalLevel; m_signalLevelUpdateTime = time(NULL); + XYLOG(XYLOG_SEVERITY_DEBUG, "Signal Level Updated: %d, SS=%d", signalLevel & 0xFF, (signalLevel >> 8)); } void CPhoneDevice::UpdateSimcard(const std::string& simcard) @@ -3715,27 +4215,56 @@ void CPhoneDevice::UpdateSimcard(const std::string& simcard) m_simcard = simcard; } -void CPhoneDevice::UpdateEthernet(net_handle_t nethandle, bool available) +void CPhoneDevice::UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed) { - m_devLocker.lock(); - m_netHandle = available ? nethandle : NETWORK_UNSPECIFIED; - m_devLocker.unlock(); - - XYLOG(XYLOG_SEVERITY_WARNING, "NET Handle: %lld", available ? (uint64_t)nethandle : 0); + if (defaultOrEthernet) + { + net_handle_t oldHandle = NETWORK_UNSPECIFIED; + m_devLocker.lock(); + oldHandle = m_defNetHandle; + m_defNetHandle = available ? 
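[Editor's note] PostProcessPhoto now encodes the frame in memory, writes it to a temporary file, checks that the result is non-empty, and only then renames it onto the final path, so an interrupted write can no longer leave a truncated .jpg that later looks like a finished photo. A minimal sketch of the same encode–write–verify–rename sequence (writeFile/getFileSize/existsFile are project helpers; plain <cstdio> and <filesystem> stand in for them here):

    #include <cstdio>
    #include <filesystem>
    #include <string>
    #include <vector>
    #include <opencv2/imgcodecs.hpp>

    // Sketch: encode to memory, write to a temporary file, verify, then rename.
    // rename() within one filesystem replaces the target atomically.
    static bool WriteJpegAtomically(const cv::Mat& img, const std::string& finalPath, int quality)
    {
        std::vector<int> params = { cv::IMWRITE_JPEG_QUALITY, quality };
        std::vector<unsigned char> bytes;
        if (!cv::imencode(".jpg", img, bytes, params) || bytes.empty())
            return false;

        std::string tmpPath = finalPath + ".tmp";
        FILE* fp = fopen(tmpPath.c_str(), "wb");
        if (fp == NULL)
            return false;
        size_t written = fwrite(bytes.data(), 1, bytes.size(), fp);
        fclose(fp);
        if (written != bytes.size())
        {
            std::filesystem::remove(tmpPath);
            return false;
        }
        return rename(tmpPath.c_str(), finalPath.c_str()) == 0;
    }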
nethandle : NETWORK_UNSPECIFIED; + m_devLocker.unlock(); + changed = (oldHandle != nethandle); + XYLOG(XYLOG_SEVERITY_WARNING, "Active Network Handle: %lld", available ? (uint64_t)nethandle : 0); + } + else + { + net_handle_t oldHandle = NETWORK_UNSPECIFIED; + m_devLocker.lock(); + oldHandle = m_ethnetHandle; + m_ethnetHandle = available ? nethandle : NETWORK_UNSPECIFIED; + m_devLocker.unlock(); + changed = (oldHandle != nethandle); + XYLOG(XYLOG_SEVERITY_WARNING, "Ethernet Handle: %lld", available ? (uint64_t)nethandle : 0); + } } -net_handle_t CPhoneDevice::GetNetHandle() const +net_handle_t CPhoneDevice::GetEthnetHandle() const { net_handle_t nethandle = NETWORK_UNSPECIFIED; m_devLocker.lock(); - nethandle = m_netHandle; + nethandle = m_ethnetHandle; m_devLocker.unlock(); return nethandle; - } void CPhoneDevice::SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway) { +#if 0 + std::string argv = " root ifconfig "; + argv += iface; + argv += " down"; + int resCode = execl("/system/xbin/su", "root", "ifconfig", iface.c_str(), "down", NULL); + + argv = " root ifconfig "; + argv += iface; + argv += " " + ip; + argv += " netmask " + netmask; + argv += " up"; + resCode = execl("/system/xbin/su", argv.c_str(), NULL); + + // execv("/system/xbin/su", " root ifconfig ", "start", "-a", "android.intent.action.VIEW", "-d", "http://qq.com", "-n", "com.android.browser/.BrowserActivity", NULL); +#else JNIEnv* env = NULL; jboolean ret = JNI_FALSE; bool didAttachThread = false; @@ -3751,18 +4280,62 @@ void CPhoneDevice::SetStaticIp(const std::string& iface, const std::string& ip, #else jstring jip = env->NewStringUTF(ip.c_str()); #endif - jstring jnetmask = env->NewStringUTF(netmask.c_str()); + std::string ipPrefix = "192.168.68.0"; + jstring jipPrefix = env->NewStringUTF(ipPrefix.c_str()); jstring jgw = env->NewStringUTF(gateway.c_str()); - env->CallVoidMethod(m_javaService, mSetStaticIpMid, jiface, jip, jnetmask, jgw); - // env->DeleteLocalRef(jgw); - // env->DeleteLocalRef(jnetmask); - // env->DeleteLocalRef(jip); - // env->DeleteLocalRef(jiface); + env->CallVoidMethod(m_javaService, mSetStaticIpMid, jiface, jip, jgw, jipPrefix, 24); + env->DeleteLocalRef(jgw); + env->DeleteLocalRef(jipPrefix); + env->DeleteLocalRef(jip); + env->DeleteLocalRef(jiface); + + if (didAttachThread) + { + m_vm->DetachCurrentThread(); + } +#endif +} + +int CPhoneDevice::ExecuteCommand(const std::string& cmd) +{ + JNIEnv* env = NULL; + jboolean ret = JNI_FALSE; + bool didAttachThread = false; + bool res = GetJniEnv(m_vm, &env, didAttachThread); + if (!res) + { + ALOGE("Failed to get JNI Env"); + } + + jstring jcmd = env->NewStringUTF(cmd.c_str()); + jint resCode = env->CallIntMethod(m_javaService, mExecuteCmdMid, jcmd); + env->DeleteLocalRef(jcmd); if (didAttachThread) { m_vm->DetachCurrentThread(); } + return resCode; +} + +void CPhoneDevice::ShutdownEthernet() +{ + XYLOG(XYLOG_SEVERITY_DEBUG, "Make Ethernet Down"); + std::string cmd = "/system/xbin/su root /system/bin/ip link set "; + cmd += m_network->iface; + cmd += " down"; + int resCode = ExecuteCommand(cmd); + XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet is Down %d", resCode); + + // const char* args[] = {"root", "ifconfig", m_network->iface.c_str(), "down", NULL}; + +#if 0 + int resCode = execv("/system/xbin/su", args); + if (resCode == 0) + { + int aa = 0; + } +#endif } void CPhoneDevice::ConvertDngToPng(const std::string& dngPath, const std::string& pngPath) @@ -3779,6 +4352,8 @@ void 
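[Editor's note] ExecuteCommand, SetStaticIp and ConvertDngToPng all follow the same JNI shape: attach the current native thread if it is not already attached, build jstring arguments, invoke a cached jmethodID on the Java service object, delete the local references, and detach only if this call did the attaching. A hedged sketch of that wrapper for an int-returning method taking one String (GetJniEnv is the project's own helper; the signature assumed below may differ from the real one):

    #include <jni.h>
    #include <string>

    // Project helper (assumed signature): attaches the thread when necessary
    // and reports whether this call performed the attach.
    bool GetJniEnv(JavaVM* vm, JNIEnv** env, bool& didAttach);

    // Sketch: call an int-returning Java method with a single String argument
    // from an arbitrary native thread.
    static int CallStringToIntMethod(JavaVM* vm, jobject service, jmethodID mid,
                                     const std::string& arg)
    {
        JNIEnv* env = NULL;
        bool didAttach = false;
        if (!GetJniEnv(vm, &env, didAttach) || env == NULL)
            return -1;

        jstring jarg = env->NewStringUTF(arg.c_str());
        jint res = env->CallIntMethod(service, mid, jarg);
        env->DeleteLocalRef(jarg);          // local refs leak easily on attached worker threads

        if (didAttach)
            vm->DetachCurrentThread();
        return (int)res;
    }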
CPhoneDevice::ConvertDngToPng(const std::string& dngPath, const std::string jstring jdngPath = env->NewStringUTF(dngPath.c_str()); jstring jpngPath = env->NewStringUTF(pngPath.c_str()); env->CallVoidMethod(m_javaService, mConvertDngToPngMid, jdngPath, jpngPath); + env->DeleteLocalRef(jdngPath); + env->DeleteLocalRef(jpngPath); if (didAttachThread) { @@ -3788,11 +4363,14 @@ void CPhoneDevice::ConvertDngToPng(const std::string& dngPath, const std::string void CPhoneDevice::CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char preset, const char *serfile, unsigned int baud, int addr) { - if(!(m_sensorsStatus && !m_cameraStatus) && GpioControl::GetCamerastatus()) { + + m_ptzController->AddCommand(channel, cmdidx, 0, preset, serfile, baud, addr); +#if 0 + if(GpioControl::GetSelftestStatus(waitTime) && GpioControl::GetCamerastatus()) { CPhoneDevice *pThis = this; string serfileStr(serfile); std::thread ctrlThread([pThis, waitTime, delayTime, cmdidx, channel, preset, serfileStr, baud, - addr]() mutable { + addr]() { uint64_t wid = pThis->RequestWakelock(0); XYLOG(XYLOG_SEVERITY_INFO,"CameraCtrl Command= %d, preset = %u", cmdidx, preset); pThis->OpenPTZSensors(waitTime); @@ -3804,6 +4382,7 @@ void CPhoneDevice::CameraCtrl(unsigned short waitTime, unsigned short delayTime, }); ctrlThread.detach(); } +#endif } int CPhoneDevice::GetSerialPhoto(int devno, D_IMAGE_DEF *photo) @@ -3811,6 +4390,7 @@ int CPhoneDevice::GetSerialPhoto(int devno, D_IMAGE_DEF *photo) return GetImage(devno, (IMAGE_DEF*)photo); } + void CPhoneDevice::InitSerialComm(D_SENSOR_PARAM *sensorParam, char *filedir,const char *logpath) { Gm_InitSerialComm((SENSOR_PARAM *)sensorParam, filedir, logpath); @@ -4348,7 +4928,7 @@ bool CPhoneDevice::OpenSensors(int sensortype) GpioControl::TurnOn(CMD_SET_SPI_POWER); // GpioControl::TurnOn(CMD_SET_485_EN_STATE); // 打开RS485电源 #ifndef USING_N938 -#ifndef USING_PLZ +#ifndef USING_PTZ GpioControl::TurnOn(CMD_SET_485_EN_STATE); #else @@ -4362,8 +4942,9 @@ bool CPhoneDevice::OpenSensors(int sensortype) if(sensortype == CAMERA_SENSOR_OPEN) { #ifndef USING_N938 -#ifndef USING_PLZ +#ifndef USING_PTZ #else + // GpioControl::TurnOn(CMD_SET_5V_PWR_ENABLE); GpioControl::TurnOn(CMD_SET_PTZ_PWR_ENABLE); #endif #else @@ -4412,7 +4993,7 @@ bool CPhoneDevice::CloseSensors(int sensortype, uint32_t delayedCloseTime) GpioControl::TurnOff(CMD_SET_SPI_POWER, delayedCloseTime); // GpioControl::TurnOff(CMD_SET_485_EN_STATE); #ifndef USING_N938 -#ifndef USING_PLZ +#ifndef USING_PTZ GpioControl::TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime); #else GpioControl::TurnOff(CMD_SET_485_ENABLE, delayedCloseTime); @@ -4434,8 +5015,9 @@ bool CPhoneDevice::CloseSensors(int sensortype, uint32_t delayedCloseTime) #ifndef USING_N938 // GpioControl::TurnOff(CMD_SET_3V3_PWR_ENABLE); -#ifndef USING_PLZ +#ifndef USING_PTZ #else + // GpioControl::TurnOffImmediately(CMD_SET_5V_PWR_ENABLE); GpioControl::TurnOffImmediately(CMD_SET_PTZ_PWR_ENABLE); #endif #endif @@ -4449,8 +5031,9 @@ bool CPhoneDevice::CloseSensors(int sensortype, uint32_t delayedCloseTime) #ifndef USING_N938 // GpioControl::TurnOff(CMD_SET_3V3_PWR_ENABLE); -#ifndef USING_PLZ +#ifndef USING_PTZ #else + // GpioControl::TurnOff(CMD_SET_5V_PWR_ENABLE, delayedCloseTime); GpioControl::TurnOff(CMD_SET_PTZ_PWR_ENABLE, delayedCloseTime); #endif #endif @@ -4524,6 +5107,7 @@ void CPhoneDevice::SetStaticIp() unsigned int netMask = 0; unsigned int gateway = 0; std::string ipStr = m_network->ip; +#if 0 if (GetNetInfo("eth0", ip, 
netMask, gateway)) { // const @@ -4535,16 +5119,40 @@ void CPhoneDevice::SetStaticIp() ipStr = "0.0.0.0"; } } - +#endif SetStaticIp(m_network->iface, ipStr, m_network->netmask, m_network->gateway); - XYLOG(XYLOG_SEVERITY_INFO, "Set Static IP on %s: %s", m_network->iface.c_str(), - m_network->ip.c_str()); + XYLOG(XYLOG_SEVERITY_INFO, "Set Static IP on %s: %s(%s)", m_network->iface.c_str(), + m_network->ip.c_str(), ipStr.c_str()); } else { -#ifdef USING_N938 - SetStaticIp("eth0", "0.0.0.0", "255.255.255.0", "192.168.1.1"); -#endif - XYLOG(XYLOG_SEVERITY_WARNING, "No Static IP Confg"); + SetStaticIp("eth0", "192.168.68.91", "255.255.255.0", "192.168.68.91"); } -} \ No newline at end of file +} + +VendorCtrl* CPhoneDevice::MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime) +{ + VendorCtrl* vendorCtrl = NULL; + switch (vendor) + { + case 1: + // Hai Kang + vendorCtrl = new HikonCtrl(ip, userName, password, channel, netHandle, syncTime); + break; + case 2: + break; + case 3: + // Yu Shi + vendorCtrl = new YuShiCtrl(ip, userName, password, channel, netHandle, syncTime); + break; + case 5: + // Hang Yu - New + vendorCtrl = new HangYuCtrl(ip, userName, password, channel, netHandle, syncTime); + } + if (syncTime && (vendorCtrl != NULL)) + { + time_t ts = time(NULL); + vendorCtrl->UpdateTime(ts); + } + return vendorCtrl; +} diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h index 077cda68..263aca66 100644 --- a/app/src/main/cpp/PhoneDevice.h +++ b/app/src/main/cpp/PhoneDevice.h @@ -31,6 +31,8 @@ #include #include "SensorsProtocol.h" +#include "PtzController.h" + #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__)) #define LOGD(...) 
((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__)) @@ -154,11 +156,22 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) { #endif class PowerControl; +class VendorCtrl; +class Streaming; + +struct STREAMING_CONTEXT +{ + std::shared_ptr stream; + std::shared_ptr powerCtrl; + std::shared_ptr ethernetPowerCtrl; +}; class CPhoneDevice : public IDevice { public: + friend PtzController; + struct NETWORK { std::string iface; @@ -172,7 +185,7 @@ public: public: CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params); virtual ~CPhoneCamera(); - virtual bool on_image(cv::Mat& rgb); + virtual bool on_image(cv::Mat rgb); virtual void on_error(const std::string& msg); virtual void onDisconnected(ACameraDevice* device); virtual bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, uint32_t duration, std::vector >& frames); @@ -208,7 +221,7 @@ public: uint64_t uid; }; - CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode, const std::string& nativeLibDir); + CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir); virtual ~CPhoneDevice(); virtual void SetListener(IListener* listener); @@ -219,25 +232,28 @@ public: virtual bool UpdateSchedules(); virtual bool QuerySystemProperties(map& properties); virtual bool InstallAPP(const std::string& path, unsigned int delayedTime); - virtual bool Reboot(int resetType, bool manually, const std::string& reason); + virtual bool Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout = 1000); virtual bool EnableGPS(bool enabled); - virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging); + virtual int QueryBattaryVoltage(int timesForAvg, int* isCharging); + virtual uint32_t QueryLdr(); virtual bool RequestPosition(); virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto); virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector& osds, const std::string& path); virtual bool CloseCamera(); - virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 0); + virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 1); virtual bool UnregisterTimer(timer_uid_t uid); virtual uint64_t RequestWakelock(uint64_t timeout); virtual bool ReleaseWakelock(uint64_t wakelock); + virtual std::string GetVersion() const; + virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam); virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam); virtual bool OpenSensors(int sensortype); virtual bool CloseSensors(int sensortype, uint32_t delayedCloseTime); virtual bool OpenPTZSensors(uint32_t sec); virtual bool ClosePTZSensors(uint32_t delayedCloseTime); - virtual bool GetPTZSensorsStatus(); + virtual bool GetPTZSensorsStatus(time_t waittime); virtual bool GetCameraStatus(); virtual void CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char presetno, const char *serfile, unsigned int baud, int addr); virtual int GetSerialPhoto(int devno, D_IMAGE_DEF *photo); @@ -248,7 +264,7 @@ public: void UpdatePosition(double lon, double lat, double radius, time_t ts); bool OnVideoReady(bool photoOrVideo, bool result, const char* 
path, unsigned int photoId); - bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId); + bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat mat, unsigned int photoId); void UpdateSignalLevel(int signalLevel); void UpdateTfCardPath(const std::string& tfCardPath) @@ -260,20 +276,23 @@ public: mBuildTime = buildTime; } void UpdateSimcard(const std::string& simcard); - void UpdateEthernet(net_handle_t nethandle, bool available); + void UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed); - net_handle_t GetNetHandle() const; + net_handle_t GetEthnetHandle() const; + + VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime); protected: std::string GetFileName() const; - std::string GetVersion() const; bool SendBroadcastMessage(std::string action, int value); // bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_, - bool TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector& osds, std::shared_ptr powerCtrlPtr); - bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat); + bool TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector& osds, std::shared_ptr powerCtrlPtr); + bool TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector& osds, std::shared_ptr powerCtrlPtr); + bool StartPushStreaming(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, const std::vector& osds, std::shared_ptr powerCtrlPtr); + bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime); inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const { if (m_listener != NULL) @@ -315,10 +334,10 @@ protected: return false; } - void QueryPowerInfo(std::map& powerInfo); + void QueryFlowInfo(std::map& powerInfo); std::string QueryCpuTemperature(); - bool OnImageReady(cv::Mat& mat); + bool OnImageReady(cv::Mat mat); bool onOneCapture(std::shared_ptr characteristics, std::shared_ptr results, uint32_t ldr, uint32_t duration, cv::Mat rgb); bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, uint32_t duration, std::vector >& frames); bool onBurstCapture(std::shared_ptr characteristics, std::vector >& results, uint32_t ldr, uint32_t duration, std::vector >& frames); @@ -342,6 +361,10 @@ protected: void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway); void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath); void SetStaticIp(); + void ShutdownEthernet(); + int ExecuteCommand(const std::string& cmd); + + static std::string BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst); protected: @@ -354,7 +377,8 @@ protected: std::string m_nativeLibraryDir; NETWORK* m_network; - net_handle_t m_netHandle; + net_handle_t m_defNetHandle; + net_handle_t m_ethnetHandle; jmethodID mRegisterHeartbeatMid; jmethodID mUpdateCaptureScheduleMid; @@ -364,7 +388,7 @@ 
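[Editor's note] The header now tracks two network handles instead of one: m_defNetHandle for whatever network is currently the default, and m_ethnetHandle for the wired interface, both guarded by m_devLocker, with UpdateNetwork reporting whether the handle actually changed. A reduced sketch of that bookkeeping (treating NETWORK_UNSPECIFIED as a zero sentinel and net_handle_t as a 64-bit integer are assumptions about the project's typedefs):

    #include <cstdint>
    #include <mutex>

    typedef uint64_t net_handle_t;                       // assumption: matches the NDK typedef
    static const net_handle_t NETWORK_UNSPECIFIED = 0;   // assumption: zero sentinel

    // Sketch of the state behind UpdateNetwork()/GetEthnetHandle().
    class NetworkHandlesSketch
    {
    public:
        bool Update(net_handle_t handle, bool available, bool defaultOrEthernet)
        {
            std::lock_guard<std::mutex> guard(m_lock);
            net_handle_t& slot = defaultOrEthernet ? m_defHandle : m_ethHandle;
            net_handle_t oldHandle = slot;
            slot = available ? handle : NETWORK_UNSPECIFIED;
            return oldHandle != handle;                  // mirrors the 'changed' out-parameter
        }

        net_handle_t Ethernet() const
        {
            std::lock_guard<std::mutex> guard(m_lock);
            return m_ethHandle;
        }

    private:
        mutable std::mutex m_lock;
        net_handle_t m_defHandle = NETWORK_UNSPECIFIED;
        net_handle_t m_ethHandle = NETWORK_UNSPECIFIED;
    };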
protected: jmethodID mRequestWakelockMid; jmethodID mReleaseWakelockMid; - jmethodID mGetSystemInfoMid; + jmethodID mGetFlowInfoMid; jmethodID mRebootMid; jmethodID mInstallAppMid; @@ -372,6 +396,7 @@ protected: jmethodID mRequestPositionMid; jmethodID mExecHdrplusMid; jmethodID mSetStaticIpMid; + jmethodID mExecuteCmdMid; jmethodID mConvertDngToPngMid; @@ -383,7 +408,6 @@ protected: IListener* m_listener; const CFG_RECOGNIZATION* m_pRecognizationCfg; bool mAIInitialized; - unsigned int mNetId; unsigned int mVersionCode; time_t mBuildTime; @@ -400,16 +424,17 @@ protected: std::thread m_threadClose; std::shared_ptr m_powerCtrlPtr; + uint32_t m_ethernetFailures; + int m_signalLevel; time_t m_signalLevelUpdateTime; std::string m_simcard; mutable std::mutex m_cameraLocker; - bool m_cameraStatus; - bool m_sensorsStatus; time_t m_lastTime; std::atomic m_shouldStopWaiting; + std::atomic m_isSelfTesting{false}; IDevice::ICE_TAIL m_tempData; mutable std::mutex m_dataLocker; @@ -419,6 +444,10 @@ protected: std::atomic m_collecting; unsigned long long localDelayTime; + std::map m_streamings; + + PtzController* m_ptzController; + }; diff --git a/app/src/main/cpp/PtzController.cpp b/app/src/main/cpp/PtzController.cpp new file mode 100644 index 00000000..c59c53a6 --- /dev/null +++ b/app/src/main/cpp/PtzController.cpp @@ -0,0 +1,462 @@ +// +// Created by Matthew on 2025/3/5. +// + +#include "PtzController.h" +#include "SensorsProtocol.h" +#include "GPIOControl.h" +#include "PhoneDevice.h" +#include "time.h" +#include + +PtzController::PtzController(CPhoneDevice* pPhoneDevice) : m_pPhoneDevice(pPhoneDevice) +{ + m_exit = false; +} + +void PtzController::Startup() +{ + m_thread = std::thread(PtzThreadProc, this); +} + +void PtzController::PtzThreadProc(PtzController* pThis) +{ + pThis->PtzProc(); +} + +void PtzController::AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr) +{ + SERIAL_CMD cmd = { 0 }; + cmd.channel = channel; + cmd.preset = preset; + cmd.cmdidx = cmdidx; + cmd.bImageSize = bImageSize; + strcpy(cmd.serfile, serfile); + cmd.baud = baud; + cmd.addr = addr; + cmd.ts = time(NULL); + + m_locker.lock(); + m_cmds.push_back(cmd); + m_locker.unlock(); + + m_sem.release(); +} + +void PtzController::AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector& osds) +{ + IDevice::SerialsPhotoParam param = { "", 0, 0 }; + m_pPhoneDevice->GetPhotoSerialsParamCb(param); + + SERIAL_CMD cmdPreset = { 0 }; + time_t ts = time(NULL); + +#if 1 + // if (photoInfo.preset != 0 && photoInfo.preset != 0xFF) + { + cmdPreset.ts = photoInfo.selfTestingTime; + cmdPreset.delayTime = photoInfo.closeDelayTime; + cmdPreset.channel = photoInfo.channel; + cmdPreset.channel = photoInfo.preset; + cmdPreset.cmdidx = PHOTO_OPEN_POWER; + strcpy(cmdPreset.serfile, param.serfile); + cmdPreset.baud = param.baud; + cmdPreset.addr = param.addr; + } +#endif + SERIAL_CMD cmd = { 0 }; + cmd.ts = ts; + cmd.delayTime = photoInfo.closeDelayTime; + cmd.channel = photoInfo.channel; + cmd.preset = photoInfo.preset; + cmd.cmdidx = Take_Photo; + cmd.bImageSize = photoInfo.resolution; + + strcpy(cmd.serfile, param.serfile); + cmd.baud = param.baud; + cmd.addr = param.addr; + + PtzPhotoParams* ppp = new PtzPhotoParams(photoInfo, path, osds); + cmd.photoParams.reset(ppp); + // cmd.delayTime; + // uint8_t bImageSize; + // char serfile[128]; + // uint32_t baud; + // int addr; + m_locker.lock(); +#if 1 + if (cmdPreset.cmdidx != 0) + { + 
m_cmds.push_back(cmdPreset); + } +#endif + m_cmds.push_back(cmd); + m_locker.unlock(); + + m_sem.release(); + m_sem.release(); +} + +void PtzController::ExitAndWait() +{ + m_exit = true; + m_sem.release(); + + if (m_thread.joinable()) + { + m_thread.join(); + } +} + +void PtzController::PtzProc() +{ + PROC_PTZ_STATE state = PTZS_POWER_OFF; + SERIAL_CMD cmd; + PTZ_STATE ptz_state; + bool hasCmd = false; + int i=0; + int closecmd=0; + + std::shared_ptr powerCtrl; + time_t selfTestingStartTime = 0; + time_t selfTestingWaitTime = 0; + time_t PTZ_preset_start_time = 0; + time_t PTZ_preset_wait_time = 0; + time_t close_delay_time = CAMERA_CLOSE_DELAYTIME; + time_t start_delay_time = 0; + time_t auto_delay_time = 0; + time_t auto_wait_time = WAIT_TIME_AUTO_CLOSE; + time_t photo_move_preset_time = 0; + int iwaitime = 0; + + while(true) + { + m_sem.acquire(); + + if (m_exit) + { + break; + } + + hasCmd = false; + + m_locker.lock(); + for (auto it = m_cmds.begin(); it != m_cmds.end(); ++it) + { + if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state)) + { + // find first non-taking-photo cmd + if (it->cmdidx != Take_Photo) + { + cmd = *it; + m_cmds.erase(it); + hasCmd = true; + break; + } + } + else + { + cmd = *it; + m_cmds.erase(it); + hasCmd = true; + break; + } + } + m_locker.unlock(); + + if (!hasCmd) + { + if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state)) + { + time_t timeout = time(NULL) - selfTestingStartTime; + if(timeout < 0) + selfTestingStartTime = time(NULL); + if (timeout >= selfTestingWaitTime) + { + XYLOG(XYLOG_SEVERITY_INFO, "超时(%u秒)未收到云台自检结束应答,状态改为空闲!", (uint32_t)timeout); + state = PTZS_IDLE; + m_sem.release(); + continue; + } + else + { + //if(timeout >= CAMERA_SELF_TEST_TIME) + { +#ifndef NDEBUG + if (timeout == 1 || ((timeout % 10) == 0)) +#endif + { + XYLOG(XYLOG_SEVERITY_INFO, "开始查询云台自检状态!timeout=%u秒", (uint32_t)timeout); + } + if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr)) + { + if(0 == ptz_state.ptz_status) + { + XYLOG(XYLOG_SEVERITY_INFO, "收到云台自检结束应答,状态改为空闲!timeout=%u秒", (uint32_t)timeout); + state = PTZS_IDLE; + m_sem.release(); + continue; + } + } + } + } + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + m_sem.release(); + continue; + } + if(0 == start_delay_time) + { + if(0 == iwaitime) + { + auto_delay_time = time(NULL); + iwaitime += 1; + m_sem.release(); + continue; + } + else + { + if(time(NULL) - auto_delay_time < 0) + { + auto_delay_time = time(NULL); + } + if(time(NULL) - auto_delay_time >= auto_wait_time) + { + iwaitime = 0; + XYLOG(XYLOG_SEVERITY_INFO, "摄像机自动上电延时时间超过%u秒!准备关闭摄像机!", (uint32_t)auto_wait_time); + } + else + { + m_sem.release(); + continue; + } + } + } + else + { + if(time(NULL) - start_delay_time < 0) + {/* 防止等待关机期间,其他线程发生对时,改变了系统时间,导致长时间不会关摄像机电源*/ + start_delay_time = time(NULL); + } + if(time(NULL) - start_delay_time >= close_delay_time) + { + XYLOG(XYLOG_SEVERITY_INFO, "摄像机空闲时间超过%u秒!准备关闭摄像机!", (uint32_t)close_delay_time); + } + else + { + m_sem.release(); + continue; + } + } + if (state == PTZS_POWER_OFF) + { + closecmd = 0; + XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发,摄像机本来就处于关机状态!"); + // Do Nothing + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发,通知云台准备关机!state=%d", state); + for(i=0; i<3; i++) + { + if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr)) + break; + } + powerCtrl.reset(); + closecmd = 0; + state = PTZS_POWER_OFF; + XYLOG(XYLOG_SEVERITY_INFO, "自动触发关闭云台电源!state=%d", state); + } + start_delay_time = 0; + continue; 
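[Editor's note] PtzController funnels every serial/PTZ operation through one worker thread: producers append a SERIAL_CMD to a mutex-protected vector and release a semaphore once per command, and the worker acquires the semaphore, pops the next eligible command (skipping Take_Photo entries while a self-test is still in progress) and executes it. A minimal sketch of that producer/consumer skeleton using C++20 primitives (the project uses its own CSemaphore; std::counting_semaphore stands in here):

    #include <atomic>
    #include <mutex>
    #include <semaphore>
    #include <thread>
    #include <vector>

    struct CmdSketch { int cmdidx = 0; };

    // Sketch: Add() may be called from any thread; Run() drains commands
    // one at a time on the single worker thread.
    class CmdQueueSketch
    {
    public:
        void Start() { m_worker = std::thread(&CmdQueueSketch::Run, this); }

        void Add(const CmdSketch& cmd)
        {
            {
                std::lock_guard<std::mutex> guard(m_lock);
                m_cmds.push_back(cmd);
            }
            m_sem.release();                             // one release per queued command
        }

        void Stop()                                      // call before destruction
        {
            m_exit = true;
            m_sem.release();
            if (m_worker.joinable()) m_worker.join();
        }

    private:
        void Run()
        {
            while (true)
            {
                m_sem.acquire();
                if (m_exit) break;
                CmdSketch cmd;
                bool has = false;
                {
                    std::lock_guard<std::mutex> guard(m_lock);
                    if (!m_cmds.empty()) { cmd = m_cmds.front(); m_cmds.erase(m_cmds.begin()); has = true; }
                }
                if (has) { /* dispatch on cmd.cmdidx, as PtzProc() does */ }
            }
        }

        std::mutex m_lock;
        std::vector<CmdSketch> m_cmds;
        std::counting_semaphore<> m_sem{0};
        std::atomic<bool> m_exit{false};
        std::thread m_worker;
    };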
+ } + + switch (cmd.cmdidx) + { + case Take_Photo: + { + if (state == PTZS_POWER_OFF) + { + if (!powerCtrl) + { + //powerCtrl = std::make_shared(cmd.photoParams->mPhotoInfo.closeDelayTime); + powerCtrl = std::make_shared(0); + selfTestingStartTime = time(NULL); + selfTestingWaitTime = cmd.photoParams->mPhotoInfo.selfTestingTime; + state = PTZS_PHOTO_SELF_TESTING; + XYLOG(XYLOG_SEVERITY_INFO, "1、收到拍照指令,摄像机从关机状态改为自检状态!"); + + m_locker.lock(); + m_cmds.insert(m_cmds.begin(), cmd); + m_locker.unlock(); + m_sem.release(); + continue; + } + } + if(cmd.photoParams->mPhotoInfo.scheduleTime == 0) + { + if(1 == closecmd) + { + XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令,但同时后续收到关机指令,等待拍完照片再关机。state=%d!", state); + } + else + { + start_delay_time = time(NULL); + XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令,state=%d!", state); + } + } + else + XYLOG(XYLOG_SEVERITY_INFO, "2、收到自动拍照指令,state=%d!", state); + + state = PTZS_TAKING_PHOTO; + if (cmd.preset != 0 && cmd.preset != 0xFF) + { + CameraPhotoCmd(0, cmd.channel, MOVE_PRESETNO, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr); +#if 0 + if(START_ONCE_SELF == cmd.preset) + { + selfTestingStartTime = time(NULL); + selfTestingWaitTime = CAMERA_SELF_TEST_TIME; + state = PTZS_SELF_TESTING; + m_sem.release(); + XYLOG(XYLOG_SEVERITY_INFO, "拍照调用200号预置点指令,摄像机启动一次性自检从拍照状态改为自检状态!取消拍照动作!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime); + break; + } +#endif + PTZ_preset_start_time = time(NULL); + if(START_ONCE_SELF == cmd.preset) + PTZ_preset_wait_time = CAMERA_SELF_TEST_TIME; + else + PTZ_preset_wait_time = MOVE_PRESET_WAIT_TIME; + XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前开始调用预置点%u!state=%d", (uint32_t)cmd.preset, state); + for(;;) + { + if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr)) + { + if(0 == ptz_state.ptz_status) + { + XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u,收到移动结束应答!移动时长=%d秒 state=%d", (uint32_t)cmd.preset, (uint32_t)(time(NULL)-PTZ_preset_start_time), state); + break; + } + } + if(time(NULL) - PTZ_preset_start_time < 0) + {/* 防止等待关机期间,其他线程发生对时,改变了系统时间,导致长时间等待摄像机到达预置点*/ + PTZ_preset_start_time = time(NULL); + } + if(time(NULL) - PTZ_preset_start_time >= PTZ_preset_wait_time) + { + XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u,摄像机在%u秒内未收到调用预置点结束应答!state=%d", (uint32_t)cmd.preset, (uint32_t)PTZ_preset_wait_time, state); + break; + } + std::this_thread::sleep_for(std::chrono::milliseconds(10)); + photo_move_preset_time = time(NULL); + } + } + if(cmd.photoParams->mPhotoInfo.mediaType == 1) + m_pPhoneDevice->TakeVideoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl); + else if ((cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF)) + { + m_pPhoneDevice->StartPushStreaming(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl); + } + else + m_pPhoneDevice->TakePhotoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl); + state = PTZS_IDLE; + } + + break; + case PHOTO_OPEN_POWER: + if (state == PTZS_POWER_OFF) + { + if (!powerCtrl) + { + powerCtrl = std::make_shared(0); + selfTestingStartTime = time(NULL); + selfTestingWaitTime = CAMERA_SELF_TEST_TIME; + state = PTZS_PHOTO_SELF_TESTING; + m_sem.release(); + XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime); + } + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机,摄像机处于state=%d!", state); + } + break; + case OPEN_TOTAL: + if (state == 
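[Editor's note] Before a photo the worker moves the PTZ to the requested preset and polls QueryPtzState until the unit reports idle, with two guards visible above: a hard per-preset timeout, and a rebase of the start timestamp whenever the wall clock jumps backwards (for example after a time sync), so the wait cannot become unbounded. A generic sketch of that wait loop (QueryPtzState and its serial parameters are the project's helpers; a plain callback stands in here):

    #include <chrono>
    #include <ctime>
    #include <functional>
    #include <thread>

    // Sketch: poll isIdle() until it reports true or waitSeconds elapse. If the
    // system clock is stepped backwards mid-wait, the start time is rebased so
    // the timeout still applies.
    static bool WaitUntilIdle(const std::function<bool()>& isIdle, time_t waitSeconds)
    {
        time_t start = time(NULL);
        for (;;)
        {
            if (isIdle())
                return true;
            time_t now = time(NULL);
            if (now - start < 0)
                start = now;                             // clock went backwards (time sync)
            if (now - start >= waitSeconds)
                return false;                            // timed out
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
    }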
PTZS_POWER_OFF) + { + if (!powerCtrl) + { + powerCtrl = std::make_shared(0); + selfTestingStartTime = time(NULL); + selfTestingWaitTime = CAMERA_SELF_TEST_TIME; + state = PTZS_SELF_TESTING; + m_sem.release(); + XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime); + } + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令,摄像机处于state=%d!", state); + } + closecmd = 0; + start_delay_time = time(NULL); + XYLOG(XYLOG_SEVERITY_INFO, "收到手动打开摄像机指令,刷新关机计时初始值,state=%d", state); + break; + case CLOSE_TOTAL: + if (state == PTZS_POWER_OFF) + { + closecmd = 0; + XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令,摄像机本来就处于关机状态!"); + // Do Nothing + } + else if(PTZS_PHOTO_SELF_TESTING == state) + { + closecmd = 1; + XYLOG(XYLOG_SEVERITY_INFO, "在拍照自检过程中收到关机指令,取消延时关机,转到自动关机处理!state=%d", state); + } + else + { + XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令,通知云台准备关机!state=%d", state); + for(i=0; i<3; i++) + { + if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr)) + break; + } + closecmd = 0; + powerCtrl.reset(); + state = PTZS_POWER_OFF; + XYLOG(XYLOG_SEVERITY_INFO, "关闭云台电源!state=%d", state); + } + start_delay_time = 0; + break; + default: + { + if (state == PTZS_POWER_OFF) + { + XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令,摄像机处于关机状态,无法执行!"); + CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr); + break; + } + start_delay_time = time(NULL); + XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令,刷新关机计时初始值,state=%d", state); + if(cmd.ts <= photo_move_preset_time) + { + XYLOG(XYLOG_SEVERITY_INFO, "丢弃拍照调预置点期间收到的控制云台指令,指令时间" FMT_TIME_T ",拍照时间" FMT_TIME_T "!state=%d", cmd.ts, photo_move_preset_time, state); + } + else + { + if((MOVE_PRESETNO == cmd.cmdidx) && (START_ONCE_SELF == cmd.preset)) + { + selfTestingStartTime = time(NULL); + selfTestingWaitTime = CAMERA_SELF_TEST_TIME; + state = PTZS_SELF_TESTING; + m_sem.release(); + XYLOG(XYLOG_SEVERITY_INFO, "收到调用200号预置点指令,摄像机启动一次性自检从当前状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime); + } + CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr); + } + } + break; + } + } +} \ No newline at end of file diff --git a/app/src/main/cpp/PtzController.h b/app/src/main/cpp/PtzController.h new file mode 100644 index 00000000..6a221c90 --- /dev/null +++ b/app/src/main/cpp/PtzController.h @@ -0,0 +1,100 @@ +// +// Created by Matthew on 2025/3/5. 
+// + +#ifndef MICROPHOTO_PTZCONTROLLER_H +#define MICROPHOTO_PTZCONTROLLER_H + +#include +#include +#include +#include +#include +#include +#include +#include + +enum PROC_PTZ_STATE +{ + PTZS_POWER_OFF = 0, + PTZS_IDLE = 1, + PTZS_SELF_TESTING = 2, + PTZS_MOVING = 3, + PTZS_TAKING_PHOTO = 4, + PTZS_PHOTO_SELF_TESTING = 5, +}; + +#define CAMERA_SELF_TEST_TIME 150 /* Camera self-test time (excluding PTZ self-test)*/ +#define MOVE_PRESET_WAIT_TIME 20 /* Waiting for the maximum time for the PTZ to move to the preset position*/ +#define CAMERA_CLOSE_DELAYTIME 360 /* Auto Power-Off Timer Setting After Manual Power-On (for Camera)*/ +#define PHOTO_OPEN_POWER 16000 +#define WAIT_TIME_AUTO_CLOSE 2 /* In order to automatically capture multiple preset point images at the same time and prevent the camera from self checking every time it takes a picture.*/ + +class PtzPhotoParams +{ +public: + PtzPhotoParams(const IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector& osds) : + mPhotoInfo(photoInfo), mPath(path), mOsds(osds) + { + } + + ~PtzPhotoParams() + { + } + + IDevice::PHOTO_INFO mPhotoInfo; + std::string mPath; + std::vector mOsds; +}; + +struct SERIAL_CMD +{ + uint8_t channel; + uint8_t preset; + time_t ts; + int cmdidx; + uint32_t delayTime; + uint8_t bImageSize; + char serfile[128]; + uint32_t baud; + int addr; + std::shared_ptr photoParams; +}; + + +class CPhoneDevice; +class PtzController +{ +public: + PtzController(CPhoneDevice* pPhoneDevice); + + void Startup(); + // (); + void AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr); + void AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector& osds); + + void ExitAndWait(); + +protected: + static void PtzThreadProc(PtzController* pThis); + + void PtzProc(); + + +protected: + + +protected: + std::mutex m_locker; + std::vector m_cmds; + + CSemaphore m_sem; + bool m_exit; + + std::thread m_thread; + + CPhoneDevice* m_pPhoneDevice; +}; + + +#endif //MICROPHOTO_PTZCONTROLLER_H diff --git a/app/src/main/cpp/SensorsProtocol.cpp b/app/src/main/cpp/SensorsProtocol.cpp index 662df4ee..eb8820de 100644 --- a/app/src/main/cpp/SensorsProtocol.cpp +++ b/app/src/main/cpp/SensorsProtocol.cpp @@ -1,7 +1,6 @@ #include #include -#include #include #include #include @@ -9,7 +8,6 @@ #include #include #include -//#include #include #include #include @@ -24,13 +22,13 @@ #include #include -#include +#include "AndroidHelper.h" #include "SensorsProtocol.h" -//#include "Eint.h" #include pthread_mutex_t serial_mutex = PTHREAD_MUTEX_INITIALIZER; // 定义一个互斥锁 pthread_mutex_t camera_mutex = PTHREAD_MUTEX_INITIALIZER; // 定义一个互斥锁 +pthread_mutex_t bd_mutex = PTHREAD_MUTEX_INITIALIZER; // 定义一个互斥锁 SIO_PARAM_SERIAL_DEF serialport[MAX_SERIAL_PORT_NUM]; @@ -41,411 +39,6 @@ AI_DEF rallypntmsg[6][RALLY_DATA_NUM]; AI_DEF slantpntmsg[6][SLANTANGLE_DATA_NUM]; char logPath[512]; -#if 0 -/********************************************************************************* -* 气象数据处理 * -**********************************************************************************/ -static void PortDataProcess(void) -{ - float fvalue, fcorvalue, *fvalua, frnb/*, fwind*/; - //uint16_t uDevAddr; - unsigned char cmdidx; - int i, j, aipnt, datanum; - SIO_PARAM_SERIAL_DEF *pPortParam; - char szbuf[64]; - - pPortParam = &serialport; - //取出装置地址,开始处理地址+++ - if (0x02 == pPortParam->m_au8RecvBuf[5]) - { - //pPortParam->devaddr = pPortParam->m_au8RecvBuf[4]; - return; - } - cmdidx = 
pPortParam->m_au8RecvBuf[5]; -#if 0 - aipnt = pPortParam->SameTypeDevIdx; - uDevAddr = serialport->m_au8RecvBuf[4]; - if (0 == srdt.IsReadWireTem) - { - if (uDevAddr != pPortParam->devaddr) - return; - } -#endif - fvalua = &fvalue; - datanum = pPortParam->m_au8RecvBuf[6]; - if ((0x08 != cmdidx) && (0x09 != cmdidx)) - return; - - for (i = 0, j = 7; (i < datanum) && (j < 6 + pPortParam->m_au8RecvBuf[1]); i++, j += 5) - { - if (0x08 == cmdidx) - fvalue = (pPortParam->m_au8RecvBuf[j + 1] << 24) + (pPortParam->m_au8RecvBuf[j + 2] << 16) - + (pPortParam->m_au8RecvBuf[j + 3] << 8) + pPortParam->m_au8RecvBuf[j + 4]; - else - { - *(u_char *)fvalua = pPortParam->m_au8RecvBuf[j + 4]; - *((u_char *)fvalua + 1) = pPortParam->m_au8RecvBuf[j + 3]; - *((u_char *)fvalua + 2) = pPortParam->m_au8RecvBuf[j + 2]; - *((u_char *)fvalua + 3) = pPortParam->m_au8RecvBuf[j + 1]; - } - switch (pPortParam->m_au8RecvBuf[j]) - { - case 1: /*温度*/ - weatherpntmsg[0] = fvalue; - LOGE("温度:%0.3f ", fvalue); - break; - case 2: /*气压*/ - weatherpntmsg[5] = fvalue; - LOGE("气压:%0.3f ", fvalue); - break; - case 3: /*湿度*/ - weatherpntmsg[1] = fvalue; - LOGE("湿度:%0.3f ", fvalue); - break; - case 4: /*雨量*/ - break; - case 5: /*日照*/ - break; - case 6: /*风速*/ - weatherpntmsg[2] = fvalue; - LOGE("风速:%0.3f ", fvalue); - break; - case 7: /*风向*/ - weatherpntmsg[3] = fvalue; - LOGE("风向:%0.3f ", fvalue); - break; - case 8: /*拉力*/ - case 9: /*倾角传感器X轴倾角*/ - case 10: /*倾角传感器Y轴倾角*/ - case 11: /*测温球导线温度*/ - case 12: /*测温球内部温度*/ - break; - case 13: /*测温球导线X轴倾角*/ - break; - case 14: /*测温球导线Y轴倾角*/ - break; - case 15: /*测温球导线电流*/ - break; - case 16: /*测温球电池电压*/ - break; - case 17: /*A相泄漏电流平均值;*/ - break; - case 18: /*A相泄漏电流最大值;*/ - break; - case 19: /*A相超过3mA的脉冲频次*/ - break; - case 20: /*A相超过10mA的脉冲频次*/ - break; - case 21: /*B相泄漏电流平均值;*/ - break; - case 22: /*B相泄漏电流最大值;*/ - break; - case 23: /*B相超过3mA的脉冲频次*/ - break; - case 24: /*B相超过10mA的脉冲频次*/ - case 25: /*C相泄漏电流平均值;*/ - case 26: /*C相泄漏电流最大值;*/ - case 27: /*C相超过3mA的脉冲频次*/ - case 28: /*C相超过10mA的脉冲频次*/ - break; - } - } -} - -/*************************************************************** -* 按照协议格式化接收数据 * -***************************************************************/ -static void RecvData(u_char *buf, int len)// 规约读数据处理 -{ - int i, ictime; - //uint16_t crc, check; - SIO_PARAM_SERIAL_DEF *pPortParam; - - pPortParam = &serialport; - ictime = (int)time(NULL); - - if (pPortParam->m_iRecvLen == 0) - { - pPortParam->iRecvTime = ictime; - } - else - { - if ((ictime - pPortParam->iRecvTime > 6) || (ictime - pPortParam->iRecvTime < 0)) - pPortParam->iRecvTime = ictime; - else if (ictime - pPortParam->iRecvTime > 2) - { - pPortParam->m_iRecvLen = 0; - pPortParam->m_iRevStatus = 0; - } - } - - for (i = 0; i < len; i++) - { - switch (pPortParam->m_iRevStatus) - { - case 0: // 0x68 - pPortParam->m_iRecvLen = 0; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if (0x68 == buf[i]) - pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 1: // len1 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus++; - break; - case 2: // len2 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if (buf[i] == pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2]) - { - pPortParam->m_iRevStatus++; - pPortParam->m_iNeedRevLength = buf[i] + 5; - } - else - pPortParam->m_iRevStatus = 18; - break; - case 3: // 0x68 - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iNeedRevLength--; - if (0x68 == buf[i]) - 
pPortParam->m_iRevStatus++; - else - pPortParam->m_iRevStatus = 18; - break; - case 4: // 正确接收数据 - pPortParam->m_iNeedRevLength--; - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if (pPortParam->m_iNeedRevLength > 0) - break; - if (buf[i] != 0x16) - { - pPortParam->m_iRevStatus = 18; - break; - } - - //if(CheckLpcError(serialport->m_au8RecvBuf, pPortParam->m_iRecvLen) == TRUE) - { - PortDataProcess(); - pPortParam->m_iRevStatus = 0; - pPortParam->RevCmdFlag = 1; - } - pPortParam->m_iRecvLen = 0; - break; - case 255:// 错误接收数据 - default: - if (buf[i] == 0x68) - { - pPortParam->m_iRevStatus = 1; - pPortParam->m_iRecvLen = 1; - pPortParam->m_au8RecvBuf[0] = buf[i]; - } - else if (buf[i] == 0x16) - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - pPortParam->m_iRevStatus = 0; - pPortParam->m_iRecvLen = 0; - } - else - { - pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; - if (pPortParam->m_iRecvLen > 200) - { - pPortParam->m_iRecvLen = 0; - } - } - break; - } - } -} - -static int64_t get_msec(void) -{ - struct timeval tv; - - gettimeofday(&tv, NULL); - int64_t time_in_msec = tv.tv_sec * 1000 + tv.tv_usec / 1000; - - return time_in_msec; -} -//int inum =0; -//int itimecnt=0; -static int weather_comm(SERIAL_PARAM weatherport) -{ - int fd = -1; - int len, i, ret, icnt = 0; - int64_t ictime, iruntime, isendtime, irecvtime; - unsigned char sendbuf[] = { 0x68,0x00,0x00,0x68,0x01,0x09,0x0a,0x16 }; - char recvbuf[256], szbuf[512]; - //char serial_description[] = "/dev/ttyS0"; - -#if 0 - DIR *dir = opendir("/dev"); - if (dir == NULL) { - LOGE("_test_ opendir"); - return -1; - } - - // 读取目录项 - struct dirent *entry; - while ((entry = readdir(dir)) != NULL) { - // 过滤出串口设备,通常以"ttyS"或"ttyUSB"开头 - if ((strncmp(entry->d_name, "ttyS2", 5) == 0) || - (strncmp(entry->d_name, "ttyS0", 5) == 0)) { - LOGE("_test_ Found serial port: %s\n", entry->d_name); - } - } - - // 关闭目录 - closedir(dir); -#endif - serialport.RevCmdFlag = 1; - serialport.m_iRecvLen = 0; - serialport.m_iRevStatus = 0; - - set12VEnable(true); - setCam3V3Enable(true); - setRS485Enable(true); - sleep(2); - //ictime = (int)time(NULL); - ictime = get_msec(); - for (;;) - { - if (fd < 0) - { - fd = open(weatherport.pathname, O_RDWR | O_NDELAY); - //fd = open(weatherport.pathname, O_RDWR | O_NOCTTY); - if (fd < 0) - { - LOGE("_test_ open serial error \n"); - perror(weatherport.pathname); - return -1; - } - - ret = set_port_attr(fd, weatherport.baudrate, weatherport.databit, weatherport.stopbit, weatherport.parity, 0, 0);/*9600 8n1 */ - if (ret < 0) - { - LOGE("_test_ set uart arrt faile \n"); - return -1; - } - } - - usleep(10000); - //iruntime = (int)time(NULL); - iruntime = get_msec(); - if ((iruntime - ictime > 120000) || (iruntime - ictime < 0)) - ictime = iruntime; - if (iruntime - ictime > 20000) - { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "气象采样时间=%0.3f秒,停止采样!", (iruntime - ictime) / 1000.0); - LOGE("%s", szbuf); - break; - } - - if (1 == serialport.RevCmdFlag) - { - set485WriteMode(); - - len = write(fd, sendbuf, sizeof(sendbuf));/* 向串囗发送字符串 */ - serialport.RevCmdFlag = 0; - LOGE("发送命令时间差%ld毫秒", get_msec() - isendtime); - //isendtime = time(NULL); - isendtime = get_msec(); - if (len < 0) { - LOGE("write data error \n"); - return -1; - } - else { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "Send:"); - for (i = 0; i < len; i++) { - sprintf(szbuf, "%s %02X", szbuf, sendbuf[i]); - } - LOGE("%s", szbuf); - //icnt = 0; - //inum++; - } - tcdrain(fd); - //usleep(50000); - } - else - { - 
//irecvtime = time(NULL); - irecvtime = get_msec(); - if ((irecvtime - isendtime > 6000) || (irecvtime - isendtime < 0)) - isendtime = irecvtime; - if (irecvtime - isendtime > 300) - { - LOGE("传感器超过%ld毫秒未应答", irecvtime - isendtime); - serialport.RevCmdFlag = 1; - serialport.m_iRecvLen = 0; - serialport.m_iRevStatus = 0; - close(fd); - fd = -1; - continue; - } - } - set485ReadMode(); - memset(recvbuf, 0, sizeof(recvbuf)); - len = read(fd, recvbuf, sizeof(recvbuf));/* 在串口读取字符串 */ - if (len < 0) { - LOGE("serial read error \n"); - continue; - } - if (0 == len) - { - //icnt++; - continue; - } - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "Recv:"); - for (i = 0; i < len; i++) { - sprintf(szbuf, "%s %02X", szbuf, recvbuf[i]); - } - __android_log_print(ANDROID_LOG_INFO, "serial", "%s", szbuf); - RecvData((u_char*)recvbuf, len); - //LOGE("一周期空循环次数%d, 读取次数%d, 时间:%d %d", icnt, inum, (int)time(NULL), itimecnt); - icnt = 0; - //serialport.RevCmdFlag =1; - } - - close(fd); - set12VEnable(false); - setCam3V3Enable(false); - setRS485Enable(false); - - //exit(-1); - return(0); -} - -int serial_port_comm() -{ - SERIAL_PARAM portparm; - - //struct timeval tv; - - //gettimeofday(&tv, NULL); - //int64_t time_in_microseconds = tv.tv_sec * 1000000 + tv.tv_usec; - - //LOGE("Current time in microseconds: %ld\n", time_in_microseconds); - -#if 1 - memset(portparm.pathname, 0, sizeof(portparm.pathname)); - sprintf(portparm.pathname, "/dev/ttyS0"); - portparm.parity = 'N'; - portparm.databit = 8; - portparm.baudrate = B9600; - memset(portparm.stopbit, 0, sizeof(portparm.stopbit)); - sprintf(portparm.stopbit, "1"); -#endif - //itimecnt = (int)time(NULL); - - //for(;;) - weather_comm(portparm); - return 0; -} -#endif extern int Gm_SetSerialPortParam(int commid); static speed_t getBaudrate(unsigned int baudrate) { @@ -498,6 +91,162 @@ static int64_t get_msec() return time_in_msec; } + +void DebugLog(int commid, char flag, const char* format, ...) 
+{ +#ifndef NDEBUG + // ALOGW("FMT: %s", format); + // std::this_thread::sleep_for(std::chrono::milliseconds(100)); +#endif + va_list argptr; + va_start(argptr, format); + + auto len = vsnprintf(NULL, 0, format, argptr); + if (len < 0) + { + va_end(argptr); + return; + } + + char* szbuf = new char[len + 1]; + szbuf[len] = 0; + len = std::vsnprintf(szbuf, len + 1, format, argptr); + va_end(argptr); + if (len < 0) + { + delete[] szbuf; + return; + } + + SaveLogTofile(commid, szbuf); +#ifndef NDEBUG + switch (flag) + { + case 'E': + ALOGE("%s", szbuf); + break; + case 'I': + ALOGI("%s", szbuf); + break; + case 'D': + ALOGD("%s", szbuf); + break; + case 'V': + ALOGI("%s", szbuf); + break; + case 'W': + ALOGW("%s", szbuf); + break; + default: + ALOGI("%s", szbuf); + break; + } +#endif + + delete[] szbuf; + +#ifndef NDEBUG + // ALOGW("FMT: %s end", format); +#endif +} + +int SaveLogTofile(int commid, const char *szbuf) +{ + int status; + time_t now; + char filename[512], filedir[512], buf[128]; + FILE *fp = NULL; + struct tm t0; + struct timeval tv; + + if (NULL == szbuf) + return -1; + + now = time(NULL); + localtime_r(&now, &t0); + gettimeofday(&tv, NULL); + + memset(filedir, 0, sizeof(filedir)); + if(logPath != NULL) + strcpy(filedir, logPath); + + if (access(filedir, 0) == 0) + ;//LOGI("文件路径已经存在!"); + else + { + status = mkdir(filedir, S_IRWXU | S_IRWXG | S_IRWXO); + if (status < 0) + return -1; + } + // 写入文件到sdcard + memset(filename, 0, sizeof(filename)); + sprintf(filename, "%sCOM%dlog-%04d-%02d-%02d.txt", filedir, commid + 1, t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday); + fp = fopen(filename, "a+"); + if (NULL == fp) + return -1; + memset(buf, 0, sizeof(buf)); + sprintf(buf, "%d-%02d-%02d %02d:%02d:%02d-%03d ", t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, t0.tm_hour, t0.tm_min, t0.tm_sec, (int)(tv.tv_usec / 1000)); + fwrite(buf, 1, strlen(buf), fp); + fwrite(szbuf, 1, strlen(szbuf), fp); + memset(buf, 0, sizeof(buf)); + strcpy(buf, "\n"); + fwrite(buf, 1, strlen(buf), fp); + + fclose(fp); + return 1; +} + +int SaveImageDataTofile(SIO_PARAM_SERIAL_DEF *curserial) +{ + u_char *image = NULL, *tempphoto = NULL; + int i, status; + size_t len; + // char filename[512]; + FILE *fp = NULL; + + image = (u_char*)malloc(curserial->image.imagelen); + if (NULL == image) + return -1; + tempphoto = image; + for (i = 0; i < curserial->image.imagenum; ++i) { + memmove(tempphoto, &curserial->image.buf[i], (size_t)curserial->image.ilen[i]); + tempphoto += (size_t)curserial->image.ilen[i]; + } + + // memset(szbuf, 0, sizeof(szbuf)); + //memset(filedir, 0, sizeof(filedir)); + //sprintf(filedir, "/sdcard/photo/"); + + if (access(srdt.filedir, 0) == 0) + { + // DebugLog(0, 'I', "文件路径%s已经存在!", srdt.filedir); + DebugLog(0, 'I', "文件路径%s已经存在!", srdt.filedir); + } + else + { + status = mkdir(srdt.filedir, S_IRWXU | S_IRWXG | S_IRWXO); + if (status < 0) + return -1; + } + // 写入文件到sdcard + memset(curserial->image.photoname, 0, sizeof(curserial->image.photoname)); + sprintf(curserial->image.photoname, "%s1-%d-%d.jpg", srdt.filedir, curserial->image.presetno, curserial->image.phototime); + fp = fopen(curserial->image.photoname, "wb+"); + if (NULL == fp) + return -1; + len = fwrite(image, 1, curserial->image.imagelen, fp); + fclose(fp); + free(image); + image = NULL; + if (len < curserial->image.imagelen) + return -1; + else + { + DebugLog(0, 'I', "写入图片文件%s成功!", curserial->image.photoname); + return 1; + } +} + /* 打开串口电源 */ @@ -523,111 +272,12 @@ void Gm_CloseSensorsPower() /* 关闭电源*/ //switch(port) /* 
根据硬件具体布置最后调整,目前是微拍板子的来控制*/ -/* set12VEnable(false); - setCam3V3Enable(false); - setRS485Enable(false); -#if 0 - setInt(CMD_SET_WTH_POWER, 0); - setInt(CMD_SET_PULL_POWER, 0); - setInt(CMD_SET_ANGLE_POWER, 0); - setInt(CMD_SET_OTHER_POWER, 0); - setInt(CMD_SET_PIC1_POWER, 0); - - sleep(3); - igpio = getInt(CMD_SET_WTH_POWER); - igpio = getInt(CMD_SET_PULL_POWER); - igpio = getInt(CMD_SET_ANGLE_POWER); - igpio = getInt(CMD_SET_OTHER_POWER); - igpio = getInt(CMD_SET_PIC1_POWER); -#endif -#if 1 - setInt(CMD_SET_SPI_POWER, 1); - setInt(CMD_SET_485_EN0, 1); - setInt(CMD_SET_485_EN1, 1); - setInt(CMD_SET_485_EN2, 1); - setInt(CMD_SET_485_EN3, 1); - setInt(CMD_SET_485_EN4, 1); -#else - setInt(CMD_SET_SPI_POWER, 0); - setInt(CMD_SET_485_EN0, 0); - setInt(CMD_SET_485_EN1, 0); - setInt(CMD_SET_485_EN2, 0); - setInt(CMD_SET_485_EN3, 0); - setInt(CMD_SET_485_EN4, 0); - sleep(3); - igpio = getInt(CMD_SET_SPI_POWER); - igpio = getInt(CMD_SET_485_EN0); - igpio = getInt(CMD_SET_485_EN1); - igpio = getInt(CMD_SET_485_EN2); - igpio = getInt(CMD_SET_485_EN3); - igpio = getInt(CMD_SET_485_EN4); -#endif -*/ } // 打开传感器电源 void Gm_OpenSensorsPower() { - //char iIoNo; -/* int igpio; - char szbuf[128]; - - //if(0 == port) - // return; - //sprintf(szbuf, "Open Sensors port %d Power!", port); - - //set12VEnable(true); - setCam3V3Enable(true); - setRS485Enable(true); - -#if 0 - setInt(CMD_SET_WTH_POWER, 0); - setInt(CMD_SET_PULL_POWER, 0); - setInt(CMD_SET_ANGLE_POWER, 0); - setInt(CMD_SET_OTHER_POWER, 0); - setInt(CMD_SET_PIC1_POWER, 0); -#else - setInt(CMD_SET_WTH_POWER, 1); - setInt(CMD_SET_PULL_POWER, 1); - setInt(CMD_SET_ANGLE_POWER, 1); - setInt(CMD_SET_OTHER_POWER, 1); - setInt(CMD_SET_PIC1_POWER, 1); - //sleep(3); - igpio = getInt(CMD_SET_WTH_POWER); - igpio = getInt(CMD_SET_PULL_POWER); - igpio = getInt(CMD_SET_ANGLE_POWER); - igpio = getInt(CMD_SET_OTHER_POWER); - igpio = getInt(CMD_SET_PIC1_POWER); - -#endif -#if 1 - setInt(CMD_SET_SPI_POWER, 1); - setInt(CMD_SET_485_EN0, 1); - setInt(CMD_SET_485_EN1, 1); - setInt(CMD_SET_485_EN2, 1); - setInt(CMD_SET_485_EN3, 1); - setInt(CMD_SET_485_EN4, 1); - - //sleep(3); - igpio = getInt(CMD_SET_SPI_POWER); - igpio = getInt(CMD_SET_485_EN0); - igpio = getInt(CMD_SET_485_EN1); - igpio = getInt(CMD_SET_485_EN2); - igpio = getInt(CMD_SET_485_EN3); - igpio = getInt(CMD_SET_485_EN4); - -#else - setInt(CMD_SET_485_EN0, 0); - setInt(CMD_SET_485_EN1, 0); - setInt(CMD_SET_485_EN2, 0); - setInt(CMD_SET_485_EN3, 0); - setInt(CMD_SET_485_EN4, 0); -#endif - - // 打开电源 - //switch(port) -*/ } // 查询传感器电源状态 @@ -683,28 +333,23 @@ void BytestreamLOG(int commid, char* describe, u_char* buf, int len, char flag) void Gm_OpenSerialPort(int devidx) { int fd = -1; - char szbuf[512]; if ((devidx < 0) || (devidx >= MAX_SERIAL_DEV_NUM)) return; - memset(szbuf, 0, sizeof(szbuf)); if (serialport[devparam[devidx].commid].fd <= 0) { fd = ::open(devparam[devidx].pathname, O_RDWR | O_NDELAY); if (fd < 0) { - sprintf(szbuf, "装置%d 打开串口%d %s失败!fd=%d", devidx+1, devparam[devidx].commid+1, devparam[devidx].pathname, fd); - DebugLog(devparam[devidx].commid, szbuf, 'E'); + DebugLog(devparam[devidx].commid, 'E', "装置%d 打开串口%d %s失败!fd=%d", devidx+1, devparam[devidx].commid+1, devparam[devidx].pathname, fd); return; } - sprintf(szbuf, "装置%d 打开串口%d %s成功!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, fd); - DebugLog(devparam[devidx].commid, szbuf, 'I'); + DebugLog(devparam[devidx].commid, 'I', "装置%d 打开串口%d %s成功!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, 
fd); serialport[devparam[devidx].commid].fd = fd; Gm_SetSerialPortParam(devparam[devidx].commid); return; } - sprintf(szbuf, "装置%d 串口%d %s已经打开!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, serialport[devparam[devidx].commid].fd); - DebugLog(devparam[devidx].commid, szbuf, 'I'); + DebugLog(devparam[devidx].commid, 'I', "装置%d 串口%d %s已经打开!fd=%d", devidx + 1, devparam[devidx].commid + 1, devparam[devidx].pathname, serialport[devparam[devidx].commid].fd); } // 关闭串口通讯 @@ -743,8 +388,7 @@ int GM_SerialComSend(unsigned char * cSendBuf, size_t nSendLen, int commid) //isendtime = get_msec(); if (len < 0) { - sprintf(szbuf, "write data error "); - DebugLog(commid, szbuf, 'E'); + DebugLog(commid, 'E', "write data error "); return -1; } else if (len > 0) @@ -765,8 +409,7 @@ int Gm_SetSerialPortParam(int commid) if (ret < 0) { memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "串口%d 波特率等参数设置错误!", commid + 1); - DebugLog(commid, szbuf, 'E'); + DebugLog(commid, 'E', "串口%d 波特率等参数设置错误!", commid + 1); return -1; } return ret; @@ -950,7 +593,7 @@ uint8_t getdevtype(int devno) return devparam[devno].ProtocolIdx; } // 初始化所有串口及所接传感器的配置 -void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir,const char *log) +void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log) { int i; char szbuf[128]; @@ -1037,8 +680,7 @@ void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir,const char *log) sprintf(szbuf, "%s", "没有启用!;"); else { - sprintf(szbuf, "%s 已启用!;", szbuf); - DebugLog(8, szbuf, 'I'); + DebugLog(8, 'I', "%s 已启用!;", szbuf); } } if (NULL == filedir) @@ -1094,7 +736,7 @@ void testComm() CameraPhotoCmd(time(NULL), 1, 0, 6, 1, "/dev/ttyS0",38400, 1); sleep(5); - CameraPhotoCmd(time(NULL), 1, 10017, 0, 2, "/dev/ttyS1",38400, 1); + CameraPhotoCmd(time(NULL), 1, MOVE_PRESETNO, 0, 2, "/dev/ttyS1",38400, 1); sleep(5); CameraPhotoCmd(0, 1, MOVE_LEFT, 0, 0, "/dev/ttyS1",38400, 1); @@ -1105,7 +747,7 @@ void testComm() sleep(5); CameraPhotoCmd(0, 1, MOVE_UP, 0, 0, "/dev/ttyS1",38400, 1); sleep(5); - CameraPhotoCmd(0, 1, 10017, 0, 1, "/dev/ttyS1",38400, 1); + CameraPhotoCmd(0, 1, MOVE_PRESETNO, 0, 1, "/dev/ttyS1",38400, 1); sleep(5); sleep(5); //CameraPhotoCmd(0, 1, ZOOM_WIDE, 0, 0); @@ -1290,8 +932,7 @@ void GM_StartSerialComm() { srdt.ms_dev[i].aiValue[j].AiState = SER_STARTSAMPLE; weatherpntmsg[j].AiState = SER_STARTSAMPLE; - sprintf(logbuf, "init weather_state%d=%d", j, weatherpntmsg[j].AiState); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "init weather_state%d=%d", j, weatherpntmsg[j].AiState); } break; case RALLY_PROTOCOL: /* 拉力*/ @@ -1312,8 +953,7 @@ void GM_StartSerialComm() { srdt.ms_dev[i].aiValue[j].AiState = SER_STARTSAMPLE; weatherpntmsg[j].AiState = SER_STARTSAMPLE; - sprintf(logbuf, "init weather_state%d=%d", j, weatherpntmsg[j].AiState); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "init weather_state%d=%d", j, weatherpntmsg[j].AiState); } break; case SLANT_PROTOCOL: /* 倾角*/ @@ -1336,40 +976,20 @@ void GM_StartSerialComm() break; } // 测试查询传感器电源状态 -#if 0 - LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - LOGE("3.3V state= %d", getInt(CMD_SET_CAM_3V3_EN_STATE)); - LOGE("485 state=%d", getInt(CMD_SET_485_EN_STATE)); - - set12VEnable(true); - setCam3V3Enable(true); - setRS485Enable(true); - sleep(1); - LOGV("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - LOGV("3.3V state= %d", getInt(CMD_SET_CAM_3V3_EN_STATE)); - LOGV("485 state=%d", getInt(CMD_SET_485_EN_STATE)); - set12VEnable(false); - setCam3V3Enable(false); - 
setRS485Enable(false); - sleep(1); - LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - LOGE("3.3V state= %d", getInt(CMD_SET_CAM_3V3_EN_STATE)); - LOGE("485 state=%d", getInt(CMD_SET_485_EN_STATE)); -#endif + // 打开传感器电源 // 打开对应的485电源 // 打开串口通讯 memset(logbuf, 0, sizeof(logbuf)); if (1 == srdt.ms_dev[i].IsNeedSerial) { - sprintf(logbuf, "装置%d, IsNoInsta=%d, 类型:%s", i + 1, devparam[i].IsNoInsta, szbuf); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "装置%d, IsNoInsta=%d, 类型:%s", i + 1, devparam[i].IsNoInsta, szbuf); Gm_OpenSensorsPower(); Gm_OpenSerialPort(i); } } - DebugLog(8, "启动数据采样!", 'I'); + DebugLog(8, 'I', "启动数据采样!"); /* 直接使用循环进行采样处理*/ //polltime = get_msec(); for (;;) @@ -1379,8 +999,7 @@ void GM_StartSerialComm() //polltime = get_msec(); if (GM_SerialTimer() < 0) { - //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - DebugLog(8, "退出采样流程!", 'V'); + DebugLog(8, 'V', "退出采样流程!"); sleep(5); //GM_StartSerialComm(); break; @@ -1430,10 +1049,6 @@ void Gm_FindAllSensorsCommand() Gm_OpenSerialPort(curidx); if (serialport[devparam[curidx].commid].cmdlen > 0) break; - //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - //LOGE("3.3V state= %d", getInt(CMD_SET_CAM_3V3_EN_STATE)); - //LOGE("485 state=%d", getInt(CMD_SET_485_EN_STATE)); - flag = -1; switch (devparam[curidx].ProtocolIdx) @@ -1453,6 +1068,7 @@ void Gm_FindAllSensorsCommand() case PELCO_P_PROTOCOL: /* 摄像机协议*/ case SERIALCAMERA_PROTOCOL: /* 串口摄像机协议*/ break; + default: break; } if (flag == -1) @@ -1500,8 +1116,7 @@ void GM_IsCloseSensors() { srdt.ms_dev[i].IsNeedSerial = 0; // 关闭传感器电源 - sprintf(buf, "读取装置%d数据%0.3f秒,关闭装置%d电源!", i + 1, (get_msec() - srdt.ms_dev[i].FirstCmdTimeCnt) / 1000.0, i + 1); - DebugLog(devparam[i].commid, buf, 'I'); + DebugLog(devparam[i].commid, 'I', "读取装置%d数据%0.3f秒,关闭装置%d电源!", i + 1, (get_msec() - srdt.ms_dev[i].FirstCmdTimeCnt) / 1000.0, i + 1); for (j = 0; j < MAX_DEV_VALUE_NUM; j++) { if (SER_STARTSAMPLE == srdt.ms_dev[i].aiValue[j].AiState) @@ -1517,8 +1132,7 @@ void GM_IsCloseSensors() weatherpntmsg[j].AiState = SER_SAMPLEFAIL; else if (SER_SAMPLE == weatherpntmsg[j].AiState) weatherpntmsg[j].AiState = SAMPLINGSUCCESS; - sprintf(buf, "over weather_state%d=%d", j, weatherpntmsg[j].AiState); - DebugLog(8, buf, 'I'); + DebugLog(8, 'I', "over weather_state%d=%d", j, weatherpntmsg[j].AiState); } } } @@ -1533,8 +1147,7 @@ void GM_IsCloseSensors() else if (PHOTO_SAVE_SUCC == srdt.ms_dev[i].image.state) srdt.ms_dev[i].image.state = SAMPLINGSUCCESS; srdt.ms_dev[i].IsNeedSerial = 0; - sprintf(buf, "通道%d摄像机使用完毕!可以关闭摄像机电源!", devparam[i].CameraChannel); - DebugLog(devparam[i].commid, buf, 'I'); + DebugLog(devparam[i].commid, 'I', "通道%d摄像机使用完毕!可以关闭摄像机电源!", devparam[i].CameraChannel); } break; } @@ -1602,8 +1215,7 @@ int GM_CloseTimer() //Gm_CloseSensorsPower(); for (j = 1; j < MAX_SERIAL_PORT_NUM; j++) ClearCmdFormPollCmdBuf(j); - sprintf(buf, "%s", "关闭串口定时器!"); - DebugLog(8, buf, 'I'); + DebugLog(8, 'I', "%s", "关闭串口定时器!"); return -1; } } @@ -1612,7 +1224,6 @@ void SerialDataProcess(int devidx, u_char *buf, int len) { switch (devparam[devidx].ProtocolIdx) { - case WEATHER_PROTOCOL: /* 气象*/ case RALLY_PROTOCOL: /* 拉力*/ case WIND_PROTOCOL: /* 风速风向*/ @@ -1629,133 +1240,6 @@ void SerialDataProcess(int devidx, u_char *buf, int len) } } -void DebugLog(int commid, char *szbuf, char flag) -{ - if (NULL == szbuf) - return; - SaveLogTofile(commid, szbuf); - switch (flag) - { - case 'E': - ALOGE("%s", szbuf); - break; - case 'I': - ALOGI("%s", szbuf); - break; - case 'D': - ALOGD("%s", szbuf); - break; - case 'V': - 
ALOGI("%s", szbuf); - break; - case 'W': - ALOGW("%s", szbuf); - break; - default: - ALOGI("%s", szbuf); - break; - } -} - -int SaveLogTofile(int commid, char *szbuf) -{ - int status; - time_t now; - char filename[512], filedir[512], buf[128]; - FILE *fp = NULL; - struct tm t0; - struct timeval tv; - - if (NULL == szbuf) - return -1; - - now = time(NULL); - localtime_r(&now, &t0); - gettimeofday(&tv, NULL); - - memset(filedir, 0, sizeof(filedir)); - if(logPath != NULL) - strcpy(filedir, logPath); - - if (access(filedir, 0) == 0) - ;//LOGI("文件路径已经存在!"); - else - { - status = mkdir(filedir, S_IRWXU | S_IRWXG | S_IRWXO); - if (status < 0) - return -1; - } - // 写入文件到sdcard - memset(filename, 0, sizeof(filename)); - sprintf(filename, "%sCOM%dlog-%04d-%02d-%02d.log", filedir, commid + 1, t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday); - fp = fopen(filename, "a+"); - if (NULL == fp) - return -1; - memset(buf, 0, sizeof(buf)); - sprintf(buf, "%d-%d-%d %d:%d:%d-%d ", t0.tm_year + 1900, t0.tm_mon + 1, t0.tm_mday, t0.tm_hour, t0.tm_min, t0.tm_sec, (int)(tv.tv_usec / 1000)); - fwrite(buf, 1, strlen(buf), fp); - fwrite(szbuf, 1, strlen(szbuf), fp); - memset(buf, 0, sizeof(buf)); - strcpy(buf, "\n"); - fwrite(buf, 1, strlen(buf), fp); - - fclose(fp); - return 1; -} - -int SaveImageDataTofile(SIO_PARAM_SERIAL_DEF *curserial) -{ - u_char *image = NULL, *tempphoto = NULL; - int i, status; - size_t len; - char filename[512]/*, filedir[512]*/, szbuf[128]; - FILE *fp = NULL; - - image = (u_char*)malloc(curserial->image.imagelen); - if (NULL == image) - return -1; - tempphoto = image; - for (i = 0; i < curserial->image.imagenum; ++i) { - memmove(tempphoto, &curserial->image.buf[i], (size_t)curserial->image.ilen[i]); - tempphoto += (size_t)curserial->image.ilen[i]; - } - - memset(szbuf, 0, sizeof(szbuf)); - //memset(filedir, 0, sizeof(filedir)); - //sprintf(filedir, "/sdcard/photo/"); - - if (access(srdt.filedir, 0) == 0) - { - sprintf(szbuf, "文件路径%s已经存在!", srdt.filedir); - DebugLog(0, szbuf, 'I'); - } - else - { - status = mkdir(srdt.filedir, S_IRWXU | S_IRWXG | S_IRWXO); - if (status < 0) - return -1; - } - // 写入文件到sdcard - memset(curserial->image.photoname, 0, sizeof(curserial->image.photoname)); - sprintf(curserial->image.photoname, "%s1-%d-%d.jpg", srdt.filedir, curserial->image.presetno, curserial->image.phototime); - fp = fopen(curserial->image.photoname, "wb+"); - if (NULL == fp) - return -1; - len = fwrite(image, 1, curserial->image.imagelen, fp); - fclose(fp); - free(image); - image = NULL; - if (len < curserial->image.imagelen) - return -1; - else - { - memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "写入图片文件%s成功!", curserial->image.photoname); - DebugLog(0, szbuf, 'I'); - return 1; - } -} - /******************************************************************* * 读 摄像机 数据 * *******************************************************************/ @@ -1849,8 +1333,10 @@ void CameraRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len) void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) { RTUMSG rtumsg; - int img_file_size, packetnum, iNo, packsize, i = 0, presetno, iphototime, pidx; - char szbuf[128]; + int img_file_size, packetnum, iNo, packsize, i = 0, j = 0, presetno, iphototime, pidx; + int datanum; + float fvalue; + // char szbuf[128]; uint16_t uDevAddr, datalen=0; uint8_t cmdidx, recvend; @@ -1866,7 +1352,7 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) datalen = rtumsg.MsgData[1]*256+rtumsg.MsgData[2]; if (uDevAddr != curserial->cameraaddr) return; - 
memset(szbuf, 0, sizeof(szbuf)); + // memset(szbuf, 0, sizeof(szbuf)); switch (cmdidx) { case 0x10: /* 拍照应答*/ @@ -1876,8 +1362,7 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) if (srdt.RephotographCnt > 2) { curserial->SerialCmdidx = -1; - sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!", srdt.RephotographCnt); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "因摄像机重拍%u次均未成功!结束拍照!", (uint32_t)srdt.RephotographCnt); } break; } @@ -1892,8 +1377,8 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) curserial->image.imagelen = img_file_size; curserial->image.imagenum = packetnum; srdt.historyimagenum[0] = rtumsg.MsgData[i + 7] + (rtumsg.MsgData[i + 6] << 8); - sprintf(szbuf, "有%d张历史图片!", srdt.historyimagenum[0]); - DebugLog(0, szbuf, 'V'); + DebugLog(0, 'V', "有%d张历史图片!", srdt.historyimagenum[0]); + presetno = (int)rtumsg.MsgData[i + 8]; curserial->image.presetno = presetno; curserial->image.state = SER_SAMPLE; @@ -1913,8 +1398,7 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) packsize = rtumsg.MsgData[i + 3] + rtumsg.MsgData[i + 2] * 256; memmove(&curserial->image.buf[iNo - 1], &rtumsg.MsgData[i + 4], packsize); curserial->image.ilen[iNo - 1] = packsize; - sprintf(szbuf, "收到第%d(总%d包)包长=%d", iNo, curserial->image.imagenum, packsize); - DebugLog(0, szbuf, 'V'); + DebugLog(0, 'V', "收到第%d(总%d包)包长=%d", iNo, curserial->image.imagenum, packsize); curserial->RevCmdFlag = 1; curserial->FirstCmdTimeCnt = get_msec(); if (iNo == curserial->SerialCmdidx) @@ -1967,9 +1451,8 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) break; } srdt.errorPhotoNoCnt++; - sprintf(szbuf, "问询第%d包图片摄像机应答第%d包,连续错误%d次!", - curserial->SerialCmdidx, iNo, srdt.errorPhotoNoCnt); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "问询第%d包图片摄像机应答第%d包,连续错误%d次!", + curserial->SerialCmdidx, iNo, (uint32_t)srdt.errorPhotoNoCnt); if (srdt.errorPhotoNoCnt > 5) { curserial->SerialCmdidx = 0; @@ -1977,25 +1460,95 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) if (srdt.RephotographCnt > 2) { curserial->SerialCmdidx = -1; - sprintf(szbuf, "因摄像机重拍%d次均未成功!结束拍照!", srdt.RephotographCnt); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "因摄像机重拍%d次均未成功!结束拍照!", (uint32_t)srdt.RephotographCnt); } } break; case 0x03: - //sprintf(szbuf, "设置波特率%d成功", curserial->baud); - //DebugLog(devparam[devno].commid, szbuf, 'D'); curserial->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; curserial->RevCmdFlag = 1; curserial->FirstCmdTimeCnt = get_msec(); break; + case 0xA0: + DebugLog(0, 'I', "云台收到关电通知响应!"); + curserial->SerialCmdidx = -1; + break; + case 0x08: + datanum = curserial->m_au8RecvBuf[6]; + // memset(szbuf, 0, sizeof(szbuf)); + for (i = 0, j = 7; (i < datanum) && (j < 6 + datalen); i++, j += 5) + { + fvalue = (curserial->m_au8RecvBuf[j + 1] << 24) + (curserial->m_au8RecvBuf[j + 2] << 16) + + (curserial->m_au8RecvBuf[j + 3] << 8) + curserial->m_au8RecvBuf[j + 4]; + switch (curserial->m_au8RecvBuf[j]) + { + case 200: /* 云台状态*/ + curserial->ptz_state.ptz_process = curserial->m_au8RecvBuf[j + 1]; + curserial->ptz_state.ptz_status = curserial->m_au8RecvBuf[j+4]; + switch (curserial->ptz_state.ptz_process) + { + case 1: // 自检 + if (0 == curserial->ptz_state.ptz_status) + DebugLog(0, 'I', "云台自检结束!"); + else if (0 < curserial->ptz_state.ptz_status) + DebugLog(0, 'I', "云台正在自检!"); + else + DebugLog(0, 'I', "云台自检发生错误!"); + break; + case 2: // 调用预置点 + if (0 == curserial->ptz_state.ptz_status) + DebugLog(0, 'I', "调用预置位结束,云台处于所调预置位!"); + else if (0 < curserial->ptz_state.ptz_status) 
+ DebugLog(0, 'I', "调用预置位,云台正在前往所调预置位位置!"); + else if (2 == (curserial->ptz_state.ptz_status & 0x0f)) + DebugLog(0, 'I', "调用预置位时,机芯电源未打开!"); + else + DebugLog(0, 'I', "调用预置位时,发生了错误,未正确执行!"); + break; + case 3: // 一般状态 + if (0 == curserial->ptz_state.ptz_status) + DebugLog(0, 'I', "云台处于静止状态!"); + else if (0 < curserial->ptz_state.ptz_status) + DebugLog(0, 'I', "云台正在运动!"); + else + DebugLog(0, 'I', "云台发生错误!"); + break; + default: + DebugLog(0, 'I', "未知错误,云台应答错误!"); + break; + } + break; + case 201: /* 云台预置点*/ + curserial->ptz_state.presetno = (curserial->m_au8RecvBuf[j + 3] << 8) + curserial->m_au8RecvBuf[j + 4]; + DebugLog(0, 'I', "云台预置点号=%u", (uint32_t)curserial->ptz_state.presetno); + break; + case 202: /* 云台坐标*/ + fvalue = (uint16_t)((curserial->m_au8RecvBuf[j + 1] << 8) + (curserial->m_au8RecvBuf[j + 2])); + fvalue /=100.0; + curserial->ptz_state.x_coordinate = fvalue; + fvalue = (uint16_t)((curserial->m_au8RecvBuf[j + 3] << 8) + (curserial->m_au8RecvBuf[j + 4])); + fvalue /=100.0; +#if 0 + if(fvalue < 180) + fvalue *= -1; + else + fvalue = 360-fvalue; +#endif + if(fvalue > 180) + fvalue -= 360; + curserial->ptz_state.y_coordinate = fvalue; + DebugLog(0, 'I', "云台坐标水平(X)=%0.2f°, 垂直(Y)=%0.2f°", curserial->ptz_state.x_coordinate, curserial->ptz_state.y_coordinate); + break; + } + } + curserial->SerialCmdidx = -1; + break; case 0x15: if (0xFF == rtumsg.MsgData[6]) { curserial->SerialCmdidx = -1; - strcpy(szbuf, "没有历史图片!结束读取图片!"); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "没有历史图片!结束读取图片!"); break; } i = 6; @@ -2011,8 +1564,7 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) srdt.historyimagenum[0] = rtumsg.MsgData[i + 7] + (rtumsg.MsgData[i + 6] << 8); presetno = rtumsg.MsgData[i + 8]; curserial->image.presetno = presetno; - sprintf(szbuf, "读取历史图片,还有%d张历史图片!", srdt.historyimagenum[0]); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "读取历史图片,还有%d张历史图片!", srdt.historyimagenum[0]); curserial->RevCmdFlag = 1; curserial->SerialCmdidx = 1; srdt.sendphotocmdcnt = 0; @@ -2021,8 +1573,7 @@ void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) if (0xFF == rtumsg.MsgData[10]) { curserial->SerialCmdidx = -1; - strcpy(szbuf, "摄像机图片保存失败!"); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "摄像机图片保存失败!"); } curserial->SerialCmdidx = -1; if (0 == rtumsg.MsgData[10]) @@ -2134,16 +1685,13 @@ void SendCmdFormPollCmdBuf(int port) len = GM_SerialComSend(&pPortParam->PollCmd[2], pPortParam->cmdlen - 2, port); if (len < 1) { - sprintf(buf, "串口%d, 发送命令失败! fd = %d", port + 1, serialport[port].fd); - DebugLog(port, buf, 'E'); + DebugLog(port, 'E', "串口%d, 发送命令失败! 
fd = %d", port + 1, serialport[port].fd); } else { - sprintf(buf, "发送串口%d 装置%d命令:", port + 1, srdt.curdevidx[port] + 1); BytestreamLOG(port, buf, &pPortParam->PollCmd[2], len, 'D'); - sprintf(buf, "sendtimeconst= %lld", lctime - pPortParam->lsendtime); - DebugLog(port, buf, 'W'); + DebugLog(port, 'W', "sendtimeconst= %lld", (long long)(lctime - pPortParam->lsendtime)); pPortParam->lsendtime = lctime; } pPortParam->SendCmdFlag = 1; @@ -2233,7 +1781,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) } switch (cmdno) { - case 0:/* 下发拍照指令*/ + case TAKE_PHOTO:/* 下发拍照指令*/ if (lcurtime - pPortParam->FirstCmdTimeCnt < 3800) return -1; if ((lcurtime - pPortParam->FirstCmdTimeCnt > 3 * 35 * 1000) || (lcurtime - pPortParam->FirstCmdTimeCnt < 0)) @@ -2245,13 +1793,11 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) if (lcurtime - pPortParam->FirstCmdTimeCnt > 35 * 1000) { pPortParam->SerialCmdidx = -1; - strcpy(szbuf, "串口摄像机未接或故障!结束拍照!"); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "串口摄像机未接或故障!结束拍照!"); return -1; } memset(szbuf, 0, sizeof(szbuf)); - sprintf(szbuf, "time=%lldms", lcurtime - pPortParam->FirstCmdTimeCnt); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "time=%lld ms", (long long)(lcurtime - pPortParam->FirstCmdTimeCnt)); packetsize = (uint16_t)MAX_PHOTO_FRAME_LEN; srdt.sendphotocmdcnt++; srdt.imagepacketnum = 0; @@ -2260,7 +1806,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) imagesize = srdt.bImageSize; break; - case 10000: /* 下发设置串口波特率命令*/ + case SET_BAUD: /* 下发设置串口波特率命令*/ #if 0 switch (devparam[devidx].baudrate) { @@ -2274,8 +1820,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) imagesize = 0x09; break; default: - sprintf(szbuf, "设置串口摄像机参数时,配置参数错误!退出设置!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); + DebugLog(devparam[devidx].commid, 'I', "设置串口摄像机参数时,配置参数错误!退出设置!"); pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return -1; @@ -2284,8 +1829,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) if (lcurtime - pPortParam->FirstCmdTimeCnt > 15 * 1000) { pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "设置串口摄像机参数时,15秒未收到摄像机应答!退出设置!"); - DebugLog(devparam[devidx].commid, szbuf, 'I'); + DebugLog(devparam[devidx].commid, 'I', "设置串口摄像机参数时,15秒未收到摄像机应答!退出设置!"); return -1; } cmdidx = 0x03; @@ -2308,7 +1852,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) packetsize = (uint16_t)MAX_PHOTO_FRAME_LEN; break; - case 10005: /* 关闭功能*/ + case STOP_CMD: /* 关闭功能*/ //Gm_CtrlPtzCmd(1, P_MOVE_LEFT); //sleep(2); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2317,25 +1861,25 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) srdt.iLastGetPhotoNo = -1; //sleep(20); return 1; - case 10006: /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ + case AUTO_SCAN: /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ Gm_CtrlPtzCmd(pPortParam, P_Auto_Scan); usleep(100000); pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10007: /* 光圈缩小(1 有效)*/ + case IRIS_CLOSE: /* 光圈缩小(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_IRIS_CLOSE); usleep(100000); pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10008: /* 光圈放大(1 有效)*/ + case IRIS_OPEN: /* 光圈放大(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_IRIS_OPEN); usleep(100000); pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10009: /* 近距离聚焦(1 有效)*/ + case FOCUS_NEAR: /* 近距离聚焦(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_FOCUS_NEAR); usleep(500000); 
Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2343,7 +1887,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10010: /* 远距离聚焦(1 有效)*/ + case FOCUS_FAR: /* 远距离聚焦(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_FOCUS_FAR); usleep(500000); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2351,7 +1895,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10011: /* 远离物体(1 有效)*/ + case ZOOM_WIDE: /* 远离物体(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_ZOOM_WIDE); usleep(500000); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2359,7 +1903,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10012: /* 接近物体(1 有效)*/ + case ZOOM_TELE: /* 接近物体(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_ZOOM_TELE); usleep(500000); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2367,7 +1911,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10013: /* 向下移动镜头(1 有效)*/ + case MOVE_DOWN: /* 向下移动镜头(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_MOVE_DOWN); sleep(1); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2375,7 +1919,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10014: /* 向上移动镜头(1 有效)*/ + case MOVE_UP: /* 向上移动镜头(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_MOVE_UP); sleep(1); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2383,7 +1927,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10015: /* 向左移动镜头(1 有效)*/ + case MOVE_LEFT: /* 向左移动镜头(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_MOVE_LEFT); sleep(1); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2391,7 +1935,7 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10016: /* 向右移动镜头(1 有效)*/ + case MOVE_RIGHT: /* 向右移动镜头(1 有效)*/ Gm_CtrlPtzCmd(pPortParam, P_MOVE_RIGHT); sleep(1); Gm_CtrlPtzCmd(pPortParam, Cmd_Cancel); @@ -2399,26 +1943,66 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; - case 10017: /* 调用预置点*/ + case MOVE_PRESETNO: /* 调用预置点*/ //srdt.presetno = 2; Gm_CtrlPtzCmd(pPortParam, MOVE_TO_PRESETNO + srdt.presetno); +#if 0 sleep(2); if (0 == srdt.IsSleep) { - pPortParam->SerialCmdidx = 10017; + pPortParam->SerialCmdidx = MOVE_PRESETNO; srdt.IsSleep++; return 1; } +#endif pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; srdt.IsSleep = 0; return 1; - case 10018: /* 设置预置点*/ + case SAVE_PRESETNO: /* 设置预置点*/ Gm_CtrlPtzCmd(pPortParam, SET_PRESETNO + srdt.presetno); usleep(100000); pPortParam->SerialCmdidx = srdt.iLastGetPhotoNo; srdt.iLastGetPhotoNo = -1; return 1; + case NOTIFY_PTZ_CLOSE: /* 通知云台关闭电源*/ + if (pPortParam->sendptzstatecmd == 0) + { + cmdidx = 0xA0; + pPortParam->sendptzstatecmd++; + } + else + { + pPortParam->SerialCmdidx = -2; + pPortParam->sendptzstatecmd = 0; + DebugLog(0, 'I', "云台未接或故障!结束通知!"); + return -1; + } + MakePtzStateQueryCommand(pPortParam, cmdidx); + return 1; + case QUERY_PTZ_STATE: /* 查询云台状态信息*/ + //DebugLog(0, 'I', 
"下发命令sendptzstatecmd=%d!", pPortParam->sendptzstatecmd); + //if (pPortParam->sendptzstatecmd > 0) +#if 1 + if (pPortParam->sendptzstatecmd == 0) + { + pPortParam->sendptzstatecmd++; + cmdidx = 0x08; + //DebugLog(0, 'I', "下发命令sendptzstatecmd=%d!", pPortParam->sendptzstatecmd); + } + else +#endif + { + pPortParam->SerialCmdidx = -2; + pPortParam->sendptzstatecmd = 0; + DebugLog(0, 'I', "云台未接或故障!结束查询!"); + return -1; + } + //pPortParam->sendptzstatecmd++; + //cmdidx = 0x08; + MakePtzStateQueryCommand(pPortParam, cmdidx); + return 1; + default: imagesize = 0xFF; packetsize = (uint16_t)pPortParam->SerialCmdidx; @@ -2433,13 +2017,14 @@ int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam) if (lcurtime - pPortParam->FirstCmdTimeCnt > 35 * 1000) { pPortParam->SerialCmdidx = -1; - sprintf(szbuf, "读取第%d包图片数据35秒未收到!结束拍照!", packetsize); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "读取第%u包图片数据35秒未收到!结束拍照!", (uint32_t)packetsize); return -1; } break; } - MakeCameraPhotoCommand(pPortParam, cmdidx, imagesize, packetsize, imagequality, srdt.sendphototime); + //DebugLog(0, 'I', "make:sendptzstatecmd"); + + MakeCameraPhotoCommand(pPortParam, cmdidx, imagesize, packetsize, imagequality, srdt.sendphototime); return 1; } @@ -2450,10 +2035,11 @@ void MakeCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx, in { int i, icurtime; u_char *sendbuf; - //char szbuf[128]; sendbuf = pPortParam->PollCmd; + DebugLog(0, 'I', "make:sendptzstatecmd start!"); + //return; icurtime = phototime; i = 0; sendbuf[i++] = 0x00; /* 强制等待时间*/ @@ -2504,6 +2090,8 @@ void MakeCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx, in sendbuf[i++] = LOBYTE(LOWORD(OneParam)); sendbuf[i++] = (uint8_t)TwoParam; /* 是否需要保存*/ break; + default: + break; } sendbuf[i] = CalLpc((u_char *)&sendbuf[6], i - 6); i += 1; @@ -2511,9 +2099,52 @@ void MakeCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx, in sendbuf[3] = (uint8_t)((i - 10) >> 8); sendbuf[4] = (uint8_t)(i - 10); pPortParam->cmdlen = i; +#if 1 + if(0x08 == cmdidx) + { + DebugLog(0, 'I', "生成查询云台位置命令!"); + } +#endif //return; } +/********************************************************************************* + 生成 QueryPtzState命令 +**********************************************************************************/ +void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx) +{ + int i, icurtime; + u_char *sendbuf; + //char szbuf[128]; + + sendbuf = pPortParam->PollCmd; + + //DebugLog(0, 'I', "make:sendptzstatecmd start!"); + i = 0; + sendbuf[i++] = 0x00; /* 强制等待时间*/ + sendbuf[i++] = 0x00; /* 强制等待时间*/ + sendbuf[i++] = 0x68; /* 起始字符*/ + sendbuf[i++] = (uint8_t)0x00; /* length */ + sendbuf[i++] = (uint8_t)0x00; /* length */ + sendbuf[i++] = 0x68; /* 起始字符*/ + sendbuf[i++] = (uint8_t)pPortParam->cameraaddr;/* 传感器地址*/ + sendbuf[i++] = cmdidx; /* 命令字*/ + sendbuf[i] = CalLpc((u_char *)&sendbuf[6], i - 6); + i += 1; + sendbuf[i++] = 0x16; /* 信息尾*/ + sendbuf[3] = (uint8_t)((i - 10) >> 8); + sendbuf[4] = (uint8_t)(i - 10); + pPortParam->cmdlen = i; +#if 0 + //DebugLog(0, 'I', "make over!"); + if(0x08 == cmdidx) + { + DebugLog(0, 'I', "生成查询云台位置命令!"); + } +#endif + //return; +} + // 准备发送云台指令 int Gm_CtrlPtzCmd(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t ptzcmd) { @@ -2670,7 +2301,7 @@ void Gm_SendPelco_pCommand(uint32_t cmdtype) len = GM_SerialComSend(commandbuf, len, srdt.camerauseserial); if (len < 1) { - DebugLog(srdt.camerauseserial, "发送Pelco_p命令失败", 'E'); + DebugLog(srdt.camerauseserial, 'E', "发送Pelco_p命令失败"); } else { @@ 
-2719,7 +2350,7 @@ void Gm_SendPelco_DCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t cmdtype) len = GM_CameraComSend(commandbuf, len, pPortParam->fd); if (len < 1) { - DebugLog(0, "发送Pelco_D命令失败", 'E'); + DebugLog(0, 'E', "发送Pelco_D命令失败"); } else { @@ -2832,13 +2463,23 @@ void MakeShxyProtocolPollCommand(int portno, uint8_t cmdidx) unsigned char CalLpc(unsigned char *msg, int len) { int i; - u_char retval = 0; + unsigned char retval = 0; for (i = 0; i < len; i++) retval += msg[i]; return retval; } +unsigned char BDXorCheck(unsigned char *msg, int len) +{ + int i; + unsigned char retval = 0; + + for (i = 0; i < len; i++) + retval ^= msg[i]; + return retval; +} + /*************************************************************** * 读上海欣影传感器协议数据 * ***************************************************************/ @@ -3019,8 +2660,7 @@ void ShxyProtocolDataProcess(int devno) //slantpntmsg[aipnt][0].AiState = 1; //if ((gDisSunRain & 0x20) == 0x20) { - sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); - //DebugLog(devparam[devno].commid, szbuf, 'V'); + // DebugLog(devparam[devno].commid, 'V', "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); } //XslantSec[aipnt][srdt.SectimesamplingCnt[0]] = (short)slantpntmsg[aipnt][0].EuValue; //srdt.SectimesamplingCnt[0] += 1; @@ -3031,8 +2671,7 @@ void ShxyProtocolDataProcess(int devno) *((uint8_t*)fvalua + 3) = curserial->m_au8RecvBuf[10]; //if ((gDisSunRain & 0x20) == 0x20) { - sprintf(szbuf, "%sY =%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "%sY =%0.3f ", szbuf, fvalue); } if ((fvalue < -59) || (fvalue > 59)) { @@ -3068,7 +2707,9 @@ void ShxyProtocolDataProcess(int devno) switch (curserial->m_au8RecvBuf[j]) { case 1: /*温度*/ - if ((fvalue < -40) || (fvalue > 85)) + if(devparam[devno].ProtocolIdx == WIND_PROTOCOL) + break; + if ((fvalue < -60) || (fvalue > 100)) { frnb = (GeneratingRandomNumber() % 101 - 50) / 1000.0; pPortParam->aiValue[AirTempNo].EuValue *= (1 + frnb); @@ -3077,19 +2718,20 @@ void ShxyProtocolDataProcess(int devno) } else { - pPortParam->aiValue[AirTempNo].EuValue = fvalue;/*pPortParam->aiValue[0].AiParam.fFactor + pPortParam->aiValue[0].AiParam.EuValueDelta;*/ - weatherpntmsg[AirTempNo].EuValue = fvalue;/*weatherpntmsg[AirTempNo].AiParam.fFactor + weatherpntmsg[AirTempNo].AiParam.EuValueDelta;*/ + pPortParam->aiValue[AirTempNo].EuValue = fvalue; + weatherpntmsg[AirTempNo].EuValue = fvalue; } pPortParam->aiValue[AirTempNo].AiState = SER_SAMPLE; weatherpntmsg[AirTempNo].AiState = SER_SAMPLE; //g_SelfTest.SensorsFault |= (0x01); //if ((gDisSunRain & 0x80) == 0x80) { - sprintf(szbuf, "ID:%d 温度:%0.3f ", devparam[devno].devaddr, fvalue); - //DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "ID:%d 温度:%0.3f ", devparam[devno].devaddr, fvalue); } break; case 2: /*气压*/ + if(devparam[devno].ProtocolIdx == WIND_PROTOCOL) + break; if ((fvalue < 550) || (fvalue > 1060)) { frnb = (GeneratingRandomNumber() % 41 - 20) / 10000.0; @@ -3098,19 +2740,20 @@ void ShxyProtocolDataProcess(int devno) } else { - pPortParam->aiValue[AtmosNo].EuValue = fvalue;/*pPortParam->aiValue[5].AiParam.fFactor + pPortParam->aiValue[5].AiParam.EuValueDelta;*/ - weatherpntmsg[AtmosNo].EuValue = fvalue;/*weatherpntmsg[AtmosNo].AiParam.fFactor + weatherpntmsg[AtmosNo].AiParam.EuValueDelta;*/ + pPortParam->aiValue[AtmosNo].EuValue = fvalue; + weatherpntmsg[AtmosNo].EuValue = fvalue; } pPortParam->aiValue[AtmosNo].AiState = SER_SAMPLE; 
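/* The per-device barometric-pressure point has just been flagged as freshly sampled; the shared weatherpntmsg copy is flagged on the next line so both tables stay in step. */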
weatherpntmsg[AtmosNo].AiState = SER_SAMPLE; //g_SelfTest.SensorsFault |= (0x10); //if ((gDisSunRain & 0x80) == 0x80) { - sprintf(szbuf, "%s气压:%0.3f ", szbuf, fvalue); - //DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "气压:%0.3f ", fvalue); } break; case 3: /*湿度*/ + if(devparam[devno].ProtocolIdx == WIND_PROTOCOL) + break; if ((fvalue < 0) || (fvalue > 100)) { frnb = (GeneratingRandomNumber() % 41 - 20) / 1000.0; @@ -3127,9 +2770,7 @@ void ShxyProtocolDataProcess(int devno) //g_SelfTest.SensorsFault |= (0x02); //if ((gDisSunRain & 0x80) == 0x80) { - sprintf(szbuf, "%s湿度:%0.3f ", szbuf, fvalue); - if(datanum < 6) - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "湿度:%0.3f ", fvalue); } break; case 4: /*雨量*/ @@ -3149,9 +2790,7 @@ void ShxyProtocolDataProcess(int devno) //g_SelfTest.SensorsFault |= (0x02); //if ((gDisSunRain & 0x80) == 0x80) { - sprintf(szbuf, "%s雨量:%0.3f ", szbuf, fvalue); - if(datanum < 7) - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "雨量:%0.3f ", fvalue); } break; case 5: /*日照*/ @@ -3169,8 +2808,7 @@ void ShxyProtocolDataProcess(int devno) pPortParam->aiValue[OpticalRadiationNo].AiState = SER_SAMPLE; weatherpntmsg[OpticalRadiationNo].AiState = SER_SAMPLE; { - sprintf(szbuf, "%s日照:%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "日照:%0.3f ", fvalue); } break; case 6: /*风速*/ @@ -3214,8 +2852,7 @@ void ShxyProtocolDataProcess(int devno) //g_SelfTest.SensorsFault |= (0x08); //if ((gDisSunRain & 0x10) == 0x10) { - sprintf(szbuf, "%s 风向:%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "%s 风向:%0.3f ", szbuf, fvalue); } break; case 8: /*拉力*/ @@ -3229,8 +2866,7 @@ void ShxyProtocolDataProcess(int devno) // +rallypntmsg[aipnt][0].AiParam.EuValueDelta; pPortParam->aiValue[0].AiState = SER_SAMPLE; //rallypntmsg[aipnt][0].AiState = 1; - sprintf(szbuf, "地址%d拉力:%0.3fKg ", devparam[devno].devaddr, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "地址%d拉力:%0.3fKg ", devparam[devno].devaddr, fvalue); //} break; case 9: /*倾角传感器X轴倾角*/ @@ -3249,11 +2885,10 @@ void ShxyProtocolDataProcess(int devno) } pPortParam->aiValue[0].AiState = SER_SAMPLE; //slantpntmsg[aipnt][0].AiState = 1; - sprintf(szbuf, "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); + DebugLog(devparam[devno].commid, 'V', "倾角ID:%d slantangle X=%0.3f ", devparam[devno].devaddr, fvalue); break; case 10: /*倾角传感器Y轴倾角*/ - sprintf(szbuf, "%s Y =%0.3f ", szbuf, fvalue); - DebugLog(devparam[devno].commid, szbuf, 'V'); + DebugLog(devparam[devno].commid, 'V', "%s Y =%0.3f ", szbuf, fvalue); if ((fvalue < -59) || (fvalue > 59)) { @@ -3273,54 +2908,6 @@ void ShxyProtocolDataProcess(int devno) } } -void delete_old_files(const char *path, int days) -{ - struct stat file_stat; - struct tm *file_tm; - time_t now = time(NULL); - DIR *dir = opendir(path); - struct dirent *entry; - char szbuf[1024]; - char fullpath[256]; - - memset(szbuf, 0, sizeof(szbuf)); - if (!dir) - { - sprintf(szbuf, "delete_old_files opendir %s error ", path); - DebugLog(8, szbuf, 'E'); - return; - } - - while ((entry = readdir(dir))) - { - memset(szbuf, 0, sizeof(szbuf)); - if (entry->d_type == DT_REG) - { // 只处理普通文件 - snprintf(fullpath, sizeof(fullpath), "%s/%s", path, entry->d_name); - - if (stat(fullpath, &file_stat) == -1) - { - perror("stat"); - strcpy(szbuf, 
"stat"); - DebugLog(8, szbuf, 'E'); - continue; - } - - localtime_r(&(file_stat.st_mtime), file_tm); - //file_tm = localtime(&(file_stat.st_mtime)); - - if (difftime(now, mktime(file_tm)) > days * 24 * 60 * 60) - { - if (unlink(fullpath) == -1) - { // 删除文件 - perror("unlink"); - } - } - } - } - - closedir(dir); -} /********************************************************************************* 把16进制和10进制ASCII字符串转换成int整数 *********************************************************************************/ @@ -3421,16 +3008,14 @@ void Collect_sensor_data() } #endif #if 1 - pthread_mutex_lock(&serial_mutex); // 加锁 + static int ideletefile = 0; - char logbuf[64]; time_t now; struct tm t0; // const char *path = logPath; // 指定目录路径 // int days = 15; // 删除15天前的log文件 - sprintf(logbuf, "进入程序时间:%lld, ideletefile=%d", get_msec(), ideletefile); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "进入程序时间:%lld, ideletefile=%d", get_msec(), ideletefile); // now = time(NULL); // localtime_r(&now, &t0); @@ -3442,6 +3027,7 @@ void Collect_sensor_data() // if (0 < t0.tm_hour) // ideletefile = 0; #endif + pthread_mutex_lock(&serial_mutex); // 加锁 GM_StartSerialComm(); pthread_mutex_unlock(&serial_mutex); // 解锁 } @@ -3449,14 +3035,11 @@ void Collect_sensor_data() int Gm_SetCameraSerialPortParam(int fd, unsigned int baud) { int ret; - char szbuf[128]; ret = set_port_attr(fd, baud, 8, 1, 'n', 0, 0);/*9600 8n1 */ if (ret < 0) { - memset(szbuf, 0, sizeof(szbuf)); - strcpy(szbuf, "摄像机串口波特率等参数设置错误!"); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "摄像机串口波特率等参数设置错误!"); return -1; } return ret; @@ -3465,42 +3048,34 @@ int Gm_SetCameraSerialPortParam(int fd, unsigned int baud) int Gm_OpenCameraSerial(SIO_PARAM_SERIAL_DEF *pPortParam, const char *serfile, unsigned int baud) { int fd = -1; - char szbuf[512]; unsigned int serbaud; - memset(szbuf, 0, sizeof(szbuf)); if (pPortParam->fd <= 0) { fd = ::open(serfile, O_RDWR | O_NDELAY); if (fd < 0) { - sprintf(szbuf, "摄像机串口%s打开失败!fd=%d", serfile, fd); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "摄像机串口%s打开失败!fd=%d", serfile, fd); return -1; } - sprintf(szbuf, "摄像机打开串口%s成功!fd=%d 波特率:%d", serfile, fd, baud); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "摄像机打开串口%s成功!fd=%d 波特率:%d", serfile, fd, baud); pPortParam->fd = fd; serbaud = getBaudrate(baud); Gm_SetCameraSerialPortParam(fd, serbaud); return 0; } - sprintf(szbuf, "摄像机串口%s已经打开!fd=%d", serfile, pPortParam->fd); - DebugLog(0, szbuf, 'I'); + DebugLog(0, 'I', "摄像机串口%s已经打开!fd=%d", serfile, pPortParam->fd); return 0; } int GM_CameraComSend(unsigned char * cSendBuf, size_t nSendLen, int fd) { int i, len; - char szbuf[512]; - memset(szbuf, 0, sizeof(szbuf)); len = write(fd, cSendBuf, (size_t)nSendLen);/* 向串囗发送字符串 */ if (len < 0) { - strcpy(szbuf, "write data error "); - DebugLog(0, szbuf, 'E'); + DebugLog(0, 'E', "write data error "); return -1; } else if (len > 0) @@ -3515,6 +3090,7 @@ void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam) char buf[128]; int len, ret; int64_t lctime; + //char szbuf[512]; memset(buf, 0, sizeof(buf)); lctime = get_msec(); @@ -3527,7 +3103,9 @@ void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam) if (pPortParam->SendCmdFlag && (pPortParam->RevCmdFlag == 0)) { //pPortParam->RetryTimeCnt++; - if ((lctime - pPortParam->RetryTimeCnt > 3 * pPortParam->RetryTime) || (lctime - pPortParam->RetryTimeCnt < 0)) + //DebugLog(0, 'E', "1-send"); + + if ((lctime - pPortParam->RetryTimeCnt > 3 * pPortParam->RetryTime) || (lctime - pPortParam->RetryTimeCnt < 0)) { pPortParam->RetryTimeCnt = lctime; 
return; @@ -3552,6 +3130,7 @@ void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam) if (pPortParam->SendCmdFlag && pPortParam->RevCmdFlag) { + //DebugLog(0, 'E', "2-send"); // 清除当前命令 ClearCameraCmdFormPollCmdBuf(pPortParam); } @@ -3588,16 +3167,13 @@ void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam) len = GM_CameraComSend(&pPortParam->PollCmd[2], pPortParam->cmdlen - 2, pPortParam->fd); if (len < 1) { - strcpy(buf, "摄像机串口, 发送命令失败!"); - DebugLog(0, buf, 'E'); + DebugLog(0, 'E', "摄像机串口, 发送命令失败!"); } else { - strcpy(buf, "摄像机串口命令:"); BytestreamLOG(0, buf, &pPortParam->PollCmd[2], len, 'D'); - sprintf(buf, "sendtimeconst= %lld", (long long)(lctime - pPortParam->lsendtime)); - DebugLog(0, buf, 'W'); + DebugLog(0, 'W', "sendtimeconst= %lld", (long long)(lctime - pPortParam->lsendtime)); pPortParam->lsendtime = lctime; } pPortParam->SendCmdFlag = 1; @@ -3633,19 +3209,16 @@ void Gm_FindCameraCommand(SIO_PARAM_SERIAL_DEF *pPortParam) int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam) { int i, j; - char buf[256]; int64_t lctime; lctime = get_msec(); - memset(buf, 0, sizeof(buf)); - if (-1 == pPortParam->SerialCmdidx) + if (0 > pPortParam->SerialCmdidx) { if ((SER_STARTSAMPLE == pPortParam->image.state) || (SER_SAMPLE == pPortParam->image.state)) pPortParam->image.state = SER_SAMPLEFAIL; else if (PHOTO_SAVE_SUCC == pPortParam->image.state) pPortParam->image.state = PHOTO_SAVE_SUCC; - strcpy(buf, "通道1摄像机使用完毕!可以关闭摄像机电源!"); - DebugLog(0, buf, 'I'); + DebugLog(0, 'I', "摄像机使用完毕!可以关闭摄像机电源!"); memset(&serialport[0].image, 0, sizeof(PHOTO_DEF)); memmove((void *)&serialport[0].image, (void*)&pPortParam->image, sizeof(PHOTO_DEF)); if(pPortParam->fd >= 0) @@ -3677,14 +3250,17 @@ void GM_CameraSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam) recvlen = read(pPortParam->fd, &recvbuf[i], sizeof(recvbuf)-i);/* 在串口读取字符串 */ t1 = get_msec(); if(t1-t0 >= 350) - break; + { + i += recvlen; + break; + } } recvlen = i; if (recvlen < 1) return; #if 1 - sprintf(buf, "收到Camera, %d字节数据:", recvlen); + sprintf(buf, "收到BD, %d字节数据:", recvlen); BytestreamLOG(0, buf, recvbuf, recvlen, 'I'); #endif @@ -3695,9 +3271,9 @@ int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam) { int flag = -1; - GM_CameraSerialComRecv(pPortParam); - Gm_FindCameraCommand(pPortParam); //GM_CameraSerialComRecv(pPortParam); + Gm_FindCameraCommand(pPortParam); + GM_CameraSerialComRecv(pPortParam); flag = GM_IsCloseCamera(pPortParam); return flag; } @@ -3707,7 +3283,7 @@ int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam) int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr) { int flag = 0; - char szbuf[128], logbuf[128]; + char logbuf[128]; SIO_PARAM_SERIAL_DEF *cameraport; cameraport = (SIO_PARAM_SERIAL_DEF*)malloc(sizeof(SIO_PARAM_SERIAL_DEF)); @@ -3727,13 +3303,12 @@ int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, flag = Gm_OpenCameraSerial(cameraport, serfile, baud); #if 1 - memset(szbuf, 0, sizeof(szbuf)); srdt.iLastGetPhotoNo = -1; - if (0 == cmdidx) + if (TAKE_PHOTO == cmdidx) cameraport->image.state = SER_STARTSAMPLE; - if ((0 == cmdidx) && (srdt.presetno > 0)) + if ((TAKE_PHOTO == cmdidx) && (srdt.presetno > 0)) { - cameraport->SerialCmdidx = 10017; + cameraport->SerialCmdidx = MOVE_PRESETNO; srdt.iLastGetPhotoNo = cmdidx; } else @@ -3744,8 +3319,7 @@ int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, //flag = 1; //if (1 == 
srdt.ms_dev[i].IsNeedSerial) //{ - //sprintf(logbuf, "装置%d, IsNoInsta=%d, 类型:%s", i + 1, devparam[i].IsNoInsta, szbuf); - //DebugLog(8, logbuf, 'I'); + //DebugLog(8, 'I', "装置%d, IsNoInsta=%d, 类型:%s", i + 1, devparam[i].IsNoInsta, szbuf); //Gm_OpenSensorsPower(); //Gm_OpenCameraSerial(&cameraport); //} @@ -3763,8 +3337,7 @@ int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, if (0x00 == flag) { - sprintf(szbuf, "摄像机启动串口定时器!"); - DebugLog(8, szbuf, 'I'); + DebugLog(0, 'I', "摄像机启动串口定时器!"); for (;;) { usleep(10); @@ -3772,9 +3345,8 @@ int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, //polltime = get_msec(); if (GM_CameraSerialTimer(cameraport) < 0) { - //LOGE("12V state=%d", getInt(CMD_SET_12V_EN_STATE)); - DebugLog(8, "退出操作摄像机流程!", 'V'); - sleep(3); + DebugLog(8, 'V', "退出操作摄像机流程!"); + //sleep(3); break; } } @@ -3790,8 +3362,8 @@ int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, int CameraPhotoCmd(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr) { + int flag = 0; pthread_mutex_lock(&camera_mutex); - int flag = 0; srdt.bImageSize = bImageSize; srdt.presetno = presetno; @@ -3800,13 +3372,538 @@ int CameraPhotoCmd(int phototime, unsigned char channel, int cmdidx, unsigned ch pthread_mutex_unlock(&camera_mutex); // 解锁 return flag; } + +int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr) +{ + int flag = 0; + char logbuf[128]; + SIO_PARAM_SERIAL_DEF *cameraport=NULL; + + if(NULL == ptz_state) + { + return -1; + } + if((NOTIFY_PTZ_CLOSE == cmdidx) || (QUERY_PTZ_STATE == cmdidx)) + ; + else + { + return -1; + } + cameraport = (SIO_PARAM_SERIAL_DEF*)malloc(sizeof(SIO_PARAM_SERIAL_DEF)); + + cameraport->cameraaddr = addr; + cameraport->Retry = 0; + cameraport->RetryTime = 1000; + cameraport->WaitTime = 0; + cameraport->m_iRevStatus = 0; + cameraport->m_iRecvLen = 0; + cameraport->m_iNeedRevLength = 0; + cameraport->fd = -1; + memset(cameraport->m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 + ClearCameraCmdFormPollCmdBuf(cameraport); + + pthread_mutex_lock(&camera_mutex); + flag = Gm_OpenCameraSerial(cameraport, serfile, baud); + +#if 1 + srdt.iLastGetPhotoNo = -1; + cameraport->SerialCmdidx = cmdidx; + cameraport->FirstCmdTimeCnt = get_msec(); + cameraport->sendptzstatecmd = 0; +#endif + if(-1 == flag) + { + if(NULL != cameraport) + { + free(cameraport); + cameraport = NULL; + } + pthread_mutex_unlock(&camera_mutex); + return -1; + } + + if (0x00 == flag) + { + DebugLog(8, 'I', "云台状态查询启动串口定时器!"); + //DebugLog(0, 'I', "下发命令sendptzstatecmd=%d!", cameraport->sendptzstatecmd); + for (;;) + { + usleep(10); + //LOGW("polltime=%ldms", get_msec()-polltime); + //polltime = get_msec(); + flag = GM_CameraSerialTimer(cameraport); + if (flag < 0) + { + DebugLog(8, 'V', "退出查询云台状态流程!"); + memmove((void*)ptz_state, &cameraport->ptz_state, sizeof(PTZ_STATE)); + if(flag == -2) + flag = -1; + else + flag = 0; + //sleep(3); + break; + } + } + } + if(NULL != cameraport) + { + free(cameraport); + cameraport = NULL; + } + pthread_mutex_unlock(&camera_mutex); // 解锁 + return flag; +} + +int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud) +{ + int flag = 0; + char logbuf[128]; + SIO_PARAM_SERIAL_DEF *cameraport = NULL; + + if(NULL == BD_data) + { + return -1; + } + if(samptime < 5) + samptime = 5; + cameraport = 
(SIO_PARAM_SERIAL_DEF*)malloc(sizeof(SIO_PARAM_SERIAL_DEF)); + + cameraport->Retry = 0; + cameraport->RetryTime = 1000; + cameraport->WaitTime = 0; + cameraport->m_iRevStatus = 0; + cameraport->m_iRecvLen = 0; + cameraport->m_iNeedRevLength = 0; + cameraport->fd = -1; + memset(cameraport->m_au8RecvBuf, 0, RECVDATA_MAXLENTH); // 接收数据缓存区 + ClearCameraCmdFormPollCmdBuf(cameraport); + + pthread_mutex_lock(&bd_mutex); + flag = Gm_OpenCameraSerial(cameraport, serfile, baud); + +#if 1 + srdt.iLastGetPhotoNo = -1; + cameraport->SerialCmdidx = -1; + cameraport->FirstCmdTimeCnt = get_msec(); +#endif + if(-1 == flag) + { + if(NULL != cameraport) + { + free(cameraport); + cameraport = NULL; + } + pthread_mutex_unlock(&bd_mutex); // 解锁 + return -1; + } + + if (0x00 == flag) + { + DebugLog(8, 'I', "北斗定位数据查询启动串口定时器!"); + for (;;) + { + usleep(10); + //LOGW("polltime=%ldms", get_msec()-polltime); + //polltime = get_msec(); + flag = GM_BdSerialTimer(cameraport); + if(get_msec() - cameraport->FirstCmdTimeCnt > samptime*1000) + { + DebugLog(8, 'V', "退出查询北斗定位数据流程!"); + memmove((void*)BD_data, &cameraport->bd_data, sizeof(BD_GNSS_DATA)); + break; + } + } + } + if(NULL != cameraport) + { + free(cameraport); + cameraport = NULL; + } + pthread_mutex_unlock(&bd_mutex); // 解锁 + return flag; +} + +int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam) +{ + int flag = 0; + + GM_BdSerialComRecv(pPortParam); + return flag; +} + +void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam) +{ + int i, recvlen; + u_char recvbuf[RECVDATA_MAXLENTH]; + int64_t t0, t1; + + t0 = get_msec(); + + memset(recvbuf, 0, sizeof(recvbuf)); + if (pPortParam->fd <= 0) + return; + i=0; + recvlen = 0; + memset(recvbuf, 0, sizeof(recvbuf)); + for(;;) + { + i += recvlen; + recvlen = read(pPortParam->fd, &recvbuf[i], sizeof(recvbuf)-i);/* 在串口读取字符串 */ + t1 = get_msec(); + if(t1-t0 >= 350) + break; + } + + recvlen = i; + if (recvlen < 1) + return; +#if 1 + DebugLog(0, 'I', "收到BD, %d字节数据:%s", recvlen, recvbuf); +#endif + + BdRecvData(pPortParam, recvbuf, recvlen); +} + +void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len) +{ + int i; + unsigned char bdxor=0, srcxor=0; + + for (i = 0; i < len; i++) + { + switch (pPortParam->m_iRevStatus) + { + case 0: // 搜索起始符 '$' + pPortParam->m_iRecvLen = 0; + memset(pPortParam->m_au8RecvBuf, 0,sizeof(pPortParam->m_au8RecvBuf)); + if ('$' == buf[i]) + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + pPortParam->m_iRevStatus = 1; // 进入数据接收状态 + } + break; + case 1: // 接收数据直到 '*' + if (pPortParam->m_iRecvLen >= RECVDATA_MAXLENTH) + { + // 缓冲区溢出,重置状态 + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + break; + } + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + + if (buf[i] == '*') + { + pPortParam->m_iRevStatus = 2; // 进入校验码接收状态 + } + break; + case 2: // 接收校验码(2字节十六进制)和 + if (pPortParam->m_iRecvLen >= RECVDATA_MAXLENTH) + { + // 缓冲区溢出,重置状态 + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + break; + } + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + + // 检测到换行符(0x0A),检查前一个字符是否为回车符(0x0D) + if (buf[i] == 0x0A) + { + if (pPortParam->m_iRecvLen >= 5 && + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 2] == 0x0D) + { + // 提取校验码(*后的两个字符) + char hex_str[3] = { + (char)pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 4], + (char)pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen - 3], + '\0' + }; + srcxor = (uint8_t)strtol(hex_str, NULL, 16); + + // 计算校验值(从$后的第一个字符到*前的字符) + uint8_t calc_xor = 
BDXorCheck(&pPortParam->m_au8RecvBuf[1],pPortParam->m_iRecvLen - 6);// 长度 = 总长度 - ($ + *HH + \r\n) + + if (srcxor == calc_xor) + { + BD_NMEA0183_PortDataProcess(pPortParam); + pPortParam->RevCmdFlag = 1; + } + // 重置状态,准备接收下一帧 + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + } + else + { + // 格式错误,丢弃数据 + pPortParam->m_iRevStatus = 0; + pPortParam->m_iRecvLen = 0; + } + } + break; + case 255:// 错误接收数据 + default: + if (buf[i] == '$') + { + pPortParam->m_iRevStatus = 1; + pPortParam->m_iRecvLen = 1; + pPortParam->m_au8RecvBuf[0] = buf[i]; + } + else + { + pPortParam->m_au8RecvBuf[pPortParam->m_iRecvLen++] = buf[i]; + if (pPortParam->m_iRecvLen > 200) + { + pPortParam->m_iRecvLen = 0; + } + } + break; + } + } +} + +/* +$BDRMC,023656.00,A,2240.61563,N,11359.86512,E,0.16,,140324,,,A,V*2C +$BDVTG,,,,,0.16,N,0.30,K,A*2F +$BDGGA,023656.00,2240.61563,N,11359.86512,E,1,23,0.7,96.53,M,-3.52,M,,*5B +$BDGSA,A,3,01,02,03,04,05,06,07,09,10,16,19,20,1.0,0.7,0.8,4*30 +$BDGSV,6,1,23,01,45,125,38,02,46,235,40,03,61,189,46,04,32,112,37,1*7B +$BDGLL,2240.61563,N,11359.86512,E,023656.00,A,A*78 +$BDZDA,023656.00,14,03,2024,00,00*71 +$GPTXT,01,01,01,ANTENNA OPEN*25 + GLL:经度、纬度、UTC 时间 +GSA:北斗接收机操作模式,定位使用的卫星,DOP 值 +GSV:可见北斗卫星信息、仰角、方位角、信噪比(SNR) +RMC:时间、日期、位置、速度 +VTG:地面速度信息 +ZDA: 时间、日期 +TXT:用于天线状态检测 + */ +void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial) +{ + BD_NMEA0183_PROC_FUNC bd_nmea0183_call[] = { + {"$BDRMC",BD_get_BDRMC_data},/* 时间、日期、位置、速度*/ + {"$BDVTG",NULL},/* 地面速度信息*/ + {"$BDGGA",NULL},/* 时间、位置、定位类型*/ + {"$BDGSA",NULL},/* 北斗接收机操作模式,定位使用的卫星,DOP 值 */ + {"$BDGSV",NULL},/* 可见北斗卫星信息、仰角、方位角、信噪比(SNR)*/ + {"$BDGLL",NULL},/* 经度、纬度、UTC 时间*/ + {"$BDZDA",NULL},/* 时间、日期 */ + {"$GPTXT",NULL},/* 用于天线状态检测*/ + };/* irows*/ + int i=0, irows; + //char *cmd=NULL, *sourestr=NULL; + //RTUMSG rtumsg; + + if(NULL == curserial) + return; + irows = sizeof(bd_nmea0183_call)/sizeof(BD_NMEA0183_PROC_FUNC); + + DebugLog(0, 'I', (char*)curserial->m_au8RecvBuf); + +#if 0 + memset((void*)rtumsg.MsgData, 0, sizeof(rtumsg.MsgData)); + memcpy((void*)rtumsg.MsgData, (void*)curserial->m_au8RecvBuf, curserial->m_iRecvLen); + rtumsg.MsgLen = curserial->m_iRecvLen; + /* 析出关键字符串*/ + cmd = (char *)rtumsg.MsgData; + sourestr = strstr((char *)rtumsg.MsgData, ","); + *sourestr = 0; + DebugLog(0, 'I', "cmd_len = %d, cmd:%s", strlen(cmd), cmd); +#endif + + for(i=0; im_au8RecvBuf, bd_nmea0183_call[i].cmd_name)) + { + DebugLog(0, 'I', "cmd_name[%d]:%s", i, bd_nmea0183_call[i].cmd_name); + if(NULL != bd_nmea0183_call[i].recv_process) + bd_nmea0183_call[i].recv_process(curserial); + break; + } + } + if(i >= irows) + return; +} + +int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial) +{ + const int UTC_TIME = 1; + const int STATUS = 2; + const int LAT = 3; + const int ULAT = 4; + const int LON = 5; + const int ULON = 6; + const int DATE = 9; + double dvalue; + int total_fields=0, ivalue; + char **result = NULL; + char buffer[128]; // 存储格式化时间的缓冲区 + + if(NULL == curserial) + return -1; + /* + 1 $--RMC 字符串 消息 ID,RMC 语句头,’--‘为系统标识 + 2 UTCtime hhmmss.ss 当前定位的 UTC 时间 + 3 status 字符串 位置有效标志。 V=接收机警告,数据无效 A=数据有效 + 4 lat ddmm.mmmmm 纬度,前 2 字符表示度,后面的字符表示分 + 5 uLat 字符 纬度方向:N-北,S-南 + 6 lon dddmm.mmmmm 经度,前 3 字符表示度,后面的字符表示分 + 7 uLon 字符 经度方向:E-东,W-西 + 8 spd 数值 对地速度,单位为节 + 9 cog 数值 对地真航向,单位为度 + 10 date ddmmyy 日期(dd 为日,mm 为月,yy 为年) + 11 mv 数值 磁偏角,单位为度。固定为空 + 12 mvE 字符 磁偏角方向:E-东,W-西。固定为空 + 13 mode 字符 定位模式标志(备注[1]) + 14 navStatus 字符 导航状态标示符(V 表示系统不输出导航状态信息) 仅 NMEA 4.1 及以上版本有效 + 15 CS 16 进制数值 校验和,$和*之间(不包括$和*)所有字符的异或结果 + 16 字符 回车与换行符*/ + + result = 
BD_NMEA0183_SplitString((char *)curserial->m_au8RecvBuf, &total_fields); + if(NULL == result) + return -1; + + dvalue = ::atof(result[UTC_TIME]); + curserial->bd_data.UTC_time.tm_sec = (int)dvalue%100; + curserial->bd_data.UTC_time.tm_min = (int)dvalue/100%100; + curserial->bd_data.UTC_time.tm_hour = (int)dvalue/10000; + curserial->bd_data.ms_time = (dvalue - ((int)dvalue/1))*1000; + + curserial->bd_data.status = result[STATUS][0]; + dvalue = ::atof(result[LAT]); + curserial->bd_data.lat = ((int)dvalue/100)+(dvalue - ((int)dvalue/100*100))/60; + curserial->bd_data.uLat = result[ULAT][0]; + dvalue = ::atof(result[LON]); + curserial->bd_data.lon = ((int)dvalue/100)+(dvalue - ((int)dvalue/100*100))/60; + curserial->bd_data.uLon = result[ULON][0]; + + ivalue = ::atoi(result[DATE]); + ALOGD("%d", ivalue); + curserial->bd_data.UTC_time.tm_mday = ivalue/10000; + ALOGD("D:%d", curserial->bd_data.UTC_time.tm_mday); + curserial->bd_data.UTC_time.tm_mon = ivalue/100%100-1; + ALOGD("M:%d", curserial->bd_data.UTC_time.tm_mon); + curserial->bd_data.UTC_time.tm_year = ivalue%100+100; + ALOGD("Y:%d", curserial->bd_data.UTC_time.tm_year); + + ::memset(buffer, 0, sizeof(buffer)); + // 使用 strftime 格式化时间 + strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", &curserial->bd_data.UTC_time); + DebugLog(0, 'I', (const char*)buffer); + + DebugLog(0, 'I', "ms:%d lat=%f ulat=%c", curserial->bd_data.ms_time, curserial->bd_data.lat, curserial->bd_data.uLat); + DebugLog(0, 'I', "lon=%f ulon=%c, status=%c", curserial->bd_data.lon, curserial->bd_data.uLon, curserial->bd_data.status); + + // 释放内存 + for (int i = 0; i < total_fields; ++i) free(result[i]); + free(result); + + return 0; +} + +char** BD_NMEA0183_SplitString(char *str, int *total_fields) +{ + int max_fields, field_count = 0; + char **fields = NULL, **new_fields = NULL, **result = NULL; + char *copy = NULL, *p = NULL; + + if(NULL == str) + return NULL; + if(NULL == total_fields) + return NULL; + + // 创建可修改的副本 + copy = strdup(str); + if (!copy) + { + DebugLog(0, 'E', "内存分配失败\n"); + return NULL; + } + + // 初始字段数组大小 + max_fields = MAX_FIELDS_NUM; + fields = (char**)malloc(max_fields * sizeof(char *)); + if (!fields) { + free(copy); + DebugLog(0, 'E', "fields 内存分配失败\n"); + return NULL; + } + + field_count = 0; + fields[field_count] = copy; // 第一个字段起始 + + // 遍历字符串分割字段 + for (p = copy; *p; ++p) + { + if (*p == ',') + { + *p = '\0'; // 结束当前字段 + field_count++; + + // 动态扩展数组 + if (field_count >= max_fields) { + max_fields *= 2; + new_fields = (char**)realloc(fields, max_fields * sizeof(char *)); + if (!new_fields) { + free(fields); + free(copy); + DebugLog(0, 'E', "new_fields 内存分配失败\n"); + return NULL; + } + fields = new_fields; + } + fields[field_count] = p + 1; // 下一字段起始 + } + } + + *total_fields = field_count + 1; // 总字段数 + + // 复制字段到独立内存 + result = (char **)malloc((*total_fields) * sizeof(char *)); + if (!result) + { + free(fields); + free(copy); + DebugLog(0, 'E', "result 内存分配失败\n"); + return NULL; + } + + for (int i = 0; i < *total_fields; ++i) { + result[i] = strdup(fields[i]); + if (!result[i]) { + // 释放已分配内存 + for (int j = 0; j < i; ++j) free(result[j]); + free(result); + free(fields); + free(copy); + DebugLog(0, 'E', "result 字段复制失败\n"); + return NULL; + } + } + + // 输出结果到日志 + for (int i = 0; i < *total_fields; ++i) + { + DebugLog(0, 'I', "字段 %2d: %s\n", i + 1, result[i]); + } + + // 释放内存 + //for (int i = 0; i < total_fields; ++i) free(result[i]); + //free(result); + free(fields); + free(copy); + + return result; +} + /* 串口启动接口函数 结束*/ /* 数据和图片采集数据返回函数 开始*/ int 
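/*
 * Illustrative aside on the BDRMC parsing above (not part of the project code;
 * nmea_to_degrees and nmea_xor are hypothetical helper names used only here).
 * RMC latitude/longitude arrive as ddmm.mmmmm / dddmm.mmmmm, i.e. whole degrees
 * times 100 plus minutes, which BD_get_BDRMC_data converts with
 * deg = (int)(v/100) + (v - (int)(v/100)*100) / 60. The received checksum is
 * typically decoded from the two hex digits after '*', e.g. with
 * HexCharToInt(h) * 16 + HexCharToInt(l), and compared against the XOR of all
 * bytes between '$' and '*', which is what BDXorCheck is applied to above
 * (recvbuf + 1, length - 6, skipping '$', "*HH" and CRLF).
 */
static double nmea_to_degrees(double ddmm)
{
    int deg = (int)(ddmm / 100.0);            /* dd (or ddd) whole-degree part */
    double minutes = ddmm - deg * 100.0;      /* mm.mmmmm part */
    return deg + minutes / 60.0;              /* decimal degrees */
}

static unsigned char nmea_xor(const unsigned char *p, int len)
{
    unsigned char x = 0;
    while (len-- > 0)
        x ^= *p++;                            /* XOR of every byte between '$' and '*' */
    return x;
}
/*
 * Using the sample sentence above: 2240.61563,N -> 22.676927 deg N and
 * 11359.86512,E -> 113.997752 deg E. Note also that struct tm is offset-based,
 * which is why the date handling uses tm_mon = mm - 1 and tm_year = yy + 100
 * (years since 1900) for a 20yy date.
 */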
GetWeatherData(Data_DEF *data, int datano) { int i; - char logbuf[512], szbuf[128]; + char szbuf[32]; if (NULL == data) return -1; @@ -3842,8 +3939,7 @@ int GetWeatherData(Data_DEF *data, int datano) sprintf(szbuf, "未知"); break; } - sprintf(logbuf, "data_state=%d, %svalue=%0.3f", data->AiState, szbuf,data->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, %svalue=%0.3f", data->AiState, szbuf,data->EuValue); if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) { weatherpntmsg[datano].AiState = SER_IDLE; @@ -3854,14 +3950,11 @@ int GetWeatherData(Data_DEF *data, int datano) int GetAirTempData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[AirTempNo].EuValue; airt->AiState = weatherpntmsg[AirTempNo].AiState; - sprintf(logbuf, "data_state=%d, 温度value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 温度value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[AirTempNo].AiState = SER_IDLE; @@ -3872,14 +3965,11 @@ int GetAirTempData(Data_DEF *airt) int GetHumidityData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[HumidityNo].EuValue; airt->AiState = weatherpntmsg[HumidityNo].AiState; - sprintf(logbuf, "data_state=%d, 湿度value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 湿度value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[HumidityNo].AiState = SER_IDLE; @@ -3890,14 +3980,11 @@ int GetHumidityData(Data_DEF *airt) int GetWindSpeedData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[WindSpeedNo].EuValue; airt->AiState = weatherpntmsg[WindSpeedNo].AiState; - sprintf(logbuf, "data_state=%d, 风速value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 风速value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[WindSpeedNo].AiState = SER_IDLE; @@ -3908,14 +3995,11 @@ int GetWindSpeedData(Data_DEF *airt) int GetWindDirectionData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[WindDirectionNo].EuValue; airt->AiState = weatherpntmsg[WindDirectionNo].AiState; - sprintf(logbuf, "data_state=%d, 风向value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 风向value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[WindDirectionNo].AiState = SER_IDLE; @@ -3926,14 +4010,11 @@ int GetWindDirectionData(Data_DEF *airt) int GetRainfallData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[RainfallNo].EuValue; airt->AiState = weatherpntmsg[RainfallNo].AiState; - sprintf(logbuf, "data_state=%d, 雨量value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 雨量value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[RainfallNo].AiState = SER_IDLE; @@ -3944,14 +4025,11 @@ int GetRainfallData(Data_DEF *airt) int GetAtmosData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == 
airt) return -1; airt->EuValue = weatherpntmsg[AtmosNo].EuValue; airt->AiState = weatherpntmsg[AtmosNo].AiState; - sprintf(logbuf, "data_state=%d, 大气压value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 大气压value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[AtmosNo].AiState = SER_IDLE; @@ -3962,14 +4040,11 @@ int GetAtmosData(Data_DEF *airt) int GetOpticalRadiationData(Data_DEF *airt) { - char logbuf[512]; - if (NULL == airt) return -1; airt->EuValue = weatherpntmsg[OpticalRadiationNo].EuValue; airt->AiState = weatherpntmsg[OpticalRadiationNo].AiState; - sprintf(logbuf, "data_state=%d, 日照value=%0.3f", airt->AiState, airt->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "data_state=%d, 日照value=%0.3f", airt->AiState, airt->EuValue); if ((SER_SAMPLEFAIL == airt->AiState) || (SAMPLINGSUCCESS == airt->AiState)) { weatherpntmsg[OpticalRadiationNo].AiState = SER_IDLE; @@ -3980,8 +4055,6 @@ int GetOpticalRadiationData(Data_DEF *airt) int GetPullValue(int devno, Data_DEF *data) { - char logbuf[512]; - if (NULL == data) return -1; if ((0 > devno) || (MAX_SERIAL_DEV_NUM < devno)) @@ -3990,8 +4063,7 @@ int GetPullValue(int devno, Data_DEF *data) return -1; data->EuValue = srdt.ms_dev[devno].aiValue[0].EuValue; data->AiState = srdt.ms_dev[devno].aiValue[0].AiState; - sprintf(logbuf, "装置%d, ID=%d, data_state=%d, 拉力value=%0.3f", devno + 1, devparam[devno].devaddr, data->AiState, data->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "装置%d, ID=%d, data_state=%d, 拉力value=%0.3f", devno + 1, devparam[devno].devaddr, data->AiState, data->EuValue); if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) { srdt.ms_dev[devno].aiValue[0].AiState = SER_IDLE; @@ -4002,8 +4074,6 @@ int GetPullValue(int devno, Data_DEF *data) int GetAngleValue(int devno, Data_DEF *data, int Xy) { - char logbuf[512]; - if (NULL == data) return -1; if ((0 > devno) || (MAX_SERIAL_DEV_NUM < devno)) @@ -4015,8 +4085,7 @@ int GetAngleValue(int devno, Data_DEF *data, int Xy) data->EuValue = srdt.ms_dev[devno].aiValue[Xy].EuValue; data->AiState = srdt.ms_dev[devno].aiValue[Xy].AiState; - sprintf(logbuf, "装置%d, ID=%d, data_state=%d, 倾角value[%d]=%0.3f", devno + 1, devparam[devno].devaddr, data->AiState, Xy, data->EuValue); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "装置%d, ID=%d, data_state=%d, 倾角value[%d]=%0.3f", devno + 1, devparam[devno].devaddr, data->AiState, Xy, data->EuValue); if ((SER_SAMPLEFAIL == data->AiState) || (SAMPLINGSUCCESS == data->AiState)) { srdt.ms_dev[devno].aiValue[Xy].AiState = SER_IDLE; @@ -4027,8 +4096,6 @@ int GetAngleValue(int devno, Data_DEF *data, int Xy) int GetImage(int devno, IMAGE_DEF *photo) { - char logbuf[512]; - if (NULL == photo) return -1; photo->presetno = serialport[0].image.presetno; @@ -4038,8 +4105,7 @@ int GetImage(int devno, IMAGE_DEF *photo) photo->imagelen = serialport[0].image.imagelen; photo->state = serialport[0].image.state; - sprintf(logbuf, "装置%d, image_state=%d, 预置点:%d,拍照时间:%d, pic_name:%s", devno + 1, photo->state, photo->presetno, photo->phototime, photo->photoname); - DebugLog(8, logbuf, 'I'); + DebugLog(8, 'I', "装置%d, image_state=%d, 预置点:%d,拍照时间:%d, pic_name:%s", devno + 1, photo->state, photo->presetno, photo->phototime, photo->photoname); if ((SER_SAMPLEFAIL == photo->state) || (PHOTO_SAVE_SUCC == photo->state)) { serialport[0].image.state = SER_IDLE; diff --git a/app/src/main/cpp/SensorsProtocol.h 
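The hunks above drop the per-function char logbuf[512] plus sprintf staging and call DebugLog in printf style instead (the header below declares the printf-style logger as DBG_LOG, while the call sites keep the DebugLog name). A minimal sketch of such a variadic wrapper, assuming it ultimately forwards to the existing SaveLogTofile sink (illustrative only; the project's actual implementation may differ):

#include <stdarg.h>
#include <stdio.h>

int SaveLogTofile(int commid, const char *szbuf);   /* declared in SensorsProtocol.h */

void DebugLog(int commid, char flag, const char *format, ...)
{
    char szbuf[512];
    int n;
    va_list ap;

    n = snprintf(szbuf, sizeof(szbuf), "[%c] ", flag);     /* one possible way to carry the severity flag ('I'/'E'/...) */
    va_start(ap, format);
    vsnprintf(szbuf + n, sizeof(szbuf) - n, format, ap);   /* bounded, unlike the old sprintf */
    va_end(ap);

    SaveLogTofile(commid, szbuf);
}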
b/app/src/main/cpp/SensorsProtocol.h index 6d8ee84f..f2e745b7 100644 --- a/app/src/main/cpp/SensorsProtocol.h +++ b/app/src/main/cpp/SensorsProtocol.h @@ -27,6 +27,7 @@ #define IOT_PARAM_WRITE 0xAE #define IOT_PARAM_READ 0xAF +#define MAX_FIELDS_NUM 20 /* BD_NMEA0183单组字符串数据内含数据最大数量*/ #define MAX_SERIAL_DEV_NUM 25 /* 最大接串口传感器数量*/ #define MAX_SERIAL_PORT_NUM 5 #define MAX_DEV_VALUE_NUM 12 /* 一台装置最大的采样值数量*/ @@ -39,6 +40,7 @@ #define PELCO_D_PROTOCOL 6 /* 摄像机Pelco_D协议序号*/ #define SERIALCAMERA_PROTOCOL 8 /* 串口摄像机协议序号*/ #define MUTIWEATHER_PROTOCOL 9 /*多合一气象*/ +#define NMEA0183_PROTOCOL 10 /* 单一北斗NMEA0183标准协议*/ #define RESERVE2_PROTOCOL 17 /* 备用2协议序号*/ #define RESERVE4_PROTOCOL 19 /* 备用4协议序号*/ #define RESERVE5_PROTOCOL 20 /* 备用5协议序号*/ @@ -83,9 +85,9 @@ #define P_IRIS_OPEN 0x04000000 /* 光圈放大(1 有效)*/ #define P_FOCUS_NEAR 0x02000000 /* 近距离聚焦(1 有效)*/ #define P_FOCUS_FAR 0x01000000 /* 远距离聚焦(1 有效)*/ -#define P_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ +#define P_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ #define P_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/ -#define P_MOVE_DOWN 0x0010001f /* 向下移动镜头(1 有效)*/ +#define P_MOVE_DOWN 0x0010001f /* 向下移动镜头(1 有效)*/ #define P_MOVE_UP 0x0008001f /* 向上移动镜头(1 有效)*/ #define P_MOVE_LEFT 0x00041f00 /* 向左移动镜头(1 有效)*/ #define P_MOVE_RIGHT 0x00021f00 /* 向右移动镜头(1 有效)*/ @@ -95,10 +97,10 @@ #define D_IRIS_CLOSE 0x04000000 /* 光圈缩小(1 有效)*/ #define D_IRIS_OPEN 0x02000000 /* 光圈放大(1 有效)*/ #define D_FOCUS_NEAR 0x01000000 /* 近距离聚焦(1 有效)*/ -#define D_FOCUS_FAR 0x00800000 /* 远距离聚焦(1 有效)*/ -#define D_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ -#define D_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/ -#define D_MOVE_DOWN 0x0010002d /* 向下移动镜头(1 有效)*/ +#define D_FOCUS_FAR 0x00800000 /* 远距离聚焦(1 有效)*/ +#define D_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/ +#define D_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/ +#define D_MOVE_DOWN 0x0010002d /* 向下移动镜头(1 有效)*/ #define D_MOVE_UP 0x0008002d /* 向上移动镜头(1 有效)*/ #define D_MOVE_LEFT 0x00042d00 /* 向左移动镜头(1 有效)*/ #define D_MOVE_RIGHT 0x00022d00 /* 向右移动镜头(1 有效)*/ @@ -106,10 +108,10 @@ #define D_OPEN_MODULE_POWER 0x0009000C /* 打开机芯电源(1 有效)*/ /* 摄像机下发命令宏定义*/ -#define Take_Photo 0 /* 拍照*/ -#define Stop_Baud 10000 /* 设置球机波特率*/ -#define Stop_Cmd 10005 /* 取消或停止指令*/ -#define Auto_Scan 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ +#define TAKE_PHOTO 20000 /* 拍照*/ +#define SET_BAUD 10000 /* 设置球机波特率*/ +#define STOP_CMD 10005 /* 取消或停止指令*/ +#define AUTO_SCAN 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/ #define IRIS_CLOSE 10007 /* 光圈缩小(1 有效)*/ #define IRIS_OPEN 10008 /* 光圈放大(1 有效)*/ #define FOCUS_NEAR 10009 /* 近距离聚焦(1 有效)*/ @@ -124,9 +126,14 @@ #define SAVE_PRESETNO 10018 // 设置预置点 #define OPEN_TOTAL 10019 /* 打开总电源(1 有效)*/ #define OPEN_MODULE_POWER 10020 /* 打开机芯电源(1 有效)*/ +#define NOTIFY_PTZ_CLOSE 10021 // 通知云台关闭 +#define QUERY_PTZ_STATE 10022 // 查询云台状态 +#define CLOSE_TOTAL 10040 /* 关闭总电源*/ #define SPEED_DOME_CAMERA 0 /* 球机摄像机*/ #define SERIAL_CAMERA 2 /* 串口摄像机a*/ +#define START_ONCE_SELF 200 /* 一次性自检需要的调用的预置点200*/ + #define COLLECT_DATA 0 /* 调试使用*/ #define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? 
(c) - 'A' + 10 : 0 ) @@ -214,6 +221,48 @@ typedef struct uint8_t Phase; /* 传感器所安装相别,指拉力和倾角11表示A1....*/ } SERIAL_PARAM; +// 云台状态数据 +typedef struct +{ + uint8_t ptz_process; /* 云台所处过程(1:自检状态;2:调用预置点;3:一般状态;)*/ + uint8_t ptz_status; /* 云台当前状态值(0:停止;1:运动;2:机芯未上电;其他:其他错误*/ + int presetno; /* 云台所处预置点值*/ + float x_coordinate; /* 云台所处位置水平方向坐标*/ + float y_coordinate; /* 云台所处位置垂直方向坐标*/ +} PTZ_STATE; +/* +$--RMC 字符串 消息 ID,RMC 语句头,’--‘为系统标识 +2 UTCtime hhmmss.ss 当前定位的 UTC 时间 +3 status 字符串 位置有效标志。 +V=接收机警告,数据无效 + A=数据有效 +4 lat ddmm.mmmmm 纬度,前 2 字符表示度,后面的字符表示分 +5 uLat 字符 纬度方向:N-北,S-南 +6 lon dddmm.mmmm + m +经度,前 3 字符表示度,后面的字符表示分 +7 uLon 字符 经度方向:E-东,W-西 +8 spd 数值 对地速度,单位为节 +9 cog 数值 对地真航向,单位为度 +10 date ddmmyy 日期(dd 为日,mm 为月,yy 为年) +11 mv 数值 磁偏角,单位为度。固定为空 +12 mvE 字符 磁偏角方向:E-东,W-西。固定为空 +13 mode 字符 定位模式标志(备注[1]) +14 navStatus 字符 导航状态标示符(V 表示系统不输出导航状态信息) +仅 NMEA 4.1 及以上版本有效 +15 CS 16 进制数值 校验和,$和*之间(不包括$和*)所有字符的异或结果*/ +// 北斗卫星数据 +typedef struct +{ + struct tm UTC_time; /* UTC时间*/ + int ms_time; /* 毫秒*/ + double lat; /* 纬度,原值(前 2 字符表示度,后面的字符表示分)转换后为° */ + char uLat; /* 纬度方向:N-北,S-南*/ + double lon; /* 经度,原值(前 3 字符表示度,后面的字符表示分)转换后为°*/ + char uLon; /* 经度'E'-东,'W'-西*/ + char status; /* 'A'=数据有效 其他字符表示数据无效*/ +} BD_GNSS_DATA; + typedef struct { int m_iRevStatus; /* */ @@ -235,16 +284,27 @@ typedef struct uint16_t ForceWaitCnt; /* 强制等待计数*/ uint8_t ReSendCmdFlag; /* 重发命令标志 */ uint8_t SendCmdFlag; /* 命令发送标志 */ - uint8_t RevCmdFlag; /* 命令正常接收标志*/ + uint8_t RevCmdFlag; /* 命令正常接收标志*/ //********************************************************** - int64_t lsendtime; /* 命令发送绝对时间计时(毫秒)*/ - int cameraaddr; /* 摄像机地址*/ + int64_t lsendtime; /* 命令发送绝对时间计时(毫秒)*/ + int cameraaddr; /* 摄像机地址*/ int SerialCmdidx; /* 正在使用的串口发送命令的命令序号(-1:表示没有命令发送) 摄像机使用命令序号存储*/ - PHOTO_DEF image; /* 临时存储图片数据*/ + PHOTO_DEF image; /* 临时存储图片数据*/ int64_t FirstCmdTimeCnt; /* 串口读取数据起始时间*/ + PTZ_STATE ptz_state; + int sendptzstatecmd; // 查询命令次数控制 + BD_GNSS_DATA bd_data; + } SIO_PARAM_SERIAL_DEF; +typedef const struct +{ + //char *account; // 命令说明 + char *cmd_name; // 命令名称 + int (*recv_process)(SIO_PARAM_SERIAL_DEF *); /* urc数据处理*/ +}BD_NMEA0183_PROC_FUNC; + //串口相关装置所有参数集中定义 typedef struct { @@ -332,11 +392,11 @@ void Gm_OpenSerialPort(int devidx); // 关闭串口通讯 void Gm_CloseSerialPort(); -void DebugLog(int commid, char *szbuf, char flag); -int SaveLogTofile(int commid, char *szbuf); +void DBG_LOG(int commid, char flag, const char* format, ...); +int SaveLogTofile(int commid, const char *szbuf); // 功能说明:串口发送数据 返回实际发送的字节数 int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid); -void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir,const char *log); +void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log); // 启动串口通讯 void GM_StartSerialComm(); // 启动使用串口拍照 @@ -472,6 +532,26 @@ int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam); int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam); +int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr); + +void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx); + +int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud); + +int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam); + +void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam); + +void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len); + +unsigned char BDXorCheck(unsigned char *msg, int len); + +void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial); + +char** 
BD_NMEA0183_SplitString(char *str, int *total_fields); + +int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial); + #endif // __SENSOR_PROTOCOL_H__ diff --git a/app/src/main/cpp/camera2/mtk_metadata_tag.h b/app/src/main/cpp/camera2/mtk_metadata_tag.h new file mode 100644 index 00000000..160430c5 --- /dev/null +++ b/app/src/main/cpp/camera2/mtk_metadata_tag.h @@ -0,0 +1,1741 @@ +/* Copyright Statement: + * + * This software/firmware and related documentation ("MediaTek Software") are + * protected under relevant copyright laws. The information contained herein is + * confidential and proprietary to MediaTek Inc. and/or its licensors. Without + * the prior written permission of MediaTek inc. and/or its licensors, any + * reproduction, modification, use or disclosure of MediaTek Software, and + * information contained herein, in whole or in part, shall be strictly + * prohibited. + * + * MediaTek Inc. (C) 2010. All rights reserved. + * + * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES + * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE") + * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER + * ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL + * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED + * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR + * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH + * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY, + * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES + * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO. + * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO + * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK + * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE + * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR + * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S + * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE + * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE + * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE + * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE. + * + * The following software/firmware and/or related documentation ("MediaTek + * Software") have been modified by MediaTek Inc. All revisions are subject to + * any receiver's applicable license agreements with MediaTek Inc. 
+ */ + + +#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_CLIENT_MTK_METADATA_TAG_H_ +#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_CLIENT_MTK_METADATA_TAG_H_ + +typedef enum mtk_camera_metadata_section { + MTK_COLOR_CORRECTION, + MTK_CONTROL, + MTK_DEMOSAIC, + MTK_EDGE, + MTK_FLASH, + MTK_FLASH_INFO, + MTK_HOT_PIXEL, + MTK_JPEG, + MTK_LENS, + MTK_LENS_INFO, + MTK_NOISE_REDUCTION, + MTK_QUIRKS, + MTK_REQUEST, + MTK_SCALER, + MTK_SENSOR, + MTK_SENSOR_INFO, + MTK_SHADING, + MTK_STATISTICS, + MTK_STATISTICS_INFO, + MTK_TONEMAP, + MTK_LED, + MTK_INFO, + MTK_BLACK_LEVEL, + MTK_SYNC, + MTK_REPROCESS, + MTK_DEPTH, + MTK_LOGICAL_MULTI_CAMERA, + MTK_DISTORTION_CORRECTION, + MTK_HEIC, + MTK_HEIC_INFO, + MTK_HAL_INFO, + MTK_IOPIPE_INFO, + MTK_SECTION_COUNT, + + //vendor tag + MTK_VENDOR_TAG_SECTION = 0x8000, + MTK_FACE_FEATURE = 0, + MTK_NR_FEATURE = 1, + MTK_STEREO_FEATURE = 2, + MTK_HDR_FEATURE = 3, + MTK_MFNR_FEATURE = 4, + MTK_CSHOT_FEATURE = 5, + MTK_3A_FEATURE = 6, + MTK_EIS_FEATURE = 7, + MTK_STREAMING_FEATURE = 8, + MTK_VSDOF_FEATURE = 9, + MTK_MULTI_CAM_FEATURE = 10, + MTK_CONTROL_CAPTURE = 11, + MTK_BGSERVICE_FEATURE = 12, + MTK_CONFIGURE_SETTING = 13, + MTK_FLASH_FEATURE = 14, + MTK_SMVR_FEATURE = 15, + MTK_SINGLEHW_SETTING = 16, + MTK_ABF_FEATURE = 17, + MTK_VENDOR_SECTION_COUNT, +} mtk_camera_metadata_section_t; + +/** + * Hierarchy positions in enum space. All vendor extension tags must be + * defined with tag >= VENDOR_SECTION_START + */ + +typedef enum mtk_camera_metadata_section_start { + MTK_COLOR_CORRECTION_START = MTK_COLOR_CORRECTION << 16, + MTK_CONTROL_START = MTK_CONTROL << 16, + MTK_DEMOSAIC_START = MTK_DEMOSAIC << 16, + MTK_EDGE_START = MTK_EDGE << 16, + MTK_FLASH_START = MTK_FLASH << 16, + MTK_FLASH_INFO_START = MTK_FLASH_INFO << 16, + MTK_HOT_PIXEL_START = MTK_HOT_PIXEL << 16, + MTK_JPEG_START = MTK_JPEG << 16, + MTK_LENS_START = MTK_LENS << 16, + MTK_LENS_INFO_START = MTK_LENS_INFO << 16, + MTK_NOISE_REDUCTION_START = MTK_NOISE_REDUCTION << 16, + MTK_QUIRKS_START = MTK_QUIRKS << 16, + MTK_REQUEST_START = MTK_REQUEST << 16, + MTK_SCALER_START = MTK_SCALER << 16, + MTK_SENSOR_START = MTK_SENSOR << 16, + MTK_SENSOR_INFO_START = MTK_SENSOR_INFO << 16, + MTK_SHADING_START = MTK_SHADING << 16, + MTK_STATISTICS_START = MTK_STATISTICS << 16, + MTK_STATISTICS_INFO_START = MTK_STATISTICS_INFO << 16, + MTK_TONEMAP_START = MTK_TONEMAP << 16, + MTK_LED_START = MTK_LED << 16, + MTK_INFO_START = MTK_INFO << 16, + MTK_BLACK_LEVEL_START = MTK_BLACK_LEVEL << 16, + MTK_SYNC_START = MTK_SYNC << 16, + MTK_REPROCESS_START = MTK_REPROCESS << 16, + MTK_DEPTH_START = MTK_DEPTH << 16, + MTK_LOGICAL_MULTI_CAMERA_START = MTK_LOGICAL_MULTI_CAMERA << 16, + MTK_DISTORTION_CORRECTION_START = MTK_DISTORTION_CORRECTION << 16, + MTK_HEIC_START = MTK_HEIC << 16, + MTK_HEIC_INFO_START = MTK_HEIC_INFO << 16, + MTK_HAL_INFO_START = MTK_HAL_INFO << 16, + MTK_IOPIPE_INFO_START = MTK_IOPIPE_INFO << 16, + + MTK_VENDOR_TAG_SECTION_START = MTK_VENDOR_TAG_SECTION << 16, + MTK_FACE_FEATURE_START = (MTK_FACE_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_NR_FEATURE_START = (MTK_NR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_STEREO_FEATURE_START = (MTK_STEREO_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_HDR_FEATURE_START = (MTK_HDR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_MFNR_FEATURE_START = (MTK_MFNR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_CSHOT_FEATURE_START = (MTK_CSHOT_FEATURE+ MTK_VENDOR_TAG_SECTION) << 16, + MTK_3A_FEATURE_START = (MTK_3A_FEATURE + MTK_VENDOR_TAG_SECTION) 
<< 16, + MTK_EIS_FEATURE_START = (MTK_EIS_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_STREAMING_FEATURE_START = (MTK_STREAMING_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_VSDOF_FEATURE_START = (MTK_VSDOF_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_MULTI_CAM_FEATURE_START = (MTK_MULTI_CAM_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_CONTROL_CAPTURE_START = (MTK_CONTROL_CAPTURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_BGSERVICE_FEATURE_START = (MTK_BGSERVICE_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_CONFIGURE_SETTING_START = (MTK_CONFIGURE_SETTING + MTK_VENDOR_TAG_SECTION) << 16, + MTK_FLASH_FEATURE_START = (MTK_FLASH_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_SMVR_FEATURE_START = (MTK_SMVR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, + MTK_SINGLEHW_SETTING_START = (MTK_SINGLEHW_SETTING + MTK_VENDOR_TAG_SECTION) << 16, + MTK_ABF_FEATURE_START = (MTK_ABF_FEATURE + MTK_VENDOR_TAG_SECTION) << 16, +} mtk_camera_metadata_section_start_t; + + + +/** + * Main enum for defining camera metadata tags. New entries must always go + * before the section _END tag to preserve existing enumeration values. In + * addition, the name and type of the tag needs to be added to + * "" + */ +typedef enum mtk_camera_metadata_tag { + MTK_COLOR_CORRECTION_MODE = MTK_COLOR_CORRECTION_START, + MTK_COLOR_CORRECTION_TRANSFORM, + MTK_COLOR_CORRECTION_GAINS, + MTK_COLOR_CORRECTION_ABERRATION_MODE, + MTK_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + MTK_COLOR_CORRECTION_END, + + MTK_CONTROL_AE_ANTIBANDING_MODE = MTK_CONTROL_START, + MTK_CONTROL_AE_EXPOSURE_COMPENSATION, + MTK_CONTROL_AE_LOCK, + MTK_CONTROL_AE_MODE, + MTK_CONTROL_AE_REGIONS, + MTK_CONTROL_AE_TARGET_FPS_RANGE, + MTK_CONTROL_AE_PRECAPTURE_TRIGGER, + MTK_CONTROL_AF_MODE, + MTK_CONTROL_AF_REGIONS, + MTK_CONTROL_AF_TRIGGER, + MTK_CONTROL_AWB_LOCK, + MTK_CONTROL_AWB_MODE, + MTK_CONTROL_AWB_REGIONS, + MTK_CONTROL_CAPTURE_INTENT, + MTK_CONTROL_EFFECT_MODE, + MTK_CONTROL_MODE, + MTK_CONTROL_SCENE_MODE, + MTK_CONTROL_VIDEO_STABILIZATION_MODE, + MTK_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + MTK_CONTROL_AE_AVAILABLE_MODES, + MTK_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + MTK_CONTROL_AE_COMPENSATION_RANGE, + MTK_CONTROL_AE_COMPENSATION_STEP, + MTK_CONTROL_AF_AVAILABLE_MODES, + MTK_CONTROL_AVAILABLE_EFFECTS, + MTK_CONTROL_AVAILABLE_SCENE_MODES, + MTK_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + MTK_CONTROL_AWB_AVAILABLE_MODES, + MTK_CONTROL_MAX_REGIONS, + MTK_CONTROL_SCENE_MODE_OVERRIDES, + MTK_CONTROL_AE_PRECAPTURE_ID, + MTK_CONTROL_AE_STATE, + MTK_CONTROL_AF_STATE, + MTK_CONTROL_AF_TRIGGER_ID, + MTK_CONTROL_AWB_STATE, + MTK_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, + MTK_CONTROL_AE_LOCK_AVAILABLE, + MTK_CONTROL_AWB_LOCK_AVAILABLE, + MTK_CONTROL_AVAILABLE_MODES, + MTK_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, + MTK_CONTROL_POST_RAW_SENSITIVITY_BOOST, + MTK_CONTROL_ENABLE_ZSL, + MTK_CONTROL_AF_SCENE_CHANGE, + MTK_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, + MTK_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES, + MTK_CONTROL_EXTENDED_SCENE_MODE, + MTK_CONTROL_ZOOM_RATIO_RANGE, + MTK_CONTROL_ZOOM_RATIO, + // Camera1 + MTK_CONTROL_ISP_EDGE, + MTK_CONTROL_ISP_HUE, + MTK_CONTROL_ISP_SATURATION, + MTK_CONTROL_ISP_BRIGHTNESS, + MTK_CONTROL_ISP_CONTRAST, + MTK_CONTROL_END, + + MTK_DEMOSAIC_MODE = MTK_DEMOSAIC_START, + MTK_DEMOSAIC_END, + + MTK_EDGE_MODE = MTK_EDGE_START, + MTK_EDGE_STRENGTH, + MTK_EDGE_AVAILABLE_EDGE_MODES, + MTK_EDGE_END, + + MTK_FLASH_FIRING_POWER = MTK_FLASH_START, + MTK_FLASH_FIRING_TIME, + MTK_FLASH_MODE, + 
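/*
 * Note on the layout above (an explanatory aside, not part of the MediaTek header):
 * every *_START value is its section id shifted left by 16, so a tag stores the
 * section in its upper 16 bits and a per-section index in its lower 16 bits, e.g.
 *
 *     uint32_t tag     = MTK_CONTROL_AE_MODE;   // defined relative to MTK_CONTROL_START
 *     uint32_t section = tag >> 16;             // == MTK_CONTROL
 *     uint32_t index   = tag & 0xFFFF;          // position within the MTK_CONTROL section
 *
 * Vendor sections begin at MTK_VENDOR_TAG_SECTION (0x8000), so all vendor tags are
 * >= MTK_VENDOR_TAG_SECTION_START, i.e. 0x8000 << 16.
 */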
MTK_FLASH_COLOR_TEMPERATURE, + MTK_FLASH_MAX_ENERGY, + MTK_FLASH_STATE, + MTK_FLASH_END, + + MTK_FLASH_INFO_AVAILABLE = MTK_FLASH_INFO_START, + MTK_FLASH_INFO_CHARGE_DURATION, + MTK_FLASH_INFO_END, + + MTK_HOT_PIXEL_MODE = MTK_HOT_PIXEL_START, + MTK_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, + MTK_HOT_PIXEL_END, + + MTK_JPEG_GPS_COORDINATES = MTK_JPEG_START, + MTK_JPEG_GPS_PROCESSING_METHOD, + MTK_JPEG_GPS_TIMESTAMP, + MTK_JPEG_ORIENTATION, + MTK_JPEG_QUALITY, + MTK_JPEG_THUMBNAIL_QUALITY, + MTK_JPEG_THUMBNAIL_SIZE, + MTK_JPEG_AVAILABLE_THUMBNAIL_SIZES, + MTK_JPEG_MAX_SIZE, + MTK_JPEG_SIZE, + MTK_JPEG_END, + + MTK_LENS_APERTURE = MTK_LENS_START, + MTK_LENS_FILTER_DENSITY, + MTK_LENS_FOCAL_LENGTH, + MTK_LENS_FOCUS_DISTANCE, + MTK_LENS_OPTICAL_STABILIZATION_MODE, + MTK_LENS_FACING, + MTK_SENSOR_INFO_FACING = MTK_LENS_FACING, + MTK_LENS_POSE_ROTATION, + MTK_LENS_POSE_TRANSLATION, + MTK_LENS_FOCUS_RANGE, + MTK_LENS_STATE, + MTK_LENS_INTRINSIC_CALIBRATION, + MTK_LENS_RADIAL_DISTORTION, + MTK_LENS_POSE_REFERENCE, + MTK_LENS_DISTORTION, + MTK_LENS_END, + + MTK_LENS_INFO_AVAILABLE_APERTURES = MTK_LENS_INFO_START, + MTK_LENS_INFO_AVAILABLE_FILTER_DENSITIES, + MTK_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, + MTK_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + MTK_LENS_INFO_HYPERFOCAL_DISTANCE, + MTK_LENS_INFO_MINIMUM_FOCUS_DISTANCE, + MTK_LENS_INFO_SHADING_MAP_SIZE, + MTK_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, + MTK_LENS_INFO_END, + + MTK_NOISE_REDUCTION_MODE = MTK_NOISE_REDUCTION_START, + MTK_NOISE_REDUCTION_STRENGTH, + MTK_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + MTK_NOISE_REDUCTION_END, + + MTK_QUIRKS_METERING_CROP_REGION = MTK_QUIRKS_START, + MTK_QUIRKS_TRIGGER_AF_WITH_AUTO, + MTK_QUIRKS_USE_ZSL_FORMAT, + MTK_QUIRKS_USE_PARTIAL_RESULT, + MTK_QUIRKS_PARTIAL_RESULT, + MTK_QUIRKS_END, + + MTK_REQUEST_FRAME_COUNT = MTK_REQUEST_START, + MTK_REQUEST_ID, + MTK_REQUEST_INPUT_STREAMS, + MTK_REQUEST_METADATA_MODE, + MTK_REQUEST_OUTPUT_STREAMS, + MTK_REQUEST_TYPE, + MTK_REQUEST_MAX_NUM_OUTPUT_STREAMS, + MTK_REQUEST_MAX_NUM_REPROCESS_STREAMS, + MTK_REQUEST_MAX_NUM_INPUT_STREAMS, + MTK_REQUEST_PIPELINE_DEPTH, + MTK_REQUEST_PIPELINE_MAX_DEPTH, + MTK_REQUEST_PARTIAL_RESULT_COUNT, + MTK_REQUEST_AVAILABLE_CAPABILITIES, + MTK_REQUEST_AVAILABLE_REQUEST_KEYS, + MTK_REQUEST_AVAILABLE_RESULT_KEYS, + MTK_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + MTK_REQUEST_AVAILABLE_SESSION_KEYS, // int32[] | ndk_public | HIDL v3.3 + MTK_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, + // int32[] | hidden | HIDL v3.3 + MTK_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, + // int32[] | hidden | HIDL v3.4 + MTK_REQUEST_END, + + MTK_SCALER_CROP_APP_REGION = MTK_SCALER_START, + MTK_SCALER_AVAILABLE_FORMATS, + MTK_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, + MTK_SCALER_AVAILABLE_JPEG_SIZES, + MTK_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + MTK_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, + MTK_SCALER_AVAILABLE_PROCESSED_SIZES, + MTK_SCALER_AVAILABLE_RAW_MIN_DURATIONS, + MTK_SCALER_AVAILABLE_RAW_SIZES, + MTK_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, + MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + MTK_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, + MTK_SCALER_AVAILABLE_STALL_DURATIONS, + MTK_SCALER_CROPPING_TYPE, + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS, + // enum[] | ndk_public | HIDL v3.4 + MTK_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, + // int32 | ndk_public | HIDL v3.4 + MTK_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES, // byte[] | hidden | HIDL v3.5 + MTK_SCALER_ROTATE_AND_CROP, // enum | hidden | HIDL v3.5 + 
MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_WITH_DURATIONS, // int64[] | mtk extension + MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_WITH_DURATIONS_CANDIDATES, // int64[] | mtk extension + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_CANDIDATES, + /* + MTK_SCALER_CROP_REGION is neither app tag nor vendor tag now because ANDROID_SCALER_CROP_REGION may + have another meaning. + + Before Android R, MTK_SCALER_CROP_REGION is the desired region of the sensor to read out, so Left + and Top can be negative to represent ultra wide FOV if active array is wide sensor. + + In Android R, the application can choose to use CONTROL_ZOOM_RATIO to specify the desired zoom level, + and MTK_SCALER_CROP_REGION is used to achieve aspect ratios. + + HAL implementation still use previous version. We create MTK_SCALER_CROP_APP_REGION to correspond + ANDROID_SCALER_CROP_REGION, and convert to MTK_SCALER_CROP_REGION for backward compatible. + */ + MTK_SCALER_CROP_REGION, + MTK_SCALER_END, + + MTK_IOPIPE_INFO_AVAILABLE_IN_PORT_INFO + = MTK_IOPIPE_INFO_START, + MTK_IOPIPE_INFO_AVAILABLE_OUT_PORT_INFO, + MTK_IOPIPE_INFO_PORT_ID, + MTK_IOPIPE_INFO_TRANSFORM, + MTK_IOPIPE_INFO_CROP, + MTK_IOPIPE_INFO_SCALE_DOWN_RATIO, + MTK_IOPIPE_INFO_SCALE_UP_RATIO, + MTK_IOPIPE_INFO_LINEBUFFER, + MTK_IOPIPE_INFO_AVAILABLE_FORMATS, + MTK_IOPIPE_INFO_END, + + MTK_SENSOR_EXPOSURE_TIME = MTK_SENSOR_START, + MTK_SENSOR_FRAME_DURATION, + MTK_SENSOR_SENSITIVITY, + MTK_SENSOR_REFERENCE_ILLUMINANT1, + MTK_SENSOR_REFERENCE_ILLUMINANT2, + MTK_SENSOR_CALIBRATION_TRANSFORM1, + MTK_SENSOR_CALIBRATION_TRANSFORM2, + MTK_SENSOR_COLOR_TRANSFORM1, + MTK_SENSOR_COLOR_TRANSFORM2, + MTK_SENSOR_FORWARD_MATRIX1, + MTK_SENSOR_FORWARD_MATRIX2, + MTK_SENSOR_BASE_GAIN_FACTOR, + MTK_SENSOR_BLACK_LEVEL_PATTERN, + MTK_SENSOR_MAX_ANALOG_SENSITIVITY, + MTK_SENSOR_ORIENTATION, + MTK_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS, + MTK_SENSOR_TIMESTAMP, + MTK_SENSOR_TEMPERATURE, + MTK_SENSOR_NEUTRAL_COLOR_POINT, + MTK_SENSOR_NOISE_PROFILE, + MTK_SENSOR_PROFILE_HUE_SAT_MAP, + MTK_SENSOR_PROFILE_TONE_CURVE, + MTK_SENSOR_GREEN_SPLIT, + MTK_SENSOR_TEST_PATTERN_DATA, + MTK_SENSOR_TEST_PATTERN_MODE, + MTK_SENSOR_AVAILABLE_TEST_PATTERN_MODES, + MTK_SENSOR_ROLLING_SHUTTER_SKEW, + MTK_SENSOR_OPTICAL_BLACK_REGIONS, + MTK_SENSOR_DYNAMIC_BLACK_LEVEL, + MTK_SENSOR_DYNAMIC_WHITE_LEVEL, + MTK_SENSOR_OPAQUE_RAW_SIZE, + MTK_SENSOR_END, + + MTK_SENSOR_INFO_ACTIVE_ARRAY_REGION = MTK_SENSOR_INFO_START, + MTK_SENSOR_INFO_SENSITIVITY_RANGE, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, + MTK_SENSOR_INFO_EXPOSURE_TIME_RANGE, + MTK_SENSOR_INFO_MAX_FRAME_DURATION, + MTK_SENSOR_INFO_PHYSICAL_SIZE, + MTK_SENSOR_INFO_PIXEL_ARRAY_SIZE, + MTK_SENSOR_INFO_WHITE_LEVEL, + MTK_SENSOR_INFO_TIMESTAMP_SOURCE, + MTK_SENSOR_INFO_LENS_SHADING_APPLIED, + MTK_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + MTK_SENSOR_INFO_ORIENTATION, + MTK_SENSOR_INFO_PACKAGE, + MTK_SENSOR_INFO_DEV, + MTK_SENSOR_INFO_SCENARIO_ID, + MTK_SENSOR_INFO_FRAME_RATE, + MTK_SENSOR_INFO_REAL_OUTPUT_SIZE, + MTK_SENSOR_INFO_OUTPUT_REGION_ON_ACTIVE_ARRAY, + MTK_SENSOR_INFO_WANTED_ORIENTATION, + MTK_SENSOR_INFO_END, + + MTK_SHADING_MODE = MTK_SHADING_START, + MTK_SHADING_STRENGTH, + MTK_SHADING_AVAILABLE_MODES, + MTK_SHADING_END, + + MTK_STATISTICS_FACE_DETECT_MODE = MTK_STATISTICS_START, + MTK_STATISTICS_HISTOGRAM_MODE, + MTK_STATISTICS_SHARPNESS_MAP_MODE, + MTK_STATISTICS_HOT_PIXEL_MAP_MODE, + MTK_STATISTICS_FACE_IDS, + MTK_STATISTICS_FACE_LANDMARKS, + MTK_STATISTICS_FACE_RECTANGLES, + MTK_STATISTICS_FACE_SCORES, + 
MTK_STATISTICS_HISTOGRAM, + MTK_STATISTICS_SHARPNESS_MAP, + MTK_STATISTICS_LENS_SHADING_CORRECTION_MAP, + MTK_STATISTICS_LENS_SHADING_MAP, + MTK_STATISTICS_PREDICTED_COLOR_GAINS, + MTK_STATISTICS_PREDICTED_COLOR_TRANSFORM, + MTK_STATISTICS_SCENE_FLICKER, + MTK_STATISTICS_HOT_PIXEL_MAP, + MTK_STATISTICS_LENS_SHADING_MAP_MODE, + MTK_STATISTICS_OIS_DATA_MODE, + MTK_STATISTICS_OIS_TIMESTAMPS, + MTK_STATISTICS_OIS_X_SHIFTS, + MTK_STATISTICS_OIS_Y_SHIFTS, + MTK_STATISTICS_OBJECT_TRACKING_ENABLE, + MTK_STATISTICS_OBJECT_TRACKING_INIT, + MTK_STATISTICS_OBJECT_TRACKING_SCORE, + MTK_STATISTICS_OBJECT_TRACKING_RECT, + MTK_STATISTICS_END, + + MTK_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES + = MTK_STATISTICS_INFO_START, + MTK_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, + MTK_STATISTICS_INFO_MAX_FACE_COUNT, + MTK_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, + MTK_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, + MTK_STATISTICS_INFO_SHARPNESS_MAP_SIZE, + MTK_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + MTK_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + MTK_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES, + MTK_STATISTICS_INFO_END, + + MTK_TONEMAP_CURVE_BLUE = MTK_TONEMAP_START, + MTK_TONEMAP_CURVE_GREEN, + MTK_TONEMAP_CURVE_RED, + MTK_TONEMAP_MODE, + MTK_TONEMAP_MAX_CURVE_POINTS, + MTK_TONEMAP_AVAILABLE_TONE_MAP_MODES, + MTK_TONEMAP_GAMMA, + MTK_TONEMAP_PRESET_CURVE, + MTK_TONEMAP_END, + + MTK_LED_TRANSMIT = MTK_LED_START, + MTK_LED_AVAILABLE_LEDS, + MTK_LED_END, + + MTK_INFO_SUPPORTED_HARDWARE_LEVEL = MTK_INFO_START, + MTK_INFO_VERSION, + MTK_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, + MTK_INFO_END, + + MTK_BLACK_LEVEL_LOCK = MTK_BLACK_LEVEL_START, + MTK_BLACK_LEVEL_END, + + MTK_SYNC_FRAME_NUMBER = MTK_SYNC_START, + MTK_SYNC_MAX_LATENCY, + MTK_SYNC_END, + + MTK_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR + = MTK_REPROCESS_START, + MTK_REPROCESS_MAX_CAPTURE_STALL, + MTK_REPROCESS_END, + + MTK_DEPTH_MAX_DEPTH_SAMPLES = MTK_DEPTH_START, + MTK_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, + MTK_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, + MTK_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, + MTK_DEPTH_DEPTH_IS_EXCLUSIVE, + MTK_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS, + MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, + MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, + MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, + MTK_DEPTH_END, + + MTK_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = MTK_LOGICAL_MULTI_CAMERA_START, + MTK_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE, + MTK_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, + MTK_LOGICAL_MULTI_CAMERA_END, + + MTK_DISTORTION_CORRECTION_MODE = // enum | public | HIDL v3.3 + MTK_DISTORTION_CORRECTION_START, + MTK_DISTORTION_CORRECTION_AVAILABLE_MODES, // byte[] | public | HIDL v3.3 + MTK_DISTORTION_CORRECTION_END, + + MTK_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS = + // enum[] | ndk_public | HIDL v3.4 + MTK_HEIC_START, + MTK_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, // int64[] | ndk_public | HIDL v3.4 + MTK_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, // int64[] | ndk_public | HIDL v3.4 + MTK_HEIC_END, + + MTK_HEIC_INFO_SUPPORTED = // enum | system | HIDL v3.4 + MTK_HEIC_INFO_START, + MTK_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT,// byte | system | HIDL v3.4 + MTK_HEIC_INFO_END, + + MTK_FACE_FEATURE_GESTURE_MODE = MTK_FACE_FEATURE_START, + MTK_FACE_FEATURE_GESTURE_RESULT, + MTK_FACE_FEATURE_SMILE_DETECT_MODE, + MTK_FACE_FEATURE_SMILE_DETECT_RESULT, + MTK_FACE_FEATURE_ASD_MODE, + MTK_FACE_FEATURE_ASD_RESULT, + MTK_FACE_FEATURE_AVAILABLE_GESTURE_MODES, + MTK_FACE_FEATURE_AVAILABLE_SMILE_DETECT_MODES, + 
MTK_FACE_FEATURE_AVAILABLE_ASD_MODES, + MTK_FACE_FEATURE_FORCE_FACE_3A, + MTK_FACE_FEATURE_AVAILABLE_FORCE_FACE_3A, + MTK_FACE_FEATURE_FACE_ADDITIONAL_RESULT, + MTK_FACE_FEATURE_POSE_ORIENTATIONS, + MTK_FACE_FEATURE_END, + + MTK_NR_FEATURE_3DNR_MODE = MTK_NR_FEATURE_START, + MTK_NR_FEATURE_AVAILABLE_3DNR_MODES, + MTK_NR_FEATURE_END, + + MTK_HDR_FEATURE_HDR_MODE = MTK_HDR_FEATURE_START, // Control Meta + MTK_HDR_FEATURE_SESSION_PARAM_HDR_MODE, //control meta for config usage + MTK_HDR_FEATURE_HDR_DETECTION_RESULT, // Dynamic Meta, reply hdr detector result + MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO, // Static Meta, for App query use, runtime decided in MetadataProvider + MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_VIDEO, // Static Meta, for App query use, runtime decided in MetadataProvider + MTK_HDR_FEATURE_AVAILABLE_VHDR_MODES, // Static Meta, for middleware query use + MTK_HDR_FEATURE_AVAILABLE_MSTREAM_HDR_MODES,// Static Meta, for App query use + MTK_HDR_FEATURE_AVAILABLE_SINGLE_FRAME_HDR, // Static Meta, for middleware query use + MTK_HDR_FEATURE_AVAILABLE_STAGGER_HDR_MODES,// Static Meta, for App query use + MTK_HDR_FEATURE_VHDR_RESULT, // Dynamic Meta, reply vhdr result + MTK_HDR_FEATURE_END, + + // Dynamic Meta, middleware use, for override timestamp need or not use + // [0] : need override or not + // [1] : overrided or not. ( If overrided, can send shutter time to Framework) + MTK_EIS_FEATURE_EIS_MODE = MTK_EIS_FEATURE_START, + MTK_EIS_FEATURE_ISNEED_OVERRIDE_TIMESTAMP, + MTK_EIS_FEATURE_NEW_SHUTTER_TIMESTAMP, // Dynamic Meta, middleware use, override timestamp for record + MTK_EIS_FEATURE_PREVIEW_EIS, + MTK_EIS_FEATURE_END, + + MTK_STREAMING_FEATURE_RECORD_STATE = MTK_STREAMING_FEATURE_START, // Control Meta, app use, tell recording or stopping + MTK_STREAMING_FEATURE_AVAILABLE_RECORD_STATES, // Static Meta, for app query use + MTK_STREAMING_FEATURE_HFPS_MODE, + MTK_STREAMING_FEATURE_AVAILABLE_HFPS_MODES, // Static Meta, for app query use + MTK_STREAMING_FEATURE_AVAILABLE_HFPS_MAX_RESOLUTIONS, // Static Meta, for app query use [maxW, maxH, fps] + MTK_STREAMING_FEATURE_AVAILABLE_HFPS_EIS_MAX_RESOLUTIONS, + MTK_STREAMING_FEATURE_HDR10, + MTK_STREAMING_FEATURE_CROP_OUTER_LINES_ENABLE, + MTK_STREAMING_FEATURE_PIP_DEVICES, + MTK_STREAMING_FEATURE_DEMO_FB, + MTK_STREAMING_FEATURE_DEMO_EIS, + MTK_STREAMING_FEATURE_DEMO_ASD, + MTK_STREAMING_FEATURE_END, + + MTK_STEREO_FEATURE_DOF_LEVEL = MTK_STEREO_FEATURE_START, + MTK_STEREO_FEATURE_SUPPORTED_DOF_LEVEL, + MTK_STEREO_FEATURE_RESULT_DOF_LEVEL, + MTK_STEREO_FEATURE_WARNING, + MTK_STEREO_FEATURE_DEPTH_AF_ON, + MTK_STEREO_FEATURE_DISTANCE_MEASURE_ON, + MTK_STEREO_FEATURE_RESULT_DISTANCE, + MTK_STEREO_FEATURE_TOUCH_POSITION, + MTK_STEREO_FEATURE_SHOT_MODE, + MTK_STEREO_FEATURE_STATE, + MTK_STEREO_FEATURE_END, + + + MTK_MFNR_FEATURE_MFB_MODE = MTK_MFNR_FEATURE_START, + MTK_MFNR_FEATURE_MFB_RESULT, + MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, + MTK_MFNR_FEATURE_AIS_MODE, + MTK_MFNR_FEATURE_AIS_RESULT, + MTK_MFNR_FEATURE_AVAILABLE_AIS_MODES, + MTK_MFNR_FEATURE_BSS_GOLDEN_INDEX, + MTK_MFNR_FEATURE_DO_ZIP_WITH_BSS, + MTK_MFNR_FEATURE_END, + + MTK_CSHOT_FEATURE_AVAILABLE_MODES = MTK_CSHOT_FEATURE_START, + MTK_CSHOT_FEATURE_CAPTURE, + MTK_CSHOT_FEATURE_END, + + MTK_3A_FEATURE_AE_ROI = MTK_3A_FEATURE_START, + MTK_3A_FEATURE_AF_ROI, + MTK_3A_FEATURE_AWB_ROI, + MTK_3A_FEATURE_AE_REQUEST_ISO_SPEED, + MTK_3A_FEATURE_AE_AVAILABLE_METERING, + MTK_3A_FEATURE_AE_REQUEST_METERING_MODE, + MTK_3A_FEATURE_AE_AVERAGE_BRIGHTNESS, + MTK_3A_FEATURE_AE_CAPTURE_SELECTION_SHUTTER, 
+ MTK_3A_FEATURE_AE_CAPTURE_SELECTION_ISO, + MTK_3A_FEATURE_AE_PLINE_INDEX, + MTK_3A_FEATURE_AE_CUSTOM_PLINE_MODE, + MTK_3A_FEATURE_AWB_AVAILABL_RANGE, + MTK_3A_FEATURE_AWB_REQUEST_VALUE, + MTK_3A_FEATURE_AWB_CCT, + MTK_3A_FEATURE_LENS_AVAILABLE_FOCAL_LENGTHS_35MM, + MTK_3A_FEATURE_AE_SENSOR_GAIN_VALUE, + MTK_3A_FEATURE_AE_ISP_GAIN_VALUE, + MTK_3A_FEATURE_AE_LUX_INDEX_VALUE, + MTK_3A_FEATURE_ADRC_GAIN_VALUE, + MTK_3A_FEATURE_AE_METER_FD_TARGET, + MTK_3A_FEATURE_AE_METER_FD_LINK_TARGET, + MTK_3A_FEATURE_AISHUT_EXIST_MOTION, + MTK_3A_FEATURE_AISHUT_EXPOSURETIME, + MTK_3A_FEATURE_AISHUT_ISO, + MTK_3A_FEATURE_AISHUT_CAPTURE, + MTK_3A_FEATURE_END, + + MTK_HAL_VERSION = MTK_HAL_INFO_START, + + MTK_VSDOF_FEATURE_CALLBACK_BUFFERS = MTK_VSDOF_FEATURE_START, + MTK_VSDOF_FEATURE_DENOISE_MODE, + MTK_VSDOF_FEATURE_DENOISE_SUPPORTED_MODULE, + MTK_VSDOF_FEATURE_3RDPARTY_MODE, + MTK_VSDOF_FEATURE_3RDPARTY_SUPPORTED_MODULE, + MTK_VSDOF_FEATURE_PREVIEW_ENABLE, + MTK_VSDOF_FEATURE_SHOT_MODE, + MTK_VSDOF_FEATURE_PREVIEW_SIZE, + MTK_VSDOF_FEATURE_PREVIEW_MODE, // full or half + MTK_VSDOF_FEATURE_AVAILABLE_PREVIEW_MODE, // for app static metadata use, and it will set in metadata provider. + MTK_VSDOF_FEATURE_WARNING, // [vendor tag] for vsdof feature warning message + MTK_VSDOF_FEATURE_CAPTURE_WARNING_MSG, // [vendor tag] for vsdof capture request in takePicture. + MTK_VSDOF_FEATURE_RECORD_MODE, + MTK_VSDOF_FEATURE_SUPPORTED_DEPTH_MAP_SIZES, // [vendor tag] for vsdof depth stream sizes. + MTK_VSDOF_FEATURE_REFOCUS_CAPTURE_FLOW, // [vendor tag] for vsdof refocus capture flow decision. + MTK_VSDOF_FEATURE_END, + + MTK_MULTI_CAM_FEATURE_MODE = MTK_MULTI_CAM_FEATURE_START, + MTK_MULTI_CAM_FEATURE_AVAILABLE_MODE, // for app static metadata use, and it will set in metadata provider. 
+ MTK_MULTI_CAM_FEATURE_SENSOR_MANUAL_UPDATED, + MTK_MULTI_CAM_AF_ROI, + MTK_MULTI_CAM_AE_ROI, + MTK_MULTI_CAM_AWB_ROI, + MTK_MULTI_CAM_MASTER_ID, + MTK_MULTI_CAM_FOV_CROP_REGION, + MTK_MULTI_CAM_SENSOR_CROP_REGION, + MTK_MULTI_CAM_ZOOM_RANGE, + MTK_MULTI_CAM_ZOOM_VALUE, + MTK_MULTI_CAM_ZOOM_STEPS, + MTK_MULTI_CAM_STREAMING_ID, + MTK_MULTI_CAM_CAPTURE_COUNT, + MTK_MULTI_CAM_CONFIG_SCALER_CROP_REGION, + MTK_MULTI_CAM_FEATURE_END, + + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_SUPPORT = MTK_CONTROL_CAPTURE_START, + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_TRIGGER, + MTK_CONTROL_CAPTURE_NEXT_READY, + MTK_CONTROL_CAPTURE_JPEG_FLIP_MODE, + MTK_CONTROL_CAPTURE_AVAILABLE_POSTVIEW_MODES, + MTK_CONTROL_CAPTURE_POSTVIEW_SIZE, + MTK_CONTROL_CAPTURE_REMOSAIC_EN, + MTK_CONTROL_CAPTURE_AVAILABLE_ZSL_MODES, // zsl + MTK_CONTROL_CAPTURE_DEFAULT_ZSL_MODE, + MTK_CONTROL_CAPTURE_ZSL_MODE, + MTK_CONTROL_CAPTURE_SINGLE_YUV_NR, // To enable or disable P2C output high quality yuv + MTK_CONTROL_CAPTURE_HIGH_QUALITY_YUV, // To enable or disable single YUV NR + MTK_CONTROL_CAPTURE_HINT_FOR_RAW_REPROCESS, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_FRAME_COUNT, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_FRAME_INDEX, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_FRAME_TUNING_INDEX, + MTK_CONTROL_CAPTURE_ISP_TUNING_DATA_ENABLE, + MTK_CONTROL_CAPTURE_ISP_TUNING_DATA_SIZE_FOR_RAW, + MTK_CONTROL_CAPTURE_ISP_TUNING_DATA_SIZE_FOR_YUV, + MTK_CONTROL_CAPTURE_ISP_TUNING_REQUEST, + MTK_CONTROL_CAPTURE_ISP_TUNING_DATA_RAW, + MTK_CONTROL_CAPTURE_ISP_TUNING_DATA_YUV, + MTK_CONTROL_CAPTURE_PACKED_RAW_SUPPORT, + MTK_CONTROL_CAPTURE_PACKED_RAW_ENABLE, + MTK_CONTROL_CAPTURE_PROCESS_RAW_ENABLE, + MTK_CONTROL_CAPTURE_RAW_BPP, + MTK_CONTROL_CAPTURE_YUV_NO_MARGIN, + MTK_CONTROL_CAPTURE_SINGLE_YUV_NR_MODE, // set in sessionparam to enable singel YUV NR + MTK_CONTROL_CAPTURE_INSENSORZOOM_MODE, + MTK_CONTROL_CAPTURE_HIDL_JPEGYUV_TUNING, + MTK_CONTROL_CAPTURE_END, + + MTK_BGSERVICE_FEATURE_PRERELEASE_AVAILABLE_MODES = MTK_BGSERVICE_FEATURE_START, + MTK_BGSERVICE_FEATURE_PRERELEASE, + MTK_BGSERVICE_FEATURE_IMAGEREADERID, + MTK_BGSERVICE_FEATURE_END, + + MTK_CONFIGURE_SETTING_INIT_REQUEST = MTK_CONFIGURE_SETTING_START, + MTK_CONFIGURE_ISP_YUV_DIRECT_JPEG, + MTK_CONFIGURE_SETTING_PROPRIETARY, + MTK_CONFIGURE_SETTING_END, + + MTK_FLASH_FEATURE_CALIBRATION_AVAILABLE = MTK_FLASH_FEATURE_START, // flash calibration available + MTK_FLASH_FEATURE_CALIBRATION_ENABLE, // flash calibration enable + MTK_FLASH_FEATURE_CALIBRATION_RESULT, // flash calibration result + MTK_FLASH_FEATURE_CALIBRATION_STATE, // flash calibration state + MTK_FLASH_FEATURE_CUSTOMIZATION_AVAILABLE, + MTK_FLASH_FEATURE_CUSTOMIZED_RESULT, + MTK_FLASH_FEATURE_CUSTOMIZED_TORCH_DUTY, + MTK_FLASH_FEATURE_END, + + MTK_SMVR_FEATURE_SMVR_MODE = MTK_SMVR_FEATURE_START, + MTK_SMVR_FEATURE_AVAILABLE_SMVR_MODES, + MTK_SMVR_FEATURE_SMVR_RESULT, + MTK_SMVR_FEATURE_END, + + MTK_SINGLEHW_SETTING_MODULE = MTK_SINGLEHW_SETTING_START, + MTK_SINGLEHW_SETTING_SOURCE_CROP, + MTK_SINGLEHW_SETTING_TRANSFORM, + MTK_SINGLEHW_SETTING_VIDEO_STREAM, + MTK_SINGLEHW_SETTING_WARP_MAP_X, + MTK_SINGLEHW_SETTING_WARP_MAP_Y, + MTK_SINGLEHW_SETTING_WARP_MAP_Z, + MTK_SINGLEHW_SETTING_OUT_STREAM_STRIDE, + MTK_SINGLEHW_SETTING_WARP_OUTPUT, + MTK_SINGLEHW_SETTING_END, + + MTK_ABF_FEATURE_ABF_MODE = MTK_ABF_FEATURE_START, + MTK_ABF_FEATURE_ABF_RESULT, + MTK_ABF_FEATURE_AVAILABLE_ABF_MODES, + MTK_ABF_FEATURE_END, +} mtk_camera_metadata_tag_t; + +/** + * Enumeration definitions for the various entries that need 
them + */ + +// MTK_COLOR_CORRECTION_MODE +typedef enum mtk_camera_metadata_enum_android_color_correction_mode { + MTK_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX, + MTK_COLOR_CORRECTION_MODE_FAST, + MTK_COLOR_CORRECTION_MODE_HIGH_QUALITY, +} mtk_camera_metadata_enum_android_color_correction_mode_t; + +// MTK_COLOR_CORRECTION_ABERRATION_MODE +typedef enum mtk_camera_metadata_enum_android_color_correction_aberration_mode { + MTK_COLOR_CORRECTION_ABERRATION_MODE_OFF, + MTK_COLOR_CORRECTION_ABERRATION_MODE_FAST, + MTK_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY, +} mtk_camera_metadata_enum_android_color_correction_aberration_mode_t; + +// MTK_CONTROL_AE_ANTIBANDING_MODE +typedef enum mtk_camera_metadata_enum_android_control_ae_antibanding_mode { + MTK_CONTROL_AE_ANTIBANDING_MODE_OFF, + MTK_CONTROL_AE_ANTIBANDING_MODE_50HZ, + MTK_CONTROL_AE_ANTIBANDING_MODE_60HZ, + MTK_CONTROL_AE_ANTIBANDING_MODE_AUTO, +} mtk_camera_metadata_enum_android_control_ae_antibanding_mode_t; + +// MTK_CONTROL_AE_LOCK +typedef enum mtk_camera_metadata_enum_android_control_ae_lock { + MTK_CONTROL_AE_LOCK_OFF, + MTK_CONTROL_AE_LOCK_ON, +} mtk_camera_metadata_enum_android_control_ae_lock_t; + +// MTK_CONTROL_AE_MODE +typedef enum mtk_camera_metadata_enum_android_control_ae_mode { + MTK_CONTROL_AE_MODE_OFF, + MTK_CONTROL_AE_MODE_ON, + MTK_CONTROL_AE_MODE_ON_AUTO_FLASH, + MTK_CONTROL_AE_MODE_ON_ALWAYS_FLASH, + MTK_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, + MTK_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, +} mtk_camera_metadata_enum_android_control_ae_mode_t; + +// MTK_CONTROL_AE_PRECAPTURE_TRIGGER +typedef enum mtk_camera_metadata_enum_android_control_ae_precapture_trigger { + MTK_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE, + MTK_CONTROL_AE_PRECAPTURE_TRIGGER_START, + MTK_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, +} mtk_camera_metadata_enum_android_control_ae_precapture_trigger_t; + +// MTK_CONTROL_AF_MODE +typedef enum mtk_camera_metadata_enum_android_control_af_mode { + MTK_CONTROL_AF_MODE_OFF, + MTK_CONTROL_AF_MODE_AUTO, + MTK_CONTROL_AF_MODE_MACRO, + MTK_CONTROL_AF_MODE_CONTINUOUS_VIDEO, + MTK_CONTROL_AF_MODE_CONTINUOUS_PICTURE, + MTK_CONTROL_AF_MODE_EDOF, +} mtk_camera_metadata_enum_android_control_af_mode_t; + +// MTK_CONTROL_AF_TRIGGER +typedef enum mtk_camera_metadata_enum_android_control_af_trigger { + MTK_CONTROL_AF_TRIGGER_IDLE, + MTK_CONTROL_AF_TRIGGER_START, + MTK_CONTROL_AF_TRIGGER_CANCEL, +} mtk_camera_metadata_enum_android_control_af_trigger_t; + +// MTK_CONTROL_AWB_LOCK +typedef enum mtk_camera_metadata_enum_android_control_awb_lock { + MTK_CONTROL_AWB_LOCK_OFF, + MTK_CONTROL_AWB_LOCK_ON, +} mtk_camera_metadata_enum_android_control_awb_lock_t; + +// MTK_CONTROL_AWB_MODE +typedef enum mtk_camera_metadata_enum_android_control_awb_mode { + MTK_CONTROL_AWB_MODE_OFF, + MTK_CONTROL_AWB_MODE_AUTO, + MTK_CONTROL_AWB_MODE_INCANDESCENT, + MTK_CONTROL_AWB_MODE_FLUORESCENT, + MTK_CONTROL_AWB_MODE_WARM_FLUORESCENT, + MTK_CONTROL_AWB_MODE_DAYLIGHT, + MTK_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, + MTK_CONTROL_AWB_MODE_TWILIGHT, + MTK_CONTROL_AWB_MODE_SHADE, + MTK_CONTROL_AWB_MODE_GRAYWORLD, //deprecated + MTK_CONTROL_AWB_MODE_MWB, +} mtk_camera_metadata_enum_android_control_awb_mode_t; + +// MTK_CONTROL_CAPTURE_INTENT +typedef enum mtk_camera_metadata_enum_android_control_capture_intent { + MTK_CONTROL_CAPTURE_INTENT_CUSTOM, + MTK_CONTROL_CAPTURE_INTENT_PREVIEW, + MTK_CONTROL_CAPTURE_INTENT_STILL_CAPTURE, + MTK_CONTROL_CAPTURE_INTENT_VIDEO_RECORD, + MTK_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT, + MTK_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG, + 
MTK_CONTROL_CAPTURE_INTENT_MANUAL, + MTK_CONTROL_CAPTURE_INTENT_MOTION_TRACKING, +} mtk_camera_metadata_enum_android_control_capture_intent_t; + +// MTK_CONTROL_EFFECT_MODE +typedef enum mtk_camera_metadata_enum_android_control_effect_mode { + MTK_CONTROL_EFFECT_MODE_OFF, + MTK_CONTROL_EFFECT_MODE_MONO, + MTK_CONTROL_EFFECT_MODE_NEGATIVE, + MTK_CONTROL_EFFECT_MODE_SOLARIZE, + MTK_CONTROL_EFFECT_MODE_SEPIA, + MTK_CONTROL_EFFECT_MODE_POSTERIZE, + MTK_CONTROL_EFFECT_MODE_WHITEBOARD, + MTK_CONTROL_EFFECT_MODE_BLACKBOARD, + MTK_CONTROL_EFFECT_MODE_AQUA, //, + MTK_CONTROL_EFFECT_MODE_SEPIAGREEN, + MTK_CONTROL_EFFECT_MODE_SEPIABLUE, + MTK_CONTROL_EFFECT_MODE_NASHVILLE , //LOMO + MTK_CONTROL_EFFECT_MODE_HEFE , + MTK_CONTROL_EFFECT_MODE_VALENCIA , + MTK_CONTROL_EFFECT_MODE_XPROII , + MTK_CONTROL_EFFECT_MODE_LOFI , + MTK_CONTROL_EFFECT_MODE_SIERRA , + MTK_CONTROL_EFFECT_MODE_KELVIN , + MTK_CONTROL_EFFECT_MODE_WALDEN, + MTK_CONTROL_EFFECT_MODE_F1977 , //LOMO + MTK_CONTROL_EFFECT_MODE_NUM +} mtk_camera_metadata_enum_android_control_effect_mode_t; + +// MTK_CONTROL_MODE +typedef enum mtk_camera_metadata_enum_android_control_mode { + MTK_CONTROL_MODE_OFF, + MTK_CONTROL_MODE_AUTO, + MTK_CONTROL_MODE_USE_SCENE_MODE, + MTK_CONTROL_MODE_OFF_KEEP_STATE, + MTK_CONTROL_MODE_USE_EXTENDED_SCENE_MODE +} mtk_camera_metadata_enum_android_control_mode_t; + +// MTK_CONTROL_SCENE_MODE +typedef enum mtk_camera_metadata_enum_android_control_scene_mode { + MTK_CONTROL_SCENE_MODE_DISABLED = 0, + MTK_CONTROL_SCENE_MODE_UNSUPPORTED = MTK_CONTROL_SCENE_MODE_DISABLED, + MTK_CONTROL_SCENE_MODE_FACE_PRIORITY, + MTK_CONTROL_SCENE_MODE_ACTION, + MTK_CONTROL_SCENE_MODE_PORTRAIT, + MTK_CONTROL_SCENE_MODE_LANDSCAPE, + MTK_CONTROL_SCENE_MODE_NIGHT, + MTK_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, + MTK_CONTROL_SCENE_MODE_THEATRE, + MTK_CONTROL_SCENE_MODE_BEACH, + MTK_CONTROL_SCENE_MODE_SNOW, + MTK_CONTROL_SCENE_MODE_SUNSET, + MTK_CONTROL_SCENE_MODE_STEADYPHOTO, + MTK_CONTROL_SCENE_MODE_FIREWORKS, + MTK_CONTROL_SCENE_MODE_SPORTS, + MTK_CONTROL_SCENE_MODE_PARTY, + MTK_CONTROL_SCENE_MODE_CANDLELIGHT, + MTK_CONTROL_SCENE_MODE_BARCODE, + MTK_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO, + MTK_CONTROL_SCENE_MODE_HDR, + MTK_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT, + MTK_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100, + MTK_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127, + // Camera1 + MTK_CONTROL_SCENE_MODE_NORMAL, + MTK_CONTROL_SCENE_MODE_NUM +} mtk_camera_metadata_enum_android_control_scene_mode_t; + +// MTK_CONTROL_VIDEO_STABILIZATION_MODE +typedef enum mtk_camera_metadata_enum_android_control_video_stabilization_mode { + MTK_CONTROL_VIDEO_STABILIZATION_MODE_OFF, + MTK_CONTROL_VIDEO_STABILIZATION_MODE_ON, +} mtk_camera_metadata_enum_android_control_video_stabilization_mode_t; + +// MTK_CONTROL_AE_STATE +typedef enum mtk_camera_metadata_enum_android_control_ae_state { + MTK_CONTROL_AE_STATE_INACTIVE, + MTK_CONTROL_AE_STATE_SEARCHING, + MTK_CONTROL_AE_STATE_CONVERGED, + MTK_CONTROL_AE_STATE_LOCKED, + MTK_CONTROL_AE_STATE_FLASH_REQUIRED, + MTK_CONTROL_AE_STATE_PRECAPTURE, +} mtk_camera_metadata_enum_android_control_ae_state_t; + +// MTK_CONTROL_AF_STATE +typedef enum mtk_camera_metadata_enum_android_control_af_state { + MTK_CONTROL_AF_STATE_INACTIVE, + MTK_CONTROL_AF_STATE_PASSIVE_SCAN, + MTK_CONTROL_AF_STATE_PASSIVE_FOCUSED, + MTK_CONTROL_AF_STATE_ACTIVE_SCAN, + MTK_CONTROL_AF_STATE_FOCUSED_LOCKED, + MTK_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED, + MTK_CONTROL_AF_STATE_PASSIVE_UNFOCUSED, +} mtk_camera_metadata_enum_android_control_af_state_t; + +// 
MTK_CONTROL_AWB_STATE +typedef enum mtk_camera_metadata_enum_android_control_awb_state { + MTK_CONTROL_AWB_STATE_INACTIVE, + MTK_CONTROL_AWB_STATE_SEARCHING, + MTK_CONTROL_AWB_STATE_CONVERGED, + MTK_CONTROL_AWB_STATE_LOCKED, +} mtk_camera_metadata_enum_android_control_awb_state_t; + +// MTK_CONTROL_AE_LOCK_AVAILABLE +typedef enum mtk_camera_metadata_enum_android_control_ae_lock_available { + MTK_CONTROL_AE_LOCK_AVAILABLE_FALSE, + MTK_CONTROL_AE_LOCK_AVAILABLE_TRUE, +} mtk_camera_metadata_enum_android_control_ae_lock_available_t; + +// MTK_CONTROL_AWB_LOCK_AVAILABLE +typedef enum mtk_camera_metadata_enum_android_control_awb_lock_available { + MTK_CONTROL_AWB_LOCK_AVAILABLE_FALSE, + MTK_CONTROL_AWB_LOCK_AVAILABLE_TRUE, +} mtk_camera_metadata_enum_android_control_awb_lock_available_t; + +// MTK_CONTROL_ENABLE_ZSL +typedef enum mtk_camera_metadata_enum_android_control_enable_zsl { + MTK_CONTROL_ENABLE_ZSL_FALSE, + MTK_CONTROL_ENABLE_ZSL_TRUE, +} mtk_camera_metadata_enum_android_control_enable_zsl_t; + +// MTK_CONTROL_AF_SCENE_CHANGE +typedef enum mtk_camera_metadata_enum_android_control_af_scene_change { + MTK_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED, + MTK_CONTROL_AF_SCENE_CHANGE_DETECTED, +} mtk_camera_metadata_enum_android_control_af_scene_change_t; + +// MTK_CONTROL_EXTENDED_SCENE_MODE +typedef enum mtk_camera_metadata_enum_android_control_extended_scene_mode { + MTK_CONTROL_EXTENDED_SCENE_MODE_DISABLED = 0, // HIDL v3.5 + MTK_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE, // HIDL v3.5 + MTK_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS, // HIDL v3.5 + MTK_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START = 0x40, // HIDL v3.5 +} mtk_camera_metadata_enum_android_control_extended_scene_mode_t; + +// MTK_CONTROL_ISP_EDGE +typedef enum mtk_camera_metadata_enum_android_control_isp_edge { + MTK_CONTROL_ISP_EDGE_LOW, + MTK_CONTROL_ISP_EDGE_MIDDLE, + MTK_CONTROL_ISP_EDGE_HIGH +} mtk_camera_metadata_enum_android_control_isp_edge_t; + +// MTK_CONTROL_ISP_HUE +typedef enum mtk_camera_metadata_enum_android_control_isp_hue { + MTK_CONTROL_ISP_HUE_LOW, + MTK_CONTROL_ISP_HUE_MIDDLE, + MTK_CONTROL_ISP_HUE_HIGH +} mtk_camera_metadata_enum_android_control_isp_hue_t; + +// MTK_CONTROL_ISP_SATURATION +typedef enum mtk_camera_metadata_enum_android_control_isp_saturation { + MTK_CONTROL_ISP_SATURATION_LOW, + MTK_CONTROL_ISP_SATURATION_MIDDLE, + MTK_CONTROL_ISP_SATURATION_HIGH +} mtk_camera_metadata_enum_android_control_isp_saturation_t; + +// MTK_CONTROL_ISP_BRIGHTNESS +typedef enum mtk_camera_metadata_enum_android_control_isp_brightness { + MTK_CONTROL_ISP_BRIGHTNESS_LOW, + MTK_CONTROL_ISP_BRIGHTNESS_MIDDLE, + MTK_CONTROL_ISP_BRIGHTNESS_HIGH +} mtk_camera_metadata_enum_android_control_isp_brightness_t; + +// MTK_CONTROL_ISP_CONTRAST +typedef enum mtk_camera_metadata_enum_android_control_isp_contrast { + MTK_CONTROL_ISP_CONTRAST_LOW, + MTK_CONTROL_ISP_CONTRAST_MIDDLE, + MTK_CONTROL_ISP_CONTRAST_HIGH +} mtk_camera_metadata_enum_android_control_isp_contrast_t; + +// MTK_DEMOSAIC_MODE +typedef enum mtk_camera_metadata_enum_android_demosaic_mode { + MTK_DEMOSAIC_MODE_FAST, + MTK_DEMOSAIC_MODE_HIGH_QUALITY, +} mtk_camera_metadata_enum_android_demosaic_mode_t; + +// MTK_EDGE_MODE +typedef enum mtk_camera_metadata_enum_android_edge_mode { + MTK_EDGE_MODE_OFF, + MTK_EDGE_MODE_FAST, + MTK_EDGE_MODE_HIGH_QUALITY, + MTK_EDGE_MODE_ZERO_SHUTTER_LAG, +} mtk_camera_metadata_enum_android_edge_mode_t; + +// MTK_FLASH_MODE +typedef enum mtk_camera_metadata_enum_android_flash_mode { + MTK_FLASH_MODE_OFF, + MTK_FLASH_MODE_SINGLE, + 
MTK_FLASH_MODE_TORCH, +} mtk_camera_metadata_enum_android_flash_mode_t; + +// MTK_FLASH_STATE +typedef enum mtk_camera_metadata_enum_android_flash_state { + MTK_FLASH_STATE_UNAVAILABLE, + MTK_FLASH_STATE_CHARGING, + MTK_FLASH_STATE_READY, + MTK_FLASH_STATE_FIRED, + MTK_FLASH_STATE_PARTIAL, +} mtk_camera_metadata_enum_android_flash_state_t; + +// MTK_FLASH_FEATURE_CUSTOMIZED_TORCH_DUTY +typedef enum mtk_camera_metadata_enum_android_flash_feature_customized_torch_duty { + MTK_FLASH_FEATURE_CUSTOMIZED_TORCH_DUTY_LOW, + MTK_FLASH_FEATURE_CUSTOMIZED_TORCH_DUTY_MID, + MTK_FLASH_FEATURE_CUSTOMIZED_TORCH_DUTY_HIGH, +} mtk_camera_metadata_enum_android_flash_feature_customized_torch_duty_t; + +// MTK_FLASH_INFO_AVAILABLE +typedef enum mtk_camera_metadata_enum_android_flash_info_available { + MTK_FLASH_INFO_AVAILABLE_FALSE, + MTK_FLASH_INFO_AVAILABLE_TRUE, +} mtk_camera_metadata_enum_android_flash_info_available_t; + +// MTK_HOT_PIXEL_MODE +typedef enum mtk_camera_metadata_enum_android_hot_pixel_mode { + MTK_HOT_PIXEL_MODE_OFF, + MTK_HOT_PIXEL_MODE_FAST, + MTK_HOT_PIXEL_MODE_HIGH_QUALITY, +} mtk_camera_metadata_enum_android_hot_pixel_mode_t; + +// MTK_LENS_OPTICAL_STABILIZATION_MODE +typedef enum mtk_camera_metadata_enum_android_lens_optical_stabilization_mode { + MTK_LENS_OPTICAL_STABILIZATION_MODE_OFF, + MTK_LENS_OPTICAL_STABILIZATION_MODE_ON, +} mtk_camera_metadata_enum_android_lens_optical_stabilization_mode_t; + +// MTK_LENS_FACING +typedef enum mtk_camera_metadata_enum_android_lens_facing { + MTK_LENS_FACING_FRONT, + MTK_LENS_FACING_BACK, + MTK_LENS_FACING_EXTERNAL, +} mtk_camera_metadata_enum_android_lens_facing_t; + +// MTK_LENS_STATE +typedef enum mtk_camera_metadata_enum_android_lens_state { + MTK_LENS_STATE_STATIONARY, + MTK_LENS_STATE_MOVING, +} mtk_camera_metadata_enum_android_lens_state_t; + +// MTK_LENS_POSE_REFERENCE +typedef enum mtk_camera_metadata_enum_android_lens_pose_reference { + MTK_LENS_POSE_REFERENCE_PRIMARY_CAMERA, + MTK_LENS_POSE_REFERENCE_GYROSCOPE, + MTK_LENS_POSE_REFERENCE_UNDEFINED +} mtk_camera_metadata_enum_android_lens_pose_reference_t; + +// MTK_LENS_INFO_FOCUS_DISTANCE_CALIBRATION +typedef enum mtk_camera_metadata_enum_android_lens_info_focus_distance_calibration { + MTK_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED, + MTK_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE, + MTK_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED, +} mtk_camera_metadata_enum_android_lens_info_focus_distance_calibration_t; + +// MTK_NOISE_REDUCTION_MODE +typedef enum mtk_camera_metadata_enum_android_noise_reduction_mode { + MTK_NOISE_REDUCTION_MODE_OFF, + MTK_NOISE_REDUCTION_MODE_FAST, + MTK_NOISE_REDUCTION_MODE_HIGH_QUALITY, + MTK_NOISE_REDUCTION_MODE_MINIMAL, + MTK_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG, +} mtk_camera_metadata_enum_android_noise_reduction_mode_t; + +// MTK_QUIRKS_PARTIAL_RESULT +typedef enum mtk_metadata_enum_android_quirks_partial_result { + MTK_QUIRKS_PARTIAL_RESULT_FINAL, + MTK_QUIRKS_PARTIAL_RESULT_PARTIAL, +} mtk_metadata_enum_android_quirks_partial_result_t; + +// MTK_REQUEST_METADATA_MODE +typedef enum mtk_camera_metadata_enum_android_request_metadata_mode { + MTK_REQUEST_METADATA_MODE_NONE, + MTK_REQUEST_METADATA_MODE_FULL, +} mtk_camera_metadata_enum_android_request_metadata_mode_t; + +// MTK_REQUEST_TYPE +typedef enum mtk_camera_metadata_enum_android_request_type { + MTK_REQUEST_TYPE_CAPTURE, + MTK_REQUEST_TYPE_REPROCESS, +} mtk_camera_metadata_enum_android_request_type_t; + +// MTK_REQUEST_AVAILABLE_CAPABILITIES +typedef enum 
mtk_camera_metadata_enum_android_request_available_capabilities { + MTK_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, + MTK_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR, + MTK_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING, + MTK_REQUEST_AVAILABLE_CAPABILITIES_RAW, + MTK_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING, + MTK_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS, + MTK_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE, + MTK_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING, + MTK_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT, + MTK_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO, + MTK_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING, // HIDL v3.3 + MTK_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA, // HIDL v3.3 + MTK_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME, // HIDL v3.3 + MTK_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA, // HIDL v3.4 + MTK_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA, // HIDL v3.5 + MTK_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING, // HIDL v3.5 +} mtk_camera_metadata_enum_android_request_available_capabilities_t; + +// MTK_SCALER_AVAILABLE_FORMATS +typedef enum mtk_camera_metadata_enum_android_scaler_available_formats { + MTK_SCALER_AVAILABLE_FORMATS_RAW16 = 0x20, + MTK_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE = 0x24, + MTK_SCALER_AVAILABLE_FORMATS_YV12 = 0x32315659, + MTK_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP = 0x11, + MTK_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED = 0x22, + MTK_SCALER_AVAILABLE_FORMATS_YCbCr_420_888 = 0x23, + MTK_SCALER_AVAILABLE_FORMATS_BLOB = 0x21, + MTK_SCALER_AVAILABLE_FORMATS_RAW10 = 0x25, // HIDL v3.4 + MTK_SCALER_AVAILABLE_FORMATS_RAW12 = 0x26, // HIDL v3.4 + MTK_SCALER_AVAILABLE_FORMATS_Y8 = 0x20203859, // HIDL v3.4: +} mtk_camera_metadata_enum_android_scaler_available_formats_t; + +// MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS +typedef enum mtk_camera_metadata_enum_android_scaler_available_stream_configurations { + MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, + MTK_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT, +} mtk_camera_metadata_enum_android_scaler_available_stream_configurations_t; + +// MTK_SCALER_CROPPING_TYPE +typedef enum mtk_camera_metadata_enum_android_scaler_cropping_type { + MTK_SCALER_CROPPING_TYPE_CENTER_ONLY, + MTK_SCALER_CROPPING_TYPE_FREEFORM, +} mkt_camera_metadata_enum_android_scaler_cropping_type_t; + +// MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS +typedef enum mtk_camera_metadata_enum_android_scaler_available_recommended_stream_configurations {// HIDL v3.4 + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW + = 0x1 << 0x0, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD + = 0x1 << 0x1, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT + = 0x1 << 0x2, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT + = 0x1 << 0x3, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL = 0x1 << 0x4, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW = 0x1 << 0x5, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_LOW_LATENCY_SNAPSHOT + = 0x1 << 
0x6, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_LOW_LATENCY_SNAPSHOT + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END + = 0x1 << 0x7, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END + MTK_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START + = 0x1 << 0x18, // 1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START +} mtk_camera_metadata_enum_android_scaler_available_recommended_stream_configurations_t; + +// MTK_SCALER_ROTATE_AND_CROP +typedef enum mtk_camera_metadata_enum_android_scaler_rotate_and_crop { + MTK_SCALER_ROTATE_AND_CROP_NONE, // HIDL v3.5 + MTK_SCALER_ROTATE_AND_CROP_90, // HIDL v3.5 + MTK_SCALER_ROTATE_AND_CROP_180, // HIDL v3.5 + MTK_SCALER_ROTATE_AND_CROP_270, // HIDL v3.5 + MTK_SCALER_ROTATE_AND_CROP_AUTO, // HIDL v3.5 +} mtk_camera_metadata_enum_android_scaler_rotate_and_crop_t; + + +// MTK_IOPIPE_INFO_CROP +typedef enum mtk_camera_metadata_enum_android_iopipe_info_crop { + MTK_IOPIPE_INFO_CROP_NOT_SUPPORT, + MTK_IOPIPE_INFO_CROP_SYMMETRIC, + MTK_IOPIPE_INFO_CROP_ASYMMETRIC, +} mtk_camera_metadata_enum_android_iopipe_info_crop_t; + +// MTK_SENSOR_REFERENCE_ILLUMINANT1 +typedef enum mtk_camera_metadata_enum_android_sensor_reference_illuminant1 { + MTK_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1, + MTK_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2, + MTK_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3, + MTK_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4, + MTK_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9, + MTK_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10, + MTK_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11, + MTK_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12, + MTK_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13, + MTK_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14, + MTK_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15, + MTK_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17, + MTK_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18, + MTK_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19, + MTK_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20, + MTK_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21, + MTK_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22, + MTK_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23, + MTK_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24, +} mtk_camera_metadata_enum_android_sensor_reference_illuminant1_t; + +// MTK_SENSOR_TEST_PATTERN_MODE +typedef enum mtk_camera_metadata_enum_android_sensor_test_pattern_mode { + MTK_SENSOR_TEST_PATTERN_MODE_OFF, + MTK_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, + MTK_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, + MTK_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, + MTK_SENSOR_TEST_PATTERN_MODE_PN9, + MTK_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256, +} mkt_camera_metadata_enum_android_sensor_test_pattern_mode_t; + +// MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT +typedef enum mtk_camera_metadata_enum_android_sensor_info_color_filter_arrangement { + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB, + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO, // HIDL v3.4 + MTK_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR, // HIDL v3.4 +} mtk_camera_metadata_enum_android_sensor_info_color_filter_arrangement_t; + +// MTK_SENSOR_INFO_TIMESTAMP_SOURCE +typedef enum mtk_camera_metadata_enum_android_sensor_info_timestamp_source { + MTK_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN, + 
MTK_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME, +} mtk_camera_metadata_enum_android_sensor_info_timestamp_source_t; + +// MTK_SENSOR_INFO_LENS_SHADING_APPLIED +typedef enum mtk_camera_metadata_enum_android_sensor_info_lens_shading_applied { + MTK_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE, + MTK_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE, +} mtk_camera_metadata_enum_android_sensor_info_lens_shading_applied_t; + +// MTK_SENSOR_INFO_SCENARIO_ID +typedef enum mtk_camera_metadata_enum_android_sensor_info_scenario_id { + MTK_SENSOR_INFO_SCENARIO_ID_ZSD, + MTK_SENSOR_INFO_SCENARIO_ID_NORMAL_PREVIEW, + MTK_SENSOR_INFO_SCENARIO_ID_NORMAL_CAPTURE, + MTK_SENSOR_INFO_SCENARIO_ID_NORMAL_VIDEO, + /************************************************************************** + * All unnamed scenario id for a specific sensor must be started with + * values >= MTK_SENSOR_INFO_SCENARIO_ID_UNNAMED_START. + **************************************************************************/ + MTK_SENSOR_INFO_SCENARIO_ID_UNNAMED_START = 0x100, +} mtk_camera_metadata_enum_android_sensor_info_scenario_id_t; + +// MTK_SHADING_MODE +typedef enum mtk_camera_metadata_enum_android_shading_mode { + MTK_SHADING_MODE_OFF, + MTK_SHADING_MODE_FAST, + MTK_SHADING_MODE_HIGH_QUALITY, +} mtk_camera_metadata_enum_android_shading_mode_t; + +// MTK_STATISTICS_FACE_DETECT_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_face_detect_mode { + MTK_STATISTICS_FACE_DETECT_MODE_OFF, + MTK_STATISTICS_FACE_DETECT_MODE_SIMPLE, + MTK_STATISTICS_FACE_DETECT_MODE_FULL, +} mtk_camera_metadata_enum_android_statistics_face_detect_mode_t; + +// MTK_STATISTICS_HISTOGRAM_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_histogram_mode { + MTK_STATISTICS_HISTOGRAM_MODE_OFF, + MTK_STATISTICS_HISTOGRAM_MODE_ON, +} mtk_camera_metadata_enum_android_statistics_histogram_mode_t; + +// MTK_STATISTICS_SHARPNESS_MAP_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_sharpness_map_mode { + MTK_STATISTICS_SHARPNESS_MAP_MODE_OFF, + MTK_STATISTICS_SHARPNESS_MAP_MODE_ON, +} mtk_camera_metadata_enum_android_statistics_sharpness_map_mode_t; + +// MTK_STATISTICS_HOT_PIXEL_MAP_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_hot_pixel_map_mode { + MTK_STATISTICS_HOT_PIXEL_MAP_MODE_OFF, + MTK_STATISTICS_HOT_PIXEL_MAP_MODE_ON, +} mtk_camera_metadata_enum_android_statistics_hot_pixel_map_mode_t; + +// MTK_STATISTICS_SCENE_FLICKER +typedef enum mtk_camera_metadata_enum_android_statistics_scene_flicker { + MTK_STATISTICS_SCENE_FLICKER_NONE, + MTK_STATISTICS_SCENE_FLICKER_50HZ, + MTK_STATISTICS_SCENE_FLICKER_60HZ, +} mtk_camera_metadata_enum_android_statistics_scene_flicker_t; + +// MTK_STATISTICS_LENS_SHADING_MAP_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_lens_shading_map_mode { + MTK_STATISTICS_LENS_SHADING_MAP_MODE_OFF, + MTK_STATISTICS_LENS_SHADING_MAP_MODE_ON, +} mtk_camera_metadata_enum_android_statistics_lens_shading_map_mode_t; + +// MTK_STATISTICS_OIS_DATA_MODE +typedef enum mtk_camera_metadata_enum_android_statistics_ois_data_mode { + MTK_STATISTICS_OIS_DATA_MODE_OFF, + MTK_STATISTICS_OIS_DATA_MODE_ON +} mtk_camera_metadata_enum_android_statistics_ois_data_mode_t; + + + +// MTK_TONEMAP_MODE +typedef enum mtk_camera_metadata_enum_android_tonemap_mode { + MTK_TONEMAP_MODE_CONTRAST_CURVE, + MTK_TONEMAP_MODE_FAST, + MTK_TONEMAP_MODE_HIGH_QUALITY, + MTK_TONEMAP_MODE_GAMMA_VALUE, + MTK_TONEMAP_MODE_PRESET_CURVE, +} mtk_camera_metadata_enum_android_tonemap_mode_t; + +// MTK_TONEMAP_PRESET_CURVE +typedef enum 
mtk_camera_metadata_enum_android_tonemap_preset_curve { + MTK_TONEMAP_PRESET_CURVE_SRGB, + MTK_TONEMAP_PRESET_CURVE_REC709, +} mtk_camera_metadata_enum_android_tonemap_preset_curve_t; + +// MTK_LED_TRANSMIT +typedef enum mtk_camera_metadata_enum_android_led_transmit { + MTK_LED_TRANSMIT_OFF, + MTK_LED_TRANSMIT_ON, +} mtk_camera_metadata_enum_android_led_transmit_t; + +// MTK_LED_AVAILABLE_LEDS +typedef enum mtk_camera_metadata_enum_android_led_available_leds { + MTK_LED_AVAILABLE_LEDS_TRANSMIT, +} mtk_camera_metadata_enum_android_led_available_leds_t; + +// MTK_INFO_SUPPORTED_HARDWARE_LEVEL +typedef enum mtk_camera_metadata_enum_android_info_supported_hardware_level { + MTK_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED, + MTK_INFO_SUPPORTED_HARDWARE_LEVEL_FULL, + MTK_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, + MTK_INFO_SUPPORTED_HARDWARE_LEVEL_3, + MTK_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL, +} mtk_camera_metadata_enum_android_info_supported_hardware_level_t; + +// MTK_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION +typedef enum mtk_camera_metadata_enum_android_info_supported_buffer_management_version { + MTK_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5, // HIDL v3.4 +} mtk_camera_metadata_enum_android_info_supported_buffer_management_version_t; + +// MTK_BLACK_LEVEL_LOCK +typedef enum mtk_camera_metadata_enum_android_black_level_lock { + MTK_BLACK_LEVEL_LOCK_OFF, + MTK_BLACK_LEVEL_LOCK_ON, +} mtk_camera_metadata_enum_android_black_level_lock_t; + +// MTK_SYNC_FRAME_NUMBER +typedef enum mtk_camera_metadata_enum_android_sync_frame_number { + MTK_SYNC_FRAME_NUMBER_CONVERGING = -1, + MTK_SYNC_FRAME_NUMBER_UNKNOWN = -2, +} mtk_camera_metadata_enum_android_sync_frame_number_t; + +// MTK_SYNC_MAX_LATENCY +typedef enum mtk_camera_metadata_enum_android_sync_max_latency { + MTK_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, + MTK_SYNC_MAX_LATENCY_UNKNOWN = -1, +} mtk_camera_metadata_enum_android_sync_max_latency_t; + +// MTK_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS +typedef enum mtk_camera_metadata_enum_android_depth_available_depth_stream_configurations { + MTK_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT, + MTK_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT, +} mtk_camera_metadata_enum_android_depth_available_depth_stream_configurations_t; + +// MTK_DEPTH_DEPTH_IS_EXCLUSIVE +typedef enum mtk_camera_metadata_enum_android_depth_depth_is_exclusive { + MTK_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE, + MTK_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE, +} mtk_camera_metadata_enum_android_depth_depth_is_exclusive_t; + +// MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS +typedef enum mtk_camera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations { + MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_OUTPUT + , // HIDL v3.4 + MTK_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_INPUT + , // HIDL v3.4 +} mtk_camera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_t; + +// MTK_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE +typedef enum mtk_camera_metadata_enum_android_logic_multi_camera_sensor_sync_type { + MTK_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE, + MTK_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED, +} mtk_camera_metadata_enum_android_logic_multi_camera_sensor_sync_type_t; + +// MTK_DISTORTION_CORRECTION_MODE +typedef enum mtk_camera_metadata_enum_android_distortion_correction_mode { + MTK_DISTORTION_CORRECTION_MODE_OFF , // HIDL v3.3 + MTK_DISTORTION_CORRECTION_MODE_FAST , // HIDL v3.3 + MTK_DISTORTION_CORRECTION_MODE_HIGH_QUALITY , // HIDL v3.3 +} 
mtk_camera_metadata_enum_android_distortion_correction_mode_t; + +// MTK_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS +typedef enum mtk_camera_metadata_enum_android_heic_available_heic_stream_configurations { + MTK_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_OUTPUT , // HIDL v3.4 + MTK_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT , // HIDL v3.4 +} mtk_camera_metadata_enum_android_heic_available_heic_stream_configurations_t; + +// MTK_HEIC_INFO_SUPPORTED +typedef enum mtk_camera_metadata_enum_android_heic_info_supported { + MTK_HEIC_INFO_SUPPORTED_FALSE , // HIDL v3.4 + MTK_HEIC_INFO_SUPPORTED_TRUE , // HIDL v3.4 +} mtk_camera_metadata_enum_android_heic_info_supported_t; + +// MTK_HAL_VERSION +#define MTKCAM_DEVICE_API_VERSION(maj,min) ((((maj) & 0xff) << 8) | ((min) & 0xff)) +typedef enum mtk_camera_metadata_num_hal_version { + MTK_HAL_VERSION_1_0 = MTKCAM_DEVICE_API_VERSION(1, 0), + MTK_HAL_VERSION_3_0 = MTKCAM_DEVICE_API_VERSION(3, 0), + MTK_HAL_VERSION_3_1 = MTKCAM_DEVICE_API_VERSION(3, 1), + MTK_HAL_VERSION_3_2 = MTKCAM_DEVICE_API_VERSION(3, 2), + MTK_HAL_VERSION_3_3 = MTKCAM_DEVICE_API_VERSION(3, 3), + MTK_HAL_VERSION_3_4 = MTKCAM_DEVICE_API_VERSION(3, 4), + MTK_HAL_VERSION_3_5 = MTKCAM_DEVICE_API_VERSION(3, 5), +} mtk_camera_metadata_num_hal_version_t; + +// MTK GESTURE SHOT FEATURE +typedef enum mtk_camera_metadata_enum_gesture_shot_mode { + MTK_FACE_FEATURE_GESTURE_MODE_OFF = 0, + MTK_FACE_FEATURE_GESTURE_MODE_SIMPLE, +} mtk_camera_metadata_enum_gesture_shot_mode_t; + +// MTK SMILE SHOT FEATURE +typedef enum mtk_camera_metadata_enum_smile_detect_mode { + MTK_FACE_FEATURE_SMILE_DETECT_MODE_OFF = 0, + MTK_FACE_FEATURE_SMILE_DETECT_MODE_SIMPLE, +} mtk_camera_metadata_enum_smile_detect_mode_t; + +// MTK AUTO SCENE DETECTION FEATURE +typedef enum mtk_camera_metadata_enum_asd_mode { + MTK_FACE_FEATURE_ASD_MODE_OFF = 0, + MTK_FACE_FEATURE_ASD_MODE_SIMPLE, + MTK_FACE_FEATURE_ASD_MODE_FULL, +} mtk_camera_metadata_enum_asd_mode_t; + +// MTK 3DNR +typedef enum mtk_camera_metadata_enum_3dnr_mode { + MTK_NR_FEATURE_3DNR_MODE_OFF = 0, + MTK_NR_FEATURE_3DNR_MODE_ON, +} mtk_camera_metadata_enum_3dnr_mode_t; + +// MTK HDR +typedef enum mtk_camera_metadata_enum_hdr_mode { // Need align include/mtkcam/feature/hdrDetection/Defs.h + MTK_HDR_FEATURE_HDR_MODE_OFF = 0, + MTK_HDR_FEATURE_HDR_MODE_ON, + MTK_HDR_FEATURE_HDR_MODE_AUTO, + MTK_HDR_FEATURE_HDR_MODE_VIDEO_ON, + MTK_HDR_FEATURE_HDR_MODE_VIDEO_AUTO, +} mtk_camera_metadata_enum_hdr_mode_t; + +typedef enum mtk_camera_metadata_enum_vhdr_mode { // Need align include/mtkcam/drv/IHalSensor.h + MTK_HDR_FEATURE_VHDR_MODE_OFF = 0, + MTK_HDR_FEATURE_VHDR_MODE_IVHDR = 1, + MTK_HDR_FEATURE_VHDR_MODE_MVHDR = 2, + MTK_HDR_FEATURE_VHDR_MODE_ZVHDR = 9, +} mtk_camera_metadata_enum_vhdr_mode_t; + +typedef enum mtk_camera_metadata_enum_hdr_hal_mode { + MTK_HDR_FEATURE_HDR_HAL_MODE_OFF = 0x0, + MTK_HDR_FEATURE_HDR_HAL_MODE_MVHDR = 0x1, + MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_CAPTURE = 0x2, + MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_PREVIEW = 0x4, + MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_CAPTURE_PREVIEW = ( MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_CAPTURE | + MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_PREVIEW ), + MTK_HDR_FEATURE_HDR_HAL_MODE_STAGGER_2EXP = 0x8, + MTK_HDR_FEATURE_HDR_HAL_MODE_STAGGER_3EXP = 0x10, + MTK_HDR_FEATURE_HDR_HAL_MODE_STAGGER = ( MTK_HDR_FEATURE_HDR_HAL_MODE_STAGGER_2EXP | + MTK_HDR_FEATURE_HDR_HAL_MODE_STAGGER_3EXP ), +} mtk_camera_metadata_enum_multi_exposure_hdr_mode_t; + +typedef enum mtk_camera_metadata_enum_single_frame_hdr { + 
MTK_HDR_FEATURE_SINGLE_FRAME_HDR_NOT_SUPPORTED = 0, + MTK_HDR_FEATURE_SINGLE_FRAME_HDR_SUPPORTED = 1, +} mtk_camera_metadata_enum_single_frame_hdr_t; + +// MTK_STREAMING_FEATURE_HDR10 +typedef enum mtk_camera_metadata_enum_hdr10 { + MTK_STREAMING_FEATURE_HDR10_OFF = 0, + MTK_STREAMING_FEATURE_HDR10_ON, +} mtk_camera_metadata_enum_hdr10_t; + +// MTK_STEREO_FEATURE_STATE +typedef enum mtk_camera_metadata_enum_stereo_feature_status { + MTK_STEREO_FEATURE_STATUS_OFF = 0, + MTK_STEREO_FEATURE_STATUS_PREVIEW, + MTK_STEREO_FEATURE_STATUS_CAPTURE, + MTK_STEREO_FEATURE_STATUS_RECORD, +} mtk_camera_metadata_enum_stereo_feature_status_t; + +// MTK MFB +typedef enum mtk_camera_metadata_enum_mfnr_mfb { + MTK_MFNR_FEATURE_MFB_OFF = 0, + MTK_MFNR_FEATURE_MFB_MFLL, + MTK_MFNR_FEATURE_MFB_AIS, + MTK_MFNR_FEATURE_MFB_AUTO = 0xFF, +} mtk_camera_metadata_enum_mfnr_mfb_t; + +// MTK MFNR AIS +typedef enum mtk_camera_metadata_enum_mfnr_ais { + MTK_MFNR_FEATURE_AIS_OFF = 0, + MTK_MFNR_FEATURE_AIS_ON, +} mtk_camera_metadata_enum_mfnr_ais_t; + +// MTK CShot +typedef enum mtk_camera_metadata_enum_cshot_mode { + MTK_CSHOT_FEATURE_AVAILABLE_MODE_OFF = 0, + MTK_CSHOT_FEATURE_AVAILABLE_MODE_ON, +} mtk_camera_metadata_enum_cshot_mode_t; + +typedef enum mtk_camera_metadata_enum_cshot_capture { + MTK_CSHOT_FEATURE_CAPTURE_OFF = 0, + MTK_CSHOT_FEATURE_CAPTURE_ON, +} mtk_camera_metadata_enum_cshot_capture_t; + + +//postview +typedef enum mtk_camera_metadata_enum_control_capture_available_postview_modes { + MTK_CONTROL_CAPTURE_POSTVIEW_MODE_OFF = 0, + MTK_CONTROL_CAPTURE_POSTVIEW_MODE_ON, +} mtk_camera_metadata_enum_control_capture_available_postview_modes; + +typedef enum mtk_camera_metadata_enum_control_capture_postview_size { + MTK_CONTROL_CAPTURE_POSTVIEW_SIZE_WIDTH = 0, + MTK_CONTROL_CAPTURE_POSTVIEW_SIZE_HEIGHT = 0, +} mtk_camera_metadata_enum_control_capture_postview_size; + +//p2done callback +typedef enum mtk_camera_metadata_enum_control_capture_early_notification_support { + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_SUPPORT_OFF = 0, + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_SUPPORT_ON, +} mtk_camera_metadata_enum_control_capture_early_notification_support; + +typedef enum mtk_camera_metadata_enum_control_capture_early_notification_trigger { + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_TRIGGER_OFF = 0, + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_TRIGGER_ON, +} mtk_camera_metadata_enum_control_capture_early_notification_trigger; + +typedef enum mtk_camera_metadata_enum_control_capture_early_next_ready { + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_NOT_READY = 0, + MTK_CONTROL_CAPTURE_EARLY_NOTIFICATION_READY, +} mtk_camera_metadata_enum_control_capture_early_next_ready; + +// raw reprocess hint +typedef enum mtk_camera_metadata_enum_control_capture_hint_for_raw_reprocess { + MTK_CONTROL_CAPTURE_HINT_FOR_RAW_REPROCESS_FALSE = 0, + MTK_CONTROL_CAPTURE_HINT_FOR_RAW_REPROCESS_TRUE, +} mtk_camera_metadata_enum_control_capture_hint_for_raw_reprocess; + +// raw reprocess hint +typedef enum mtk_camera_metadata_enum_control_capture_hint_for_isp_tuning { + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_DEFAULT_NONE = 0, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_AINR, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_YUV_REPROCESS, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_AIHDR, + // Above only for MTK turn key feautre usage + + // MUST add customer hint as below. 
(reserved for customer features' hint) + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_FOR_3RD_PARTY = 5000, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_CUSTOMER_YUV_HDR, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_CUSTOMER_RAW_HDR, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_CUSTOMER_SUPER_NIGHT, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_CUSTOMER_SUPER_RESOLUTION, + MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_END, +} mtk_camera_metadata_enum_control_capture_hint_for_isp_tuning; + +// Jpeg flip +typedef enum mtk_camera_metadata_enum_control_capture_jpeg_flip_mode { + MTK_CONTROL_CAPTURE_JPEG_FLIP_MODE_OFF = 0, + MTK_CONTROL_CAPTURE_JPEG_FLIP_MODE_ON, +} mtk_camera_metadata_enum_control_capture_jpeg_flip_mode; + +// zsl +typedef enum mtk_camera_metadata_enum_control_capture_zsl_mode { + MTK_CONTROL_CAPTURE_ZSL_MODE_OFF = 0, + MTK_CONTROL_CAPTURE_ZSL_MODE_ON, +} mtk_camera_metadata_enum_control_capture_zsl_mode; + +// isp tuning +typedef enum mtk_camera_metadata_enum_control_capture_tuning_request { + MTK_CONTROL_CAPTURE_ISP_TUNING_REQ_RAW = 0x1, + MTK_CONTROL_CAPTURE_ISP_TUNING_REQ_YUV = 0x2, +} mtk_camera_metadata_enum_control_capture_tuning_request; + +//packed raw +typedef enum mtk_camera_metadata_enum_control_capture_packed_raw_support { + MTK_CONTROL_CAPTURE_PACKED_RAW_SUPPORT_OFF = 0, + MTK_CONTROL_CAPTURE_PACKED_RAW_SUPPORT_ON, +} mtk_camera_metadata_enum_control_capture_packed_raw_support; + +//bgservice +typedef enum mtk_camera_metadata_enum_bgservice_prerelease_availablemode { + MTK_BGSERVICE_FEATURE_PRERELEASE_MODE_OFF = 0, + MTK_BGSERVICE_FEATURE_PRERELEASE_MODE_ON, +} mtk_camera_metadata_enum_bgservice_prerelease_availablemode; + +typedef enum mtk_camera_metadata_enum_bgservice_prerelease { + MTK_BGSERVICE_FEATURE_PRERELEASE_OFF = 0, + MTK_BGSERVICE_FEATURE_PRERELEASE_ON, +} mtk_camera_metadata_enum_bgservice_prerelease; + +// MTK EIS +typedef enum mtk_camera_metadata_enum_eis_mode { + MTK_EIS_FEATURE_EIS_MODE_OFF = 0, + MTK_EIS_FEATURE_EIS_MODE_ON = 1, +} mtk_camera_metadata_enum_eis_mode_t; + +typedef enum mtk_camera_metadata_enum_preview_eis { + MTK_EIS_FEATURE_PREVIEW_EIS_OFF = 0, + MTK_EIS_FEATURE_PREVIEW_EIS_ON = 1, +} mtk_camera_metadata_enum_preview_eis_t; + +typedef enum mtk_camera_metadata_enum_record_state { + // STATE_PREVIEW : It can be video preview or recording stop state, decided by has encode buffer or not + MTK_STREAMING_FEATURE_RECORD_STATE_PREVIEW = 0, + MTK_STREAMING_FEATURE_RECORD_STATE_RECORD, +} mtk_camera_metadata_enum_record_state_t; + +typedef enum mtk_camera_metadata_enum_streaming_hfps_mode { + MTK_STREAMING_FEATURE_HFPS_MODE_NORMAL = 0, + MTK_STREAMING_FEATURE_HFPS_MODE_60FPS, +} mtk_camera_metadata_enum_streaming_hfps_mode_t; + +typedef enum mtk_camera_metadata_enum_streaming_crop_outer_lines_enable { + MTK_STREAMING_FEATURE_CROP_OUTER_LINES_OFF = 0, + MTK_STREAMING_FEATURE_CROP_OUTER_LINES_ON, +} mtk_camera_metadata_enum_streaming_crop_outer_lines_enable_t; + +// MTK_VSDOF_FEATURE_DENOISE_MODE +typedef enum mtk_camera_metadata_enum_vsdof_feature_denoise_mode { + MTK_VSDOF_FEATURE_DENOISE_OFF = 0, + MTK_VSDOF_FEATURE_DENOISE_ON, +} mtk_camera_metadata_enum_vsdof_feature_denoise_mode_t; + +// MTK_VSDOF_FEATURE_3RDPARTY_MODE +typedef enum mtk_camera_metadata_enum_vsdof_feature_3rdparty_mode { + MTK_VSDOF_FEATURE_3RDPARTY_OFF = 0, + MTK_VSDOF_FEATURE_3RDPARTY_ON, +} mtk_camera_metadata_enum_vsdof_feature_3rdparty_mode_t; + +//MTK_VSDOF_FEATURE_PREVIEW_ENABLE +typedef enum mtk_camera_metadata_enum_vsdof_feature_preview_enable { + MTK_VSDOF_FEATURE_PREVIEW_OFF = 0, + 
MTK_VSDOF_FEATURE_PREVIEW_ON, +} mtk_camera_metadata_enum_vsdof_feature_preview_enable_t; + +//MTK_LOGICALMULTICAMERA_SENSOR_SYNC_TYPE +typedef enum mtk_camera_metadata_enum_android_logicalmulticamera_sensor_sync_type { + MTK_LOGICALMULTICAMERA_SENSOR_SYNC_TYPE_APPROXIMATE, + MTK_LOGICALMULTICAMERA_SENSOR_SYNC_TYPE_CALIBRATED, +} mtk_camera_metadata_enum_android_logicalmulticamera_sensor_sync_type_t; + +//MTK_MULTI_CAM_FEATURE +typedef enum mtk_camera_metadata_enum_multi_cam_mode { + MTK_MULTI_CAM_FEATURE_MODE_ZOOM = 0, + MTK_MULTI_CAM_FEATURE_MODE_VSDOF, + MTK_MULTI_CAM_FEATURE_MODE_DENOISE, +} mtk_camera_metadata_enum_multi_cam_mode_t; + +//MTK_MULTI_CAM_FEATURE_PREVIEW_MODE +typedef enum mtk_camera_metadata_enum_vsdof_feature_preview_mode { + MTK_VSDOF_FEATURE_PREVIEW_MODE_FULL = 0, + MTK_VSDOF_FEATURE_PREVIEW_MODE_HALF, +} mtk_camera_metadata_enum_stereo_vsdof_preview_mode; + +//MTK_CONTROL_CAPTURE_SINGLE_YUV_NR_MODE +typedef enum mtk_camera_metadata_enum_single_yuvnr_mode { + MTK_CONTROL_CAPTURE_SINGLE_YUV_NR_MODE_OFF = 0, + MTK_CONTROL_CAPTURE_SINGLE_YUV_NR_MODE_ON, +} mtk_camera_metadata_enum_single_yuvnr_mode_t; + +//MTK_CONTROL_CAPTURE_INSENSORZOOM_MODE +typedef enum mtk_camera_metadata_enum_insensorzoom_mode { + MTK_CONTROL_CAPTURE_INSENSORZOOM_MODE_OFF = 0, + MTK_CONTROL_CAPTURE_INSENSORZOOM_MODE_ON, +} mtk_camera_metadata_enum_insensorzoom_mode_t; +//MTK_SINGLEHW_SETTING_MODULE +typedef enum mtk_camera_metadata_enum_singlehw_module { + MTK_SINGLEHW_SETTING_MODULE_NONE = 0, + MTK_SINGLEHW_SETTING_MODULE_MDP, + MTK_SINGLEHW_SETTING_MODULE_WPE, +} mtk_camera_metadata_enum_singlehw_module_t; + +//MTK_SINGLEHW_SETTING_TRANSFORM +typedef enum mtk_camera_metadata_enum_singlehw_transform { + MTK_SINGLEHW_SETTING_TRANSFORM_NONE = 0, + MTK_SINGLEHW_SETTING_TRANSFORM_FLIP_H, + MTK_SINGLEHW_SETTING_TRANSFORM_FLIP_V, + MTK_SINGLEHW_SETTING_TRANSFORM_ROT_90, + MTK_SINGLEHW_SETTING_TRANSFORM_ROT_180, + MTK_SINGLEHW_SETTING_TRANSFORM_ROT_270, + MTK_SINGLEHW_SETTING_TRANSFORM_ROT_90_FLIP_H, + MTK_SINGLEHW_SETTING_TRANSFORM_ROT_90_FLIP_V, +} mtk_camera_metadata_enum_singlehw_transform_t; + +//MTK_ABF_FEATURE_ABF_MODE +typedef enum mtk_camera_metadata_enum_afb { + MTK_ABF_FEATURE_ABF_OFF = 0, + MTK_ABF_FEATURE_ABF_ON, +} mtk_camera_metadata_enum_afb_t; + +#endif diff --git a/app/src/main/cpp/camera2/mtk_platform_metadata_tag.h b/app/src/main/cpp/camera2/mtk_platform_metadata_tag.h new file mode 100644 index 00000000..4d0ab352 --- /dev/null +++ b/app/src/main/cpp/camera2/mtk_platform_metadata_tag.h @@ -0,0 +1,724 @@ +/* Copyright Statement: + * + * This software/firmware and related documentation ("MediaTek Software") are + * protected under relevant copyright laws. The information contained herein is + * confidential and proprietary to MediaTek Inc. and/or its licensors. Without + * the prior written permission of MediaTek inc. and/or its licensors, any + * reproduction, modification, use or disclosure of MediaTek Software, and + * information contained herein, in whole or in part, shall be strictly + * prohibited. + * + * MediaTek Inc. (C) 2010. All rights reserved. + * + * BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES + * THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE") + * RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER + * ON AN "AS-IS" BASIS ONLY. 
MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL + * WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED + * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR + * NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH + * RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY, + * INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES + * TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO. + * RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO + * OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK + * SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE + * RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR + * STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S + * ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE + * RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE + * MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE + * CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE. + * + * The following software/firmware and/or related documentation ("MediaTek + * Software") have been modified by MediaTek Inc. All revisions are subject to + * any receiver's applicable license agreements with MediaTek Inc. + */ + +#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_ +#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_ + +/****************************************************************************** + * + ******************************************************************************/ +typedef enum mtk_platform_metadata_section { + MTK_HAL_REQUEST = 0xC000, // MTK HAL internal metadata become from 0xC000 0000 + MTK_P1NODE, + MTK_P2NODE, + MTK_3A_TUNINING, + MTK_3A_EXIF, + MTK_MF_EXIF, + MTK_EIS, + MTK_STEREO, + MTK_FRAMESYNC, + MTK_VHDR, + MTK_PIPELINE, + MTK_NR, + MTK_PLUGIN, + MTK_DUALZOOM, + MTK_FEATUREPIPE, + MTK_POSTPROC, + MTK_FEATURE, + MTK_FSC, +} mtk_platform_metadata_section_t; + + +/****************************************************************************** + * + ******************************************************************************/ +typedef enum mtk_platform_metadata_section_start { + MTK_HAL_REQUEST_START = MTK_HAL_REQUEST << 16, + MTK_P1NODE_START = MTK_P1NODE << 16, + MTK_P2NODE_START = MTK_P2NODE << 16, + MTK_3A_TUNINING_START = MTK_3A_TUNINING << 16, + MTK_3A_EXIF_START = MTK_3A_EXIF << 16, + MTK_EIS_START = MTK_EIS << 16, + MTK_STEREO_START = MTK_STEREO << 16, + MTK_FRAMESYNC_START = MTK_FRAMESYNC << 16, + MTK_VHDR_START = MTK_VHDR << 16, + MTK_PIPELINE_START = MTK_PIPELINE << 16, + MTK_NR_START = MTK_NR << 16, + MTK_PLUGIN_START = MTK_PLUGIN << 16, + MTK_DUALZOOM_START = MTK_DUALZOOM << 16, + MTK_FEATUREPIPE_START = MTK_FEATUREPIPE << 16, + MTK_POSTPROC_START = MTK_POSTPROC << 16, + MTK_FEATURE_START = MTK_FEATURE << 16, + MTK_FSC_START = MTK_FSC << 16, +} mtk_platform_metadata_section_start_t; + + +/****************************************************************************** + * + ******************************************************************************/ +typedef enum mtk_platform_metadata_tag { + MTK_HAL_REQUEST_REQUIRE_EXIF = MTK_HAL_REQUEST_START, //MUINT8 + MTK_HAL_REQUEST_DUMP_EXIF, //MUINT8 + MTK_HAL_REQUEST_REPEAT, //MUINT8 + MTK_HAL_REQUEST_DUMMY, //MUINT8 + 
MTK_HAL_REQUEST_SENSOR_SIZE, //MSize + MTK_HAL_REQUEST_SENSOR_ID, //MINT32 + MTK_HAL_REQUEST_DEVICE_ID, //MINT32 + MTK_HAL_REQUEST_HIGH_QUALITY_CAP, //MUINT8 + MTK_HAL_REQUEST_ISO_SPEED, //MINT32 + MTK_HAL_REQUEST_BRIGHTNESS_MODE, //MINT32 + MTK_HAL_REQUEST_CONTRAST_MODE, //MINT32 + MTK_HAL_REQUEST_HUE_MODE, //MINT32 + MTK_HAL_REQUEST_SATURATION_MODE, //MINT32 + MTK_HAL_REQUEST_EDGE_MODE, //MINT32 + MTK_HAL_REQUEST_PASS1_DISABLE, //MINT32 + MTK_HAL_REQUEST_ERROR_FRAME, // used for error handling //MUINT8 + MTK_HAL_REQUEST_PRECAPTURE_START, // 4cell //MUINT8 + MTK_HAL_REQUEST_AF_TRIGGER_START, // 4cell //MUINT8 + MTK_HAL_REQUEST_IMG_IMGO_FORMAT, //MINT32 + MTK_HAL_REQUEST_IMG_RRZO_FORMAT, //MINT32 + MTK_HAL_REQUEST_INDEX, //MINT32 + MTK_HAL_REQUEST_COUNT, //MINT32 + MTK_HAL_REQUEST_SMVR_FPS, //MUINT8 // 0: NOT batch request + MTK_HAL_REQUEST_REMOSAIC_ENABLE, //MUINT8 // 0: preview mode 1: capture mode + MTK_HAL_REQUEST_INDEX_BSS, //MINT32 + MTK_HAL_REQUEST_ZSD_CAPTURE_INTENT, //MUINT8 + MTK_HAL_REQUEST_REAL_CAPTURE_SIZE, //MSize + MTK_HAL_REQUEST_VIDEO_SIZE, //MSize + MTK_HAL_REQUEST_RAW_IMAGE_INFO, //MINT32 // index[0]: raw fmt, index[1]: raw stride, index[2]: raw size(width), index[3]: raw size(height) + MTK_HAL_REQUEST_ISP_PIPELINE_MODE, //MINT32 + MTK_P1NODE_SCALAR_CROP_REGION = MTK_P1NODE_START, //MRect + MTK_P1NODE_BIN_CROP_REGION, //MRect + MTK_P1NODE_DMA_CROP_REGION, //MRect + MTK_P1NODE_BIN_SIZE, //MSize + MTK_P1NODE_RESIZER_SIZE, //MSize + MTK_P1NODE_RESIZER_SET_SIZE, //MSize + MTK_P1NODE_CTRL_RESIZE_FLUSH, //MBOOL + MTK_P1NODE_CTRL_READOUT_FLUSH, //MBOOL + MTK_P1NODE_CTRL_RECONFIG_SENSOR_SETTING, //MBOOL + MTK_P1NODE_PROCESSOR_MAGICNUM, //MINT32 + MTK_P1NODE_MIN_FRM_DURATION, //MINT64 + MTK_P1NODE_RAW_TYPE, //MINT32 + MTK_P1NODE_SENSOR_CROP_REGION, //MRect + MTK_P1NODE_YUV_RESIZER1_CROP_REGION, //MRect + MTK_P1NODE_YUV_RESIZER2_CROP_REGION, //MRect + MTK_P1NODE_YUV_RESIZER1_SIZE, //MSize + MTK_P1NODE_SENSOR_MODE, //MINT32 + MTK_P1NODE_SENSOR_VHDR_MODE, //MINT32 + MTK_P1NODE_METADATA_TAG_INDEX, //MINT32 + MTK_P1NODE_RSS_SIZE, //MSize + MTK_P1NODE_SENSOR_STATUS, //MINT32 + MTK_P1NODE_SENSOR_RAW_ORDER, //MINT32 + MTK_P1NODE_TWIN_SWITCH, //MINT32 + MTK_P1NODE_TWIN_STATUS, //MINT32 + MTK_P1NODE_RESIZE_QUALITY_SWITCH, //MINT32 + MTK_P1NODE_RESIZE_QUALITY_STATUS, //MINT32 + MTK_P1NODE_RESIZE_QUALITY_LEVEL, //MINT32 + MTK_P1NODE_RESIZE_QUALITY_SWITCHING, //MBOOL + MTK_P1NODE_RESUME_SHUTTER_TIME_US, //MINT32 + MTK_P1NODE_FRAME_START_TIMESTAMP, //MINT64 + MTK_P1NODE_FRAME_START_TIMESTAMP_BOOT, //MINT64 + MTK_P1NODE_REQUEST_PROCESSED_WITHOUT_WB, //MBOOL + MTK_P1NODE_ISNEED_GMV, //MBOOL + MTK_P2NODE_HIGH_SPEED_VDO_FPS = MTK_P2NODE_START, //MINT32 + MTK_P2NODE_HIGH_SPEED_VDO_SIZE, //MSize + MTK_P2NODE_CTRL_CALTM_ENABLE, //MBOOL + MTK_P2NODE_FD_CROP_REGION, //MRect + MTK_P2NODE_CROP_REGION, //MRect // for removing black edge + MTK_P2NODE_DSDN_ENABLE, //MBOOL // for DSDN on/off controled by Policy + MTK_P2NODE_SENSOR_CROP_REGION, //MRect + MTK_3A_AE_HIGH_ISO_BINNING, //MBOOL // for 3HDR high iso binning mode + MTK_SENSOR_SCALER_CROP_REGION, //MRect + MTK_PROCESSOR_CAMINFO = MTK_3A_TUNINING_START, //IMemory + MTK_ISP_ATMS_MAPPING_INFO, //IMemory + MTK_3A_ISP_PROFILE, //MUINT8 + MTK_3A_ISP_P1_PROFILE, //MUINT8 + MTK_CAMINFO_LCSOUT_INFO, //IMemory + MTK_3A_ISP_BYPASS_LCE, //MBOOL + MTK_3A_ISP_DISABLE_NR, //MBOOL + MTK_3A_ISP_NR3D_SW_PARAMS, //MINT32[14] //GMVX, GMVY, confX, confY, MAX_GMV, frameReset, GMV_Status,ISO_cutoff + MTK_3A_ISP_NR3D_HW_PARAMS, //IMemory + MTK_3A_ISP_LCE_GAIN, //MINT32, 
bits[0:15]: LCE gain, bits[16:31]: LCE gain confidence ratio (0-100) + MTK_3A_ISP_FUS_NUM, //MINT32 + MTK_3A_AE_CAP_PARAM, //IMemory + MTK_3A_AE_CAP_SINGLE_FRAME_HDR, //MUINT8 + MTK_3A_AE_BV_TRIGGER, //MBOOL + MTK_3A_AF_LENS_POSITION, //MINT32 + MTK_3A_FLICKER_RESULT, //MINT32 + MTK_3A_DUMMY_BEFORE_REQUEST_FRAME, //MBOOL // Dummy frame before capture, only for capture intent, preview don't use + MTK_3A_DUMMY_AFTER_REQUEST_FRAME, //MBOOL // Dummy frame after capture, only for capture intent, preview don't use + MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MAX, //MINT32 + MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MIN, //MINT32 + MTK_3A_MANUAL_AWB_COLORTEMPERATURE, //MINT32 + MTK_3A_HDR_MODE, //MUINT8 + MTK_3A_AE_HDR_MIXED_ISO, //MUINT32 + MTK_3A_AE_ZSL_STABLE, //MINT32 ( MBOOL ) + MTK_3A_PGN_ENABLE, //MUINT8 + MTK_3A_SKIP_HIGH_QUALITY_CAPTURE, //MUINT8 + MTK_3A_AI_SHUTTER, //MBOOL + MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, //MINT32 + MTK_3A_FEATURE_AE_TARGET_MODE, //MINT32 + MTK_3A_OPEN_ID, //MINT32 + MTK_LSC_TBL_DATA, //IMemory + MTK_LSC_TSF_DATA, //IMemory + MTK_LSC_TSF_DUMP_NO, //IMemory + MTK_ISP_P2_ORIGINAL_SIZE, //MSize + MTK_ISP_P2_CROP_REGION, //MRect + MTK_ISP_P2_RESIZER_SIZE, //MSize + MTK_ISP_P2_IN_IMG_FMT, //MINT32, 0 or not exist: RAW->YUV, 1: YUV->YUV + MTK_ISP_P2_TUNING_UPDATE_MODE, //MUINT8, [0 or not exist]: as default; [1]: keep existed parameters but some parts will be updated; [2]: keep all existed parameters (force mode) [3] LPCNR Pass1 [4] LPCNR Pass2 + MTK_ISP_P2_IN_IMG_RES_REVISED, //MINT32, describes P2 input image revised resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist. + MTK_ISP_APP_TARGET_SIZE, //MINT32, describes APP Target resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist. + MTK_MSF_SCALE_INDEX, //MINT32, which scale stage index, would only exist with scaling flow + MTK_MSF_FRAME_NUM, //MINT32, After BSS which frame number is this stage using + MTK_TOTAL_MULTI_FRAME_NUM, //MINT32, MSYUV fuction used this input to know frame nunber + MTK_TOTAL_MULTI_FRAME_NUM_CAPTURED, //MINT32, MSF function used + MTK_SW_DSDN_VERSION, //MINT32, distinguish different dsdn version + MTK_ISP_COLOR_SPACE, //MINT32 + MTK_ISP_DRC_CURVE, //IMemory + MTK_ISP_DRC_CURVE_SIZE, //MINT32 + MTK_ISP_FEO_DATA, //IMemory + MTK_ISP_FEO_ENABLE, //MINT32 + MTK_ISP_FEO_INFO, //IMemory + MTK_ISP_HLR_RATIO, //MINT32, which is a HDR ratio applied in HLR + MTK_ISP_STAGE, //MINT32 + MTK_FOCUS_AREA_POSITION, //MINT32 + MTK_FOCUS_AREA_SIZE, //MSize + MTK_FOCUS_AREA_RESULT, //MUINT8 + MTK_FOCUS_PAUSE, //MUINT8 + MTK_FOCUS_MZ_ON, //MUINT8 + MTK_3A_AF_FOCUS_VALUE, //MINT64 + MTK_3A_PRV_CROP_REGION, //MRect + MTK_3A_ISP_MDP_TARGET_SIZE, //MSize + MTK_3A_REPEAT_RESULT, //MUINT8 + MTK_3A_SKIP_PRECAPTURE, //MBOOL //if CUST_ENABLE_FLASH_DURING_TOUCH is true, MW can skip precapture + MTK_3A_SKIP_BAD_FRAME, //MBOOL + MTK_3A_FLARE_IN_MANUAL_CTRL_ENABLE, //MBOOL + MTK_3A_DYNAMIC_SUBSAMPLE_COUNT, //MINT32 30fps = 1, 60fps = 2, ... 
, 120fps = 4 + MTK_3A_AE_LV_VALUE, //MINT32 + MTK_APP_CONTROL, //MINT32 + MTK_3A_CUST_PARAMS, //IMemory + MTK_3A_SETTING_CUST_PARAMS, //IMemory + MTK_3A_PERFRAME_INFO, //IMemory + MTK_SENSOR_MODE_INFO_ACTIVE_ARRAY_CROP_REGION, //MRect + MTK_3A_AE_BV, //MINT32 + MTK_3A_AE_CWV, //MINT32 + MTK_ISP_P2_PROCESSED_RAW, //MINT32 + MTK_3A_EXIF_METADATA = MTK_3A_EXIF_START, //IMetadata + MTK_EIS_REGION = MTK_EIS_START, //MINT32 + MTK_EIS_INFO, //MINT64 + MTK_EIS_VIDEO_SIZE, //MRect + MTK_EIS_NEED_OVERRIDE_TIMESTAMP, //MBOOL + MTK_EIS_LMV_DATA, //IMemory + MTK_STEREO_JPS_MAIN1_CROP = MTK_STEREO_START, //MRect + MTK_STEREO_JPS_MAIN2_CROP, //MRect + MTK_STEREO_SYNC2A_MODE, //MINT32 + MTK_STEREO_SYNCAF_MODE, //MINT32 + MTK_STEREO_HW_FRM_SYNC_MODE, //MINT32 + MTK_STEREO_NOTIFY, //MINT32 + MTK_STEREO_SYNC2A_MASTER_SLAVE, //MINT32[2] + MTK_STEREO_SYNC2A_STATUS, //IMemory + MTK_JPG_ENCODE_TYPE, //MINT8 + MTK_CONVERGENCE_DEPTH_OFFSET, //MFLOAT + MTK_N3D_WARPING_MATRIX_SIZE, //MUINT32 + MTK_P1NODE_MAIN2_HAL_META, //IMetadata + MTK_P2NODE_BOKEH_ISP_PROFILE, //MUINT8 + MTK_STEREO_FEATURE_DENOISE_MODE, //MINT32 + MTK_STEREO_FEATURE_SENSOR_PROFILE, //MINT32 + MTK_P1NODE_MAIN2_APP_META, //IMetadata + MTK_STEREO_FEATURE_OPEN_ID, //MINT32 + MTK_STEREO_FRAME_PER_CAPTURE, //MINT32 + MTK_STEREO_ENABLE_MFB, //MINT32 + MTK_STEREO_BSS_RESULT, //MINT32 + MTK_STEREO_FEATURE_FOV_CROP_REGION, //MINT32[6] // p.x, p.y, p.w, p.h, srcW, srcH + MTK_STEREO_DCMF_FEATURE_MODE, //MINT32 // mtk_platform_metadata_enum_dcmf_feature_mode + MTK_STEREO_HDR_EV, //MINT32 + MTK_STEREO_DELAY_FRAME_COUNT, //MINT32 + MTK_STEREO_DCMF_DEPTHMAP_SIZE, //MSize + MTK_STEREO_WITH_CAMSV, //MBOOL + MTK_FRAMESYNC_ID = MTK_FRAMESYNC_START, //MINT32 + MTK_FRAMESYNC_TOLERANCE, //MINT64 + MTK_FRAMESYNC_FAILHANDLE, //MINT32 + MTK_FRAMESYNC_RESULT, //MINT64 + MTK_FRAMESYNC_TYPE, //MINT32 + MTK_FRAMESYNC_MODE, //MUINT8 + MTK_VHDR_LCEI_DATA = MTK_VHDR_START, //Memory + MTK_VHDR_IMGO_3A_ISP_PROFILE, //MUINT8 + MTK_HDR_FEATURE_HDR_HAL_MODE, + MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM, + MTK_VHDR_MULTIFRAME_TIMESTAMP, //MINT64 + MTK_VHDR_MULTIFRAME_EXPOSURE_TIME, //MINT64 + MTK_PIPELINE_UNIQUE_KEY = MTK_PIPELINE_START, //MINT32 + MTK_PIPELINE_FRAME_NUMBER, //MINT32 + MTK_PIPELINE_REQUEST_NUMBER, //MINT32 + MTK_PIPELINE_EV_VALUE, //MINT32 + MTK_PIPELINE_DUMP_UNIQUE_KEY, //MINT32 + MTK_PIPELINE_DUMP_FRAME_NUMBER, //MINT32 + MTK_PIPELINE_DUMP_REQUEST_NUMBER, //MINT32 + MTK_PIPELINE_VIDEO_RECORD, //MINT32 + MTK_NR_MODE = MTK_NR_START, //MINT32 + MTK_NR_MNR_THRESHOLD_ISO, //MINT32 + MTK_NR_SWNR_THRESHOLD_ISO, //MINT32 + MTK_REAL_LV, //MINT32 + MTK_ANALOG_GAIN, //MUINT32 + MTK_AWB_RGAIN, //MINT32 + MTK_AWB_GGAIN, //MINT32 + MTK_AWB_BGAIN, //MINT32 + MTK_PLUGIN_MODE = MTK_PLUGIN_START, //MINT64 + MTK_PLUGIN_COMBINATION_KEY, //MINT64 + MTK_PLUGIN_P2_COMBINATION, //MINT64 + MTK_PLUGIN_PROCESSED_FRAME_COUNT, //MINT32 + MTK_PLUGIN_CUSTOM_HINT, //MINT32 + MTK_PLUGIN_DETACT_JOB_SYNC_TOKEN, //MINT64, may be not exists. 
+ MTK_PLUGIN_UNIQUEKEY, + MTK_DUALZOOM_DROP_REQ = MTK_DUALZOOM_START, //MINT32 + MTK_DUALZOOM_FORCE_ENABLE_P2, //MINT32 + MTK_DUALZOOM_DO_FRAME_SYNC, //MINT32 + MTK_DUALZOOM_ZOOM_FACTOR, //MINT32 + MTK_DUALZOOM_DO_FOV, //MINT32 + MTK_DUALZOOM_FOV_RECT_INFO, //MINT32 + MTK_DUALZOOM_FOV_CALB_INFO, //MINT32 + MTK_DUALZOOM_FOV_MARGIN_PIXEL, //MSize + MTK_DUALCAM_AF_STATE, //MUINT8 + MTK_DUALCAM_LENS_STATE, //MUINT8 + MTK_DUALCAM_TIMESTAMP, //MINT64 + MTK_DUALZOOM_3DNR_MODE, //MINT32 + MTK_DUALZOOM_ZOOMRATIO, //MINT32 + MTK_DUALZOOM_CENTER_SHIFT, //MINT32 + MTK_DUALZOOM_FOV_RATIO, //MFLOAT + MTK_DUALZOOM_REAL_MASTER, //MINT32 + MTK_DUALZOOM_FD_TARGET_MASTER, //MINT32 + MTK_DUALZOOM_FD_REAL_MASTER, //MINT32 // maybe not set + MTK_LMV_SEND_SWITCH_OUT, //MINT32 + MTK_LMV_SWITCH_OUT_RESULT, //MINT32 + MTK_LMV_VALIDITY, //MINT32 + MTK_VSDOF_P1_MAIN1_ISO, //MINT32 + MTK_DUALZOOM_IS_STANDBY, //MBOOL + MTK_DUALZOOM_CAP_CROP, //MRect + MTK_DUALZOOM_MASTER_UPDATE_MODE, //MBOOL + MTK_DUALZOOM_STREAMING_NR, //MINT32 + MTK_FEATUREPIPE_APP_MODE = MTK_FEATUREPIPE_START, //MINT32 + MTK_POSTPROC_TYPE = MTK_POSTPROC_START, //MINT32 + MTK_FEATURE_STREAMING = MTK_FEATURE_START, //MINT64 + MTK_FEATURE_CAPTURE, //MINT64 + MTK_FEATURE_CAPTURE_PHYSICAL, //MINT64 + MTK_FEATURE_FREE_MEMORY_MBYTE, //MINT32 + MTK_FEATURE_MFNR_NVRAM_QUERY_INDEX, //MINT32 + MTK_FEATURE_MFNR_NVRAM_DECISION_ISO, //MINT32 + MTK_FEATURE_MFNR_TUNING_INDEX_HINT, //MINT64 + MTK_FEATURE_MFNR_FINAL_EXP, //MINT32 + MTK_FEATURE_MFNR_OPEN_ID, //MINT32 + MTK_FEATURE_AINR_MDLA_MODE, //MINT32 + MTK_ISP_AINR_MDLA_MODE, //MINT32 + MTK_ISP_LTM_BIT_MODE, //MINT32 + MTK_FEATURE_BSS_SELECTED_FRAME_COUNT, //MINT32 + MTK_FEATURE_BSS_FORCE_DROP_NUM, //MINT32 + MTK_FEATURE_BSS_FIXED_LSC_TBL_DATA, //MUINT8 + MTK_FEATURE_BSS_PROCESS, //MINT32 + MTK_FEATURE_BSS_ISGOLDEN, //MBOOL + MTK_FEATURE_BSS_REORDER, //MBOOL + MTK_FEATURE_BSS_MANUAL_ORDER, //MUINT8 + MTK_FEATURE_BSS_RRZO_DATA, //MUINT8 + MTK_FEATURE_BSS_DOWNSAMPLE, //MBOOL + MTK_FEATURE_PACK_RRZO, //MUINT8 + MTK_FEATURE_FACE_RECTANGLES, //MRect array + MTK_FEATURE_FACE_POSE_ORIENTATIONS, //MINT32[n*3] array, each struct include: xAsix, yAsix, zAsix + MTK_FEATURE_CAP_YUV_PROCESSING, //MUINT8 + MTK_FEATURE_CAP_PIPE_DCE_CONTROL, //MUINT8 + MTK_FEATURE_MULTIFRAMENODE_BYPASSED, //MUINT8 + MTK_FEATURE_FACE_APPLIED_GAMMA, //MINT32 + MTK_FEATURE_CAP_PQ_USERID, //MINT64 + MTK_FEATURE_FLIP_IN_P2A, //MINT32 + MTK_FSC_CROP_DATA = MTK_FSC_START, //IMemory + MTK_FSC_WARP_DATA, //IMemory + MTK_STAGGER_ME_META, //IMetadata + MTK_STAGGER_SE_META, //IMetadata + MTK_STAGGER_BLOB_IMGO_ORDER //MUINT8 +} mtk_platform_metadata_tag_t; + + +/****************************************************************************** + * + ******************************************************************************/ +typedef enum mtk_platform_3a_exif_metadata_tag { + MTK_3A_EXIF_FNUMBER, //MINT32 + MTK_3A_EXIF_FOCAL_LENGTH, //MINT32 + MTK_3A_EXIF_FOCAL_LENGTH_35MM, //MINT32 + MTK_3A_EXIF_SCENE_MODE, //MINT32 + MTK_3A_EXIF_AWB_MODE, //MINT32 + MTK_3A_EXIF_LIGHT_SOURCE, //MINT32 + MTK_3A_EXIF_EXP_PROGRAM, //MINT32 + MTK_3A_EXIF_SCENE_CAP_TYPE, //MINT32 + MTK_3A_EXIF_FLASH_LIGHT_TIME_US, //MINT32 + MTK_3A_EXIF_AE_METER_MODE, //MINT32 + MTK_3A_EXIF_AE_EXP_BIAS, //MINT32 + MTK_3A_EXIF_CAP_EXPOSURE_TIME, //MINT32 + MTK_3A_EXIF_AE_ISO_SPEED, //MINT32 + MTK_3A_EXIF_REAL_ISO_VALUE, //MINT32 + MTK_3A_EXIF_AE_BRIGHTNESS_VALUE, //MINT32 + MTK_3A_EXIF_FLASH_FIRING_STATUS, //MINT32 + MTK_3A_EXIF_FLASH_RETURN_DETECTION, //MINT32 + MTK_3A_EXIF_FLASH_MODE, //MINT32 + 
MTK_3A_EXIF_FLASH_FUNCTION, //MINT32 + MTK_3A_EXIF_FLASH_REDEYE, //MINT32 + MTK_3A_EXIF_DEBUGINFO_BEGIN, // debug info begin + // key: MINT32 + MTK_3A_EXIF_DBGINFO_AAA_KEY = MTK_3A_EXIF_DEBUGINFO_BEGIN, //MINT32 + MTK_3A_EXIF_DBGINFO_AAA_DATA, + MTK_3A_EXIF_DBGINFO_SDINFO_KEY, + MTK_3A_EXIF_DBGINFO_SDINFO_DATA, + MTK_3A_EXIF_DBGINFO_ISP_KEY, + MTK_3A_EXIF_DBGINFO_ISP_DATA, + // + MTK_CMN_EXIF_DBGINFO_KEY, + MTK_CMN_EXIF_DBGINFO_DATA, + // + MTK_MF_EXIF_DBGINFO_MF_KEY, + MTK_MF_EXIF_DBGINFO_MF_DATA, + // + MTK_N3D_EXIF_DBGINFO_KEY, + MTK_N3D_EXIF_DBGINFO_DATA, + // + MTK_POSTNR_EXIF_DBGINFO_NR_KEY, + MTK_POSTNR_EXIF_DBGINFO_NR_DATA, + // + MTK_RESVB_EXIF_DBGINFO_KEY, + MTK_RESVB_EXIF_DBGINFO_DATA, + // + MTK_RESVC_EXIF_DBGINFO_KEY, + MTK_RESVC_EXIF_DBGINFO_DATA, + // data: Memory + MTK_3A_EXIF_DEBUGINFO_END, // debug info end +} mtk_platform_3a_exif_metadata_tag_t; + +// MTK_3A_FEATURE_AE_EXPOSURE_LEVEL +typedef enum mtk_camera_metadata_enum_ae_exposure_level { + MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NONE = 0, + MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_SHORT, + MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NORMAL, + MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG, +} mtk_camera_metadata_enum_ae_exposure_level_t; + +// MTK_3A_FEATURE_AE_TARGET_MODE +typedef enum mtk_camera_metadata_enum_ae_target_mode { + MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL = 0, + MTK_3A_FEATURE_AE_TARGET_MODE_IVHDR, + MTK_3A_FEATURE_AE_TARGET_MODE_MVHDR, + MTK_3A_FEATURE_AE_TARGET_MODE_ZVHDR, + MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX, + MTK_3A_FEATURE_AE_TARGET_MODE_SE_FIX, + MTK_3A_FEATURE_AE_TARGET_MODE_4CELL_MVHDR, + MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR, + MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR_RTO1X, + MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_2EXP, + MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP, +} mtk_camera_metadata_enum_ae_target_mode_t; + +//MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM +typedef enum mtk_camera_metadata_enum_stagger_valid_exposure_num { + MTK_STAGGER_VALID_EXPOSURE_NON = 0, + MTK_STAGGER_VALID_EXPOSURE_1 = 1, + MTK_STAGGER_VALID_EXPOSURE_2 = 2, + MTK_STAGGER_VALID_EXPOSURE_3 = 3 +} mtk_camera_metadata_enum_stagger_valid_exposure_num_t; + +//MTK_3A_ISP_FUS_NUM +typedef enum mtk_camera_metadata_enum_3a_isp_fus_num { + MTK_3A_ISP_FUS_NUM_NON = 0, + MTK_3A_ISP_FUS_NUM_1 = 1, + MTK_3A_ISP_FUS_NUM_2 = 2, + MTK_3A_ISP_FUS_NUM_3 = 3, +} mtk_camera_metadata_enum_3a_isp_fus_num_t; + +/****************************************************************************** + * + ******************************************************************************/ +typedef enum mtk_platform_metadata_enum_nr_mode { + MTK_NR_MODE_OFF = 0, + MTK_NR_MODE_MNR, + MTK_NR_MODE_SWNR, + MTK_NR_MODE_AUTO +} mtk_platform_metadata_enum_nr_mode_t; + +typedef enum mtk_platform_metadata_enum_mfb_mode { + MTK_MFB_MODE_OFF = 0, + MTK_MFB_MODE_MFLL, + MTK_MFB_MODE_AIS, + MTK_MFB_MODE_NUM, +} mtk_platform_metadata_enum_mfb_mode_t; + +typedef enum mtk_platform_metadata_enum_custom_hint { + MTK_CUSTOM_HINT_0 = 0, + MTK_CUSTOM_HINT_1, + MTK_CUSTOM_HINT_2, + MTK_CUSTOM_HINT_3, + MTK_CUSTOM_HINT_4, + MTK_CUSTOM_HINT_NUM, +} mtk_platform_metadata_enum_custom_hint_t; + +typedef enum mtk_platform_metadata_enum_plugin_mode { + MTK_PLUGIN_MODE_COMBINATION = 1 << 0, + MTK_PLUGIN_MODE_NR = 1 << 1, + MTK_PLUGIN_MODE_HDR = 1 << 2, + MTK_PLUGIN_MODE_MFNR = 1 << 3, + MTK_PLUGIN_MODE_COPY = 1 << 4, + MTK_PLUGIN_MODE_TEST_PRV = 1 << 5, + MTK_PLUGIN_MODE_BMDN = 1 << 6, + MTK_PLUGIN_MODE_MFHR = 1 << 7, + MTK_PLUGIN_MODE_BMDN_3rdParty = 1 << 8, + MTK_PLUGIN_MODE_MFHR_3rdParty = 1 << 9, + 
MTK_PLUGIN_MODE_FUSION_3rdParty = 1 << 10, + MTK_PLUGIN_MODE_VSDOF_3rdParty = 1 << 11, + MTK_PLUGIN_MODE_COLLECT = 1 << 12, + MTK_PLUGIN_MODE_HDR_3RD_PARTY = 1 << 13, + MTK_PLUGIN_MODE_MFNR_3RD_PARTY = 1 << 14, + MTK_PLUGIN_MODE_BOKEH_3RD_PARTY = 1 << 15, + MTK_PLUGIN_MODE_DCMF_3RD_PARTY = 1 << 16, +} mtk_platform_metadata_enum_plugin_mode_t; + +typedef enum mtk_platform_metadata_enum_p2_plugin_combination { + MTK_P2_RAW_PROCESSOR = 1 << 0, + MTK_P2_ISP_PROCESSOR = 1 << 1, + MTK_P2_YUV_PROCESSOR = 1 << 2, + MTK_P2_MDP_PROCESSOR = 1 << 3, + MTK_P2_CAPTURE_REQUEST = 1 << 4, + MTK_P2_PREVIEW_REQUEST = 1 << 5 +} mtk_platform_metadata_enum_p2_plugin_combination; + +typedef enum mtk_platform_metadata_enum_isp_color_space { + MTK_ISP_COLOR_SPACE_SRGB = 0 , + MTK_ISP_COLOR_SPACE_DISPLAY_P3 = 1 , + MTK_ISP_COLOR_SPACE_CUSTOM_1 = 2 +} mtk_platform_metadata_enum_isp_color_space; + +typedef enum mtk_platform_metadata_enum_dualzoom_drop_req { + MTK_DUALZOOM_DROP_NEVER_DROP = 0, + MTK_DUALZOOM_DROP_NONE = 1, + MTK_DUALZOOM_DROP_DIRECTLY = 2, + MTK_DUALZOOM_DROP_NEED_P1, + MTK_DUALZOOM_DROP_NEED_SYNCMGR, + MTK_DUALZOOM_DROP_NEED_SYNCMGR_NEED_STREAM_F_PIPE, +} mtk_platform_metadata_enum_dualzoom_drop_req_t; + +typedef enum mtk_platform_metadata_enum_p1_sensor_status { + MTK_P1_SENSOR_STATUS_NONE = 0, + MTK_P1_SENSOR_STATUS_STREAMING = 1, + MTK_P1_SENSOR_STATUS_SW_STANDBY = 2, + MTK_P1_SENSOR_STATUS_HW_STANDBY = 3, +} mtk_platform_metadata_enum_p1_sensor_status_t; + +typedef enum mtk_platform_metadata_enum_p1_twin_switch { + MTK_P1_TWIN_SWITCH_NONE = 0, + MTK_P1_TWIN_SWITCH_ONE_TG = 1, + MTK_P1_TWIN_SWITCH_TWO_TG = 2 +} mtk_platform_metadata_enum_p1_twin_switch_t; + +typedef enum mtk_platform_metadata_enum_p1_twin_status { + MTK_P1_TWIN_STATUS_NONE = 0, + MTK_P1_TWIN_STATUS_TG_MODE_1 = 1, + MTK_P1_TWIN_STATUS_TG_MODE_2 = 2, + MTK_P1_TWIN_STATUS_TG_MODE_3 = 3, +} mtk_platform_metadata_enum_p1_twin_status_t; + +typedef enum mtk_platform_metadata_enum_p1_resize_quality_switch { + MTK_P1_RESIZE_QUALITY_SWITCH_NONE = 0, + MTK_P1_RESIZE_QUALITY_SWITCH_L_L = 1, + MTK_P1_RESIZE_QUALITY_SWITCH_L_H = 2, + MTK_P1_RESIZE_QUALITY_SWITCH_H_L = 3, + MTK_P1_RESIZE_QUALITY_SWITCH_H_H = 4, +} mtk_platform_metadata_enum_p1_resize_quality_switch_t; + +typedef enum mtk_platform_metadata_enum_p1_resize_quality_status { + MTK_P1_RESIZE_QUALITY_STATUS_NONE = 0, + MTK_P1_RESIZE_QUALITY_STATUS_ACCEPT = 1, + MTK_P1_RESIZE_QUALITY_STATUS_IGNORE = 2, + MTK_P1_RESIZE_QUALITY_STATUS_REJECT = 3, + MTK_P1_RESIZE_QUALITY_STATUS_ILLEGAL = 4, +} mtk_platform_metadata_enum_p1_resize_quality_status_t; + +typedef enum mtk_platform_metadata_enum_p1_resize_quality_level { + MTK_P1_RESIZE_QUALITY_LEVEL_UNKNOWN = 0, + MTK_P1_RESIZE_QUALITY_LEVEL_L = 1, + MTK_P1_RESIZE_QUALITY_LEVEL_H = 2, +} mtk_platform_metadata_enum_p1_resize_quality_level_t; + +typedef enum mtk_platform_metadata_enum_lmv_result { + MTK_LMV_RESULT_OK = 0, + MTK_LMV_RESULT_FAILED, + MTK_LMV_RESULT_SWITCHING +} mtk_platform_metadata_enum_lmv_result_t; + +typedef enum mtk_platform_metadata_enum_featurepipe_app_mode { + MTK_FEATUREPIPE_PHOTO_PREVIEW = 0, + MTK_FEATUREPIPE_VIDEO_PREVIEW = 1, + MTK_FEATUREPIPE_VIDEO_RECORD = 2, + MTK_FEATUREPIPE_VIDEO_STOP = 3, +} mtk_platform_metadata_enum_featurepipe_app_mode_t; + +typedef enum mtk_platform_metadata_enum_dcmf_feature_mode { + MTK_DCMF_FEATURE_BOKEH = 0, + MTK_DCMF_FEATURE_MFNR_BOKEH = 1, + MTK_DCMF_FEATURE_HDR_BOKEH = 2, +} mtk_platform_metadata_enum_dcmf_feature_mode_t; + +typedef enum mtk_platform_metadata_enum_smvr_fps { + 
MTK_SMVR_FPS_30 = 0, + MTK_SMVR_FPS_120 = 1, + MTK_SMVR_FPS_240 = 2, + MTK_SMVR_FPS_480 = 3, + MTK_SMVR_FPS_960 = 4, +} mtk_platform_metadata_enum_smvr_fps_t; + +//MTK_FRAMESYNC_FAILHANDLE +typedef enum mtk_platform_metadata_enum_fremesync_failhandle { + MTK_FRAMESYNC_FAILHANDLE_CONTINUE, + MTK_FRAMESYNC_FAILHANDLE_DROP, +} mtk_platform_metadata_enum_fremesync_failhandle_t; + +//MTK_FRAMESYNC_RESULT +typedef enum mtk_platform_metadata_enum_fremesync_result { + MTK_FRAMESYNC_RESULT_PASS, + MTK_FRAMESYNC_RESULT_FAIL_CONTINUE, + MTK_FRAMESYNC_RESULT_FAIL_DROP, +} mtk_platform_metadata_enum_fremesync_result_t; + +//MTK_FRAMESYNC_MODE +typedef enum mtk_platform_metadata_enum_fremesync_mode { + MTK_FRAMESYNC_MODE_VSYNC_ALIGNMENT, + MTK_FRAMESYNC_MODE_READOUT_CENTER_ALIGNMENT, +} mtk_platform_metadata_enum_fremesync_mode_t; + +//MTK_FEATURE_MULTIFRAMENODE_BYPASSED +typedef enum mtk_platform_metadata_enum_multiframenode_bypassed { + MTK_FEATURE_MULTIFRAMENODE_NOT_BYPASSED = 0, + MTK_FEATURE_MULTIFRAMENODE_TO_BE_BYPASSED = 1 +} mtk_platform_metadata_enum_mfllnode_bypassed_t; + +//MTK_FEATURE_BSS_PROCESS +typedef enum mtk_platform_metadata_enum_bss_processing { + MTK_FEATURE_BSS_PROCESS_ENABLE = 0, + MTK_FEATURE_BSS_PROCESS_DISABLE = 1 +} mtk_platform_metadata_enum_bss_processing_t; + +//MTK_FEATURE_BSS_MANUAL_ORDER +typedef enum mtk_platform_metadata_enum_bss_manual_order { + MTK_FEATURE_BSS_MANUAL_ORDER_OFF = 0, + MTK_FEATURE_BSS_MANUAL_ORDER_GOLDEN = 1 +} mtk_platform_metadata_enum_bss_manual_order_t; + +//MTK_FEATURE_CAP_YUV_PROCESSING +typedef enum mtk_platform_metadata_enum_cap_yuv_processing { + MTK_FEATURE_CAP_YUV_PROCESSING_NOT_NEEDED = 0, + MTK_FEATURE_CAP_YUV_PROCESSING_NEEDED = 1 +} mtk_platform_metadata_enum_cap_yuv_processing_t; + +//MTK_FEATURE_CAP_PIPE_DCE_CONTROL +typedef enum mtk_platform_metadata_enum_cap_pipe_control { + MTK_FEATURE_CAP_PIPE_DCE_ENABLE_BUT_NOT_APPLY = 2, + MTK_FEATURE_CAP_PIPE_DCE_MANUAL_DISABLE = 1, + MTK_FEATURE_CAP_PIPE_DCE_DEFAULT_APPLY = 0 +} mtk_platform_metadata_enum_cap_pipe_dce_control_t; + +// MTK_FEATURE_AINR_MDLA_MODE, MTK_ISP_AINR_MDLA_MODE +typedef enum mtk_platform_metadata_enum_ainr_mdla_mode { + MTK_FEATURE_AINR_MDLA_MODE_NONE = 0, + MTK_FEATURE_AINR_MDLA_MODE_DRCOUT_16BIT = 1, + MTK_FEATURE_AINR_MDLA_MODE_NNOUT_12BIT = 2, + MTK_FEATURE_AINR_MDLA_MODE_NNOUT_16BIT = 3, +} mtk_platform_metadata_enum_ainr_mdla_mode_t; + +//MTK_ISP_P2_PROCESSED_RAW +typedef enum mtk_platform_metadata_enum_p2_processed_raw { + MTK_ISP_P2_PROCESSED_RAW_NOT_NEEDED = 0, + MTK_ISP_P2_PROCESSED_RAW_NEEDED = 1 +} mtk_platform_metadata_enum_p2_processed_raw_t; + +//MTK_DUALZOOM_STREAMING_NR +typedef enum mtk_platform_metadata_enum_dualzoom_streaming_nr { + MTK_DUALZOOM_STREAMING_NR_AUTO = 0, + MTK_DUALZOOM_STREAMING_NR_OFF = 1 +} mtk_platform_metadata_enum_dualzoom_streaming_nr_t; + +//MTK_STAGGER_BLOB_IMGO_ORDER +typedef enum mtk_platform_metadata_enum_stagger_blob_imgo_order { + MTK_STAGGER_IMGO_NONE = 0, + MTK_STAGGER_IMGO_NE = 1, + MTK_STAGGER_IMGO_ME = 2, + MTK_STAGGER_IMGO_SE = 3 +} mtk_platform_metadata_enum_stagger_blob_imgo_order_t; + +//MTK_3A_EXIF_FLASH_FIRING_STATUS +typedef enum mtk_platform_metadata_enum_3a_exif_flash_firing_status_t { + MTK_3A_EXIF_FLASH_FIRING_STATUS_NOT_FIRED = 0, + MTK_3A_EXIF_FLASH_FIRING_STATUS_FIRED = 1, +} mtk_platform_metadata_enum_3a_exif_flash_firing_status_t; + +//MTK_3A_EXIF_FLASH_RETURN_DETECTION +typedef enum mtk_platform_metadata_enum_3a_exif_flash_return_detection_t { + MTK_3A_EXIF_FLASH_RETURN_DETECTION_NOT_SUPPORT = 0, + 
MTK_3A_EXIF_FLASH_RETURN_DETECTION_RESERVED = 1, + MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_NOT_DETECTED = 2, + MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_DETECTED = 3, +} mtk_platform_metadata_enum_3a_exif_flash_return_detection_t; + +//MTK_3A_EXIF_FLASH_MODE +typedef enum mtk_platform_metadata_enum_3a_exif_flash_mode_t { + MTK_3A_EXIF_FLASH_MODE_UNKNOWN = 0, + MTK_3A_EXIF_FLASH_MODE_COMPULSORY_FIRING = 1, + MTK_3A_EXIF_FLASH_MODE_COMPULSORY_SUPPRESSION = 2, + MTK_3A_EXIF_FLASH_MODE_AUTO = 3, +} mtk_platform_metadata_enum_3a_exif_flash_mode_t; + +//MTK_3A_EXIF_FLASH_FUNCTION +typedef enum mtk_platform_metadata_enum_3a_exif_flash_function_t { + MTK_3A_EXIF_FLASH_FUNCTION_SUPPORT = 0, + MTK_3A_EXIF_FLASH_FUNCTION_NOT_SUPPORT = 1, +} mtk_platform_metadata_enum_3a_exif_flash_function_t; + +//MTK_3A_EXIF_FLASH_REDEYE +typedef enum mtk_platform_metadata_enum_3a_exif_flash_redeye_t { + MTK_3A_EXIF_FLASH_REDEYE_NOT_SUPPORT = 0, + MTK_3A_EXIF_FLASH_REDEYE_SUPPORT = 1, +} mtk_platform_metadata_enum_3a_exif_flash_redeye_t; + +//MTK_FEATURE_ABF +typedef enum mtk_platform_metadata_enum_abf_mode { + MTK_ABF_MODE_OFF = 0, + MTK_ABF_MODE_ON, +} mtk_platform_metadata_enum_abf_mode_t; + +#endif diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp index c7ac545e..fd1d32b8 100644 --- a/app/src/main/cpp/camera2/ndkcamera.cpp +++ b/app/src/main/cpp/camera2/ndkcamera.cpp @@ -17,7 +17,9 @@ #include #include #include +#include #include +#include #include #include #include @@ -28,10 +30,62 @@ #include #include "DngCreator.h" +#include "mtk_platform_metadata_tag.h" +#include "mtk_metadata_tag.h" + +void saveYuvToFile(AImage* image, const std::string& filePath) { + + int32_t width, height; + AImage_getWidth(image, &width); + AImage_getHeight(image, &height); + + // Fetch the YUV plane data + uint8_t* yPlane = nullptr; + uint8_t* uPlane = nullptr; + uint8_t* vPlane = nullptr; + int yLength, uLength, vLength; + + AImage_getPlaneData(image, 0, &yPlane, &yLength); // Y plane + AImage_getPlaneData(image, 1, &uPlane, &uLength); // U plane + AImage_getPlaneData(image, 2, &vPlane, &vLength); // V plane + + int32_t yStride, uStride, vStride; + AImage_getPlaneRowStride(image, 0, &yStride); // Y plane row stride + AImage_getPlaneRowStride(image, 1, &uStride); // U plane row stride + AImage_getPlaneRowStride(image, 2, &vStride); // V plane row stride + + + + // Open the output file + std::ofstream file(filePath, std::ios::binary); + if (!file.is_open()) { + // Failed to open the file + return; + } + + // Write the Y plane (row by row, honoring the row stride) + for (int i = 0; i < height; i++) { + file.write(reinterpret_cast<const char*>(yPlane + i * yStride), width); + } + + // Write the U plane (row by row, honoring the row stride) + for (int i = 0; i < height / 2; i++) { + file.write(reinterpret_cast<const char*>(uPlane + i * uStride), width / 2); + } + + // Write the V plane (row by row, honoring the row stride) + for (int i = 0; i < height / 2; i++) { + file.write(reinterpret_cast<const char*>(vPlane + i * vStride), width / 2); + } + // Close the file + file.close(); +} + #ifdef _DEBUG void Auto_AImage_delete(AImage* image) { + XYLOG(XYLOG_SEVERITY_DEBUG,"delete image"); AImage_delete(image); } #else @@ -158,6 +212,10 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA camera_orientation = 0; m_params = params; + if (m_params.burstCaptures == 0) +{ + m_params.burstCaptures = 1; + } m_firstFrame = true; m_photoTaken = false; mWidth = width; @@ -165,6 +223,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA mCaptureTriggered = false; mFocusTriggered = false; + mCaptureDispatched = false; maxFrameDuration = 0; afSupported = false; @@
-178,6 +237,8 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA numberOfPrecaptures = 0; m_precaptureStartTime = 0; + m_minTimestamp = 0; + activeArraySize[0] = 0; activeArraySize[1] = 0; @@ -194,36 +255,29 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA mPreviewOutputTarget = NULL; mPreviewSessionOutput = NULL; + camera_device = 0; + mImageReader = NULL; mImageWindow = NULL; mOutputTarget = NULL; mSessionOutput = NULL; - mImageReader2 = NULL; - mImageWindow2 = NULL; - mOutputTarget2 = NULL; - mSessionOutput2 = NULL; - - camera_device = 0; - capture_session_output_container = 0; capture_session = 0; lightDetected = false; + mStableFrameCount = 0; + mResult = { 0 }; mLdr = ~0; mFinalLdr = 0; - mFinalBurstCaptures = m_params.burstRawCapture == 0 ? 1 : m_params.burstCaptures; - if (mFinalBurstCaptures == 0) - { - mFinalBurstCaptures = 1; - } - mFinalOutputFormat = (m_params.burstRawCapture == 0) ? AIMAGE_FORMAT_YUV_420_888 : AIMAGE_FORMAT_RAW16; + mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888; } NdkCamera::~NdkCamera() { + XYLOG(XYLOG_SEVERITY_DEBUG, "NdkCamera::~NdkCamera %s", mCameraId.c_str()); close(); } @@ -297,7 +351,7 @@ int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, in } int NdkCamera::open(const std::string& cameraId) { - XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str()); + // XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str()); // camera_facing = _camera_facing; @@ -513,16 +567,7 @@ int NdkCamera::open(const std::string& cameraId) { if (status == ACAMERA_OK) { exposureRange.min_ = val.data.i64[0]; - if (exposureRange.min_ < kMinExposureTime) - { - exposureRange.min_ = kMinExposureTime; - } exposureRange.max_ = val.data.i64[1]; - if (exposureRange.max_ > kMaxExposureTime) - { - exposureRange.max_ = kMaxExposureTime; - } - // exposureTime = exposureRange.value(2); } else { @@ -665,7 +710,7 @@ int NdkCamera::open(const std::string& cameraId) { } } - XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height()); + XYLOG(XYLOG_SEVERITY_DEBUG, "CAM Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height()); status = ACaptureSessionOutputContainer_create(&capture_session_output_container); @@ -711,26 +756,6 @@ int NdkCamera::open(const std::string& cameraId) { status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput); - - if (m_params.burstRawCapture == 1) // Auto - { - mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures, &mImageReader2); - if (mstatus == AMEDIA_OK) - { - AImageReader_ImageListener listener; - listener.context = this; - listener.onImageAvailable = ::onImageAvailable; - mstatus = AImageReader_setImageListener(mImageReader2, &listener); - mstatus = AImageReader_getWindow(mImageReader2, &mImageWindow2); - ANativeWindow_acquire(mImageWindow2); - } - status = ACameraOutputTarget_create(mImageWindow2, &mOutputTarget2); - - status = ACaptureSessionOutput_create(mImageWindow2, &mSessionOutput2); - status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput2); - } - - CaptureRequest *request = CreateRequest(true); mCaptureRequests.push_back(request); @@ -741,7 +766,11 @@ int NdkCamera::open(const std::string& cameraId) { camera_capture_session_state_callbacks.onReady 
= ::onSessionReady; camera_capture_session_state_callbacks.onClosed = onSessionClosed; status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); - AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureSession, status=%d", status); + if (status != ACAMERA_OK) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to call ACameraDevice_createCaptureSession, status=%d", status); + return 1; + } ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks; capture_session_capture_callbacks.context = this; @@ -764,22 +793,36 @@ int NdkCamera::open(const std::string& cameraId) { return status == ACAMERA_OK ? 0 : 1; } -NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) +NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest, int32_t sensitivity/* = -1*/) { camera_status_t status = ACAMERA_OK; CaptureRequest *request = new CaptureRequest(); std::memset(request, 0, sizeof(CaptureRequest)); - bool autoSwitchToOneFrame = (m_params.burstRawCapture == 1) && (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888); request->pThis = this; - request->imageReader = isPreviewRequest ? mPreviewImageReader : (autoSwitchToOneFrame ? mImageReader2 : mImageReader); - request->imageWindow = isPreviewRequest ? mPreviewImageWindow : (autoSwitchToOneFrame ? mImageWindow2 : mImageWindow); - request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : (autoSwitchToOneFrame ? mOutputTarget2 : mOutputTarget); - request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : (autoSwitchToOneFrame ? mSessionOutput2 : mSessionOutput); + request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader; + request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow; + request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget; + request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput; request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; + // request->templateId = (ACameraDevice_request_template)m_params.requestTemplate; + uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate); + +#if 0 + bool forceToPreview = false; + if (!isPreviewRequest && sensitivity >= 150 && sensitivity <= 400 && (m_params.burstRawCapture == 2 || m_params.burstRawCapture == 3)) + { + if (request->templateId == TEMPLATE_STILL_CAPTURE) + { + request->templateId = TEMPLATE_PREVIEW; + captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW; + forceToPreview = true; - // mCaptureRequests.push_back(request); + XYLOG(XYLOG_SEVERITY_WARNING, "Force to use preview mode to avoid pink issue ISO=%d CameraId=%s", sensitivity, mCameraId.c_str()); + } + } +#endif // capture request status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request); @@ -790,7 +833,6 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO; status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode); - uint8_t captureIntent = isPreviewRequest ? 
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate); status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent); uint8_t flashMode = ACAMERA_FLASH_MODE_OFF; @@ -861,7 +903,17 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode); // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_); - if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) + if (m_params.minFps != 0) + { + int32_t fpsRange[2] = {m_params.minFps, 60}; + // status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set ACAMERA_CONTROL_AE_TARGET_FPS_RANGE: %d", status); + } + } + + if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) { int32_t compensation = m_params.compensation; if (compensation < aeCompensationRange.min_) @@ -970,6 +1022,46 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) // status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput); // status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput); + { +#if 0 + uint8_t colorMode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_COLOR_CORRECTION_MODE, 1, &colorMode); + + // 设置均衡的RGGB增益 + float rggbGains[4] = {1.0f, 1.0f, 1.0f, 1.0f}; + status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_GAINS, 4, rggbGains); + + // 设置单位色彩变换矩阵 + float colorMatrix[9] = { + 1.0f, 0.0f, 0.0f, + 0.0f, 1.0f, 0.0f, + 0.0f, 0.0f, 1.0f + }; + status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_TRANSFORM, 9, colorMatrix); +#endif + + if (m_params.burstRawCapture == 1) + { + SetupHDR(mCharacteristics.get(), request->request, sensitivity); + } + if (m_params.burstRawCapture == 2) + { + SetupMFNR(mCharacteristics.get(), request->request, false, sensitivity); + } + else if (m_params.burstRawCapture == 3) + { + SetupMFNR(mCharacteristics.get(), request->request, true, sensitivity); + } + else if (m_params.burstRawCapture == 4) + { + Setup3DNR(mCharacteristics.get(), request->request, sensitivity); + } + else if (m_params.burstRawCapture == 5) + { + SetupTonemapCurve(mCharacteristics.get(), request->request); + } + } + return request; } @@ -998,7 +1090,10 @@ void NdkCamera::close() } */ + mPreviewResults.reset(); + mCaptureResults.clear(); mCaptureFrames.clear(); + mCaptureResultMap.clear(); if ((ACameraManager *)camera_manager != NULL) { @@ -1007,7 +1102,8 @@ void NdkCamera::close() if (capture_session) { - // res = ACameraCaptureSession_stopRepeating(capture_session); + res = ACameraCaptureSession_stopRepeating(capture_session); + std::this_thread::sleep_for(std::chrono::milliseconds(512)); ACameraCaptureSession_close(capture_session); capture_session = 0; } @@ -1049,12 +1145,24 @@ void NdkCamera::close() if (mPreviewImageReader != NULL) { +#ifdef _DEBUG + ALOGD("Will Free mPreviewImageReader"); +#endif + AImage* image = NULL; + media_status_t mstatus; + while ((mstatus = AImageReader_acquireNextImage(mPreviewImageReader, &image)) == AMEDIA_OK) + { + AImage_delete(image); + image = NULL; + } 
AImageReader_setImageListener(mPreviewImageReader, NULL); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); AImageReader_delete(mPreviewImageReader); //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); - mPreviewImageReader = 0; +#ifdef _DEBUG + ALOGD("After Free mPreviewImageReader"); +#endif } if (mOutputTarget != NULL) @@ -1071,36 +1179,27 @@ void NdkCamera::close() if (mImageReader != NULL) { +#ifdef _DEBUG + ALOGD("Will Free mImageReader"); +#endif + AImage* image = NULL; + media_status_t mstatus; + while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK) + { + AImage_delete(image); + image = NULL; + } AImageReader_setImageListener(mImageReader, NULL); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); AImageReader_delete(mImageReader); - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); + //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); mImageReader = 0; +#ifdef _DEBUG + ALOGD("After Free mImageReader"); +#endif } - - if (mOutputTarget2 != NULL) - { - ACameraOutputTarget_free(mOutputTarget2); - mOutputTarget2 = 0; - } - - if (mImageWindow2 != NULL) - { - ANativeWindow_release(mImageWindow2); - mImageWindow2 = 0; - } - - if (mImageReader2 != NULL) - { - AImageReader_setImageListener(mImageReader2, NULL); - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); - AImageReader_delete(mImageReader2); - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); - - mImageReader2 = 0; - } - if (mPreviewSessionOutput != NULL) { if (capture_session_output_container) @@ -1120,15 +1219,6 @@ void NdkCamera::close() ACaptureSessionOutput_free(mSessionOutput); mSessionOutput = 0; } - if (mSessionOutput2 != NULL) - { - if (capture_session_output_container) - { - ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput2); - } - ACaptureSessionOutput_free(mSessionOutput2); - mSessionOutput2 = 0; - } if (capture_session_output_container) { @@ -1188,126 +1278,169 @@ void NdkCamera::onImageAvailable(AImageReader* reader) else { uint32_t burstCaptures = getBurstCaptures(); + uint64_t ts = GetMicroTimeStamp(); + size_t expectedTimes = mCaptureRequests.size() - 1; if (burstCaptures == 0) { burstCaptures = 1; } - if (burstCaptures == 1) + if (true) { - mstatus = AImageReader_acquireNextImage(reader, &image); - if (mstatus != AMEDIA_OK) + while (1) { - // https://stackoverflow.com/questions/67063562 - if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) + mstatus = AImageReader_acquireNextImage(reader, &image); + if (mstatus != AMEDIA_OK) { - if (mCaptureFrames.size() < burstCaptures) + // https://stackoverflow.com/questions/67063562 + if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) { - XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus); + if (mCaptureFrames.size() < burstCaptures) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus); + } } + break; } - return; - } - unsigned long long ts = GetMicroTimeStamp(); + int32_t format; + mstatus = AImage_getFormat(image, &format); - int32_t format; - mstatus = AImage_getFormat(image, &format); - - if (format == AIMAGE_FORMAT_YUV_420_888) - { - int32_t width; - int32_t height; - mstatus = AImage_getWidth(image, &width); - mstatus = 
AImage_getHeight(image, &height); - - int32_t y_pixelStride = 0; - int32_t u_pixelStride = 0; - int32_t v_pixelStride = 0; - AImage_getPlanePixelStride(image, 0, &y_pixelStride); - AImage_getPlanePixelStride(image, 1, &u_pixelStride); - AImage_getPlanePixelStride(image, 2, &v_pixelStride); - - int32_t y_rowStride = 0; - int32_t u_rowStride = 0; - int32_t v_rowStride = 0; - AImage_getPlaneRowStride(image, 0, &y_rowStride); - AImage_getPlaneRowStride(image, 1, &u_rowStride); - AImage_getPlaneRowStride(image, 2, &v_rowStride); - - uint8_t* y_data = 0; - uint8_t* u_data = 0; - uint8_t* v_data = 0; - int y_len = 0; - int u_len = 0; - int v_len = 0; - AImage_getPlaneData(image, 0, &y_data, &y_len); - AImage_getPlaneData(image, 1, &u_data, &u_len); - AImage_getPlaneData(image, 2, &v_data, &v_len); - - if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) - { - // already nv21 - ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation, - camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame); - } - else + cv::Mat frame; + if (format == AIMAGE_FORMAT_YUV_420_888) { - // construct nv21 - uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; + int32_t width; + int32_t height; + mstatus = AImage_getWidth(image, &width); + mstatus = AImage_getHeight(image, &height); + + int32_t y_pixelStride = 0; + int32_t u_pixelStride = 0; + int32_t v_pixelStride = 0; + AImage_getPlanePixelStride(image, 0, &y_pixelStride); + AImage_getPlanePixelStride(image, 1, &u_pixelStride); + AImage_getPlanePixelStride(image, 2, &v_pixelStride); + + int32_t y_rowStride = 0; + int32_t u_rowStride = 0; + int32_t v_rowStride = 0; + AImage_getPlaneRowStride(image, 0, &y_rowStride); + AImage_getPlaneRowStride(image, 1, &u_rowStride); + AImage_getPlaneRowStride(image, 2, &v_rowStride); + + uint8_t* y_data = 0; + uint8_t* u_data = 0; + uint8_t* v_data = 0; + int y_len = 0; + int u_len = 0; + int v_len = 0; + AImage_getPlaneData(image, 0, &y_data, &y_len); + AImage_getPlaneData(image, 1, &u_data, &u_len); + AImage_getPlaneData(image, 2, &v_data, &v_len); + + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) + { + // already nv21 + ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation, + camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); + } + else { - // Y - uint8_t* yptr = nv21; - for (int y = 0; y < height; y++) + // construct nv21 + uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; { - const uint8_t* y_data_ptr = y_data + y_rowStride * y; - for (int x = 0; x < width; x++) + // Y + uint8_t* yptr = nv21; + for (int y = 0; y < height; y++) { - yptr[0] = y_data_ptr[0]; - yptr++; - y_data_ptr += y_pixelStride; + const uint8_t* y_data_ptr = y_data + y_rowStride * y; + for (int x = 0; x < width; x++) + { + yptr[0] = y_data_ptr[0]; + yptr++; + y_data_ptr += y_pixelStride; + } } - } - // UV - uint8_t* uvptr = nv21 + width * height; - for (int y = 0; y < height / 2; y++) - { - const uint8_t* v_data_ptr = v_data + v_rowStride * y; - const uint8_t* u_data_ptr = u_data + u_rowStride * y; - for (int x = 0; x < width / 2; x++) + // UV + uint8_t* uvptr = nv21 + width * height; + for (int y = 0; y < height / 2; y++) { - uvptr[0] = v_data_ptr[0]; 
- uvptr[1] = u_data_ptr[0]; - uvptr += 2; - v_data_ptr += v_pixelStride; - u_data_ptr += u_pixelStride; + const uint8_t* v_data_ptr = v_data + v_rowStride * y; + const uint8_t* u_data_ptr = u_data + u_rowStride * y; + for (int x = 0; x < width / 2; x++) + { + uvptr[0] = v_data_ptr[0]; + uvptr[1] = u_data_ptr[0]; + uvptr += 2; + v_data_ptr += v_pixelStride; + u_data_ptr += u_pixelStride; + } } } + + ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation, + camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); + + delete[] nv21; } + } + m_photoTaken = true; - ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation, - camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame); + int64_t frameTs = 0; + mstatus = AImage_getTimestamp(image, &frameTs); - delete[] nv21; +#ifdef OUTPUT_DBG_INFO +#if 0 + if (mWidth == 1920) + { + std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", time(NULL)); + std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt; + fileName += "_" + mCameraId + std::to_string(frameTs) + ".yuv"; + saveYuvToFile(image, fileName.c_str()); } - } - m_photoTaken = true; +#endif +#endif + AImage_delete(image); - AImage_delete(image); + bool captureCompleted = false; + bool captureDispatchable = false; + m_locker.lock(); + if (!frame.empty()) + { + mOneFrame.push_back(std::make_pair<>(frameTs, frame)); + } + if (mOneFrame.size() >= expectedTimes) + { + bool allExisted = true; + for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame) + { + if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend()) + { + allExisted = false; + break; + } + } + if (allExisted) + { + captureCompleted = true; + } + } - std::shared_ptr result; - bool captureCompleted = false; - m_locker.lock(); - if (!mCaptureResults.empty()) - { - captureCompleted = true; - result = mCaptureResults[0]; - } - m_locker.unlock(); + if (captureCompleted && !mCaptureDispatched) + { + mCaptureDispatched = true; + captureDispatchable = true; + } + m_locker.unlock(); - if (captureCompleted) - { - onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame); + if (captureCompleted && captureDispatchable) + { + XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable"); + camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session); + FireOneCapture(ts); + // onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame); + break; + } } } else @@ -1337,17 +1470,22 @@ void NdkCamera::onImageAvailable(AImageReader* reader) } bool captureCompleted = false; - size_t expectedTimes = mCaptureRequests.size() - 1; + bool captureDispatchable = false; + m_locker.lock(); captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes; + if (captureCompleted && !mCaptureDispatched) + { + mCaptureDispatched = true; + captureDispatchable = true; + } m_locker.unlock(); - if (captureCompleted) + if (captureCompleted && captureDispatchable) { FireBurstCapture(); } } - } } @@ -1359,7 +1497,7 @@ void NdkCamera::onDisconnected(ACameraDevice* device) { } -bool NdkCamera::on_image(cv::Mat& rgb) +bool NdkCamera::on_image(cv::Mat rgb) { return false; } @@ -1526,6 +1664,8 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE; uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE; + int32_t sensitivity = -1; + ACameraMetadata_const_entry val = { 0 }; val = { 0 }; 
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); @@ -1539,6 +1679,11 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); + sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : -1; + // XYLOG(XYLOG_SEVERITY_DEBUG, "Preview State AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); // Check if timeout @@ -1576,6 +1721,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque { if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE) { +#if 0 uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); @@ -1583,6 +1729,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)aeState); AASSERT(status == ACAMERA_OK, "Failed to call PRECAPTURE_TRIGGER, status=%d", status); +#endif readyForCapture = false; numberOfPrecaptures = 0; @@ -1654,6 +1801,17 @@ #endif } + if (readyForCapture) + { + mStableFrameCount++; + if (mStableFrameCount >= 3) { // require 3 consecutive stable frames + // proceed with the actual still capture + mStableFrameCount = 0; + } else { + readyForCapture = false; // keep waiting + } + } + if (readyForCapture/* && mCaptureRequests.size() > 1*/) { // Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr @@ -1662,23 +1820,10 @@ mFinalLdr = mLdr; } - XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u", - (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime)); - if (m_params.burstRawCapture == 1) - { - if (mFinalLdr > 50) - { - XYLOG(XYLOG_SEVERITY_WARNING, "Switch to OneFrame Capture(YUV) As LDR=%u", mFinalLdr); - mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888; - mFinalBurstCaptures = 1; - } - } + XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u ISO=%d Time=%u", + (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, sensitivity, (unsigned int)(ts - m_startTime)); uint32_t burstCaptures = getBurstCaptures(); - if (burstCaptures == 0) - { - burstCaptures = 1; - } std::vector<ACaptureRequest*> requests; int sequenceId = 0; @@ -1686,13 +1831,14 @@ for (int idx = 0; idx < burstCaptures; idx++) { - CaptureRequest* request = CreateRequest(false); + CaptureRequest* request = CreateRequest(false, sensitivity); mCaptureRequests.push_back(request); // CopyPreviewRequest(mCaptureRequests[idx]->request, result); requests.push_back(request->request); } - if (m_params.customHdr) +#if 0 + if (m_params.customHdr && burstCaptures > 1) { int32_t hdrStep = m_params.hdrStep; if (hdrStep == 0) @@ -1706,22 +1852,33 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session,
ACaptureReque val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val); - int sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + int32_t sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0; XYLOG(XYLOG_SEVERITY_INFO, "HDR: Base Exp=%lld ISO=%d", exTime / 1000, sensitivity); if (exTime != -1 && sensitivity > 0) { uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF; - for (int idx = 0; idx < burstCaptures; idx++) + // for (int idx = 0; idx < burstCaptures; idx++) { - ACaptureRequest_setEntry_u8(requests[idx], ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff); - int64_t expt = (idx == 0) ? exTime : (exTime * (hdrStep + idx)); - ACaptureRequest_setEntry_i64(requests[idx], ACAMERA_SENSOR_EXPOSURE_TIME, 1, &expt); - ACaptureRequest_setEntry_i32(requests[idx], ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity); - sensitivity = sensitivityRange.min_; + ACaptureRequest_setEntry_u8(requests[burstCaptures - 1], ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff); + // int64_t expt = (idx == 0) ? exTime : (exTime * (hdrStep + idx)); + int64_t expt = exTime + ((exTime * hdrStep) >> 1); + ACaptureRequest_setEntry_i64(requests[burstCaptures - 1], ACAMERA_SENSOR_EXPOSURE_TIME, 1, &expt); + + int32_t newSensitivity = sensitivity - ((sensitivity * hdrStep) >> 2); + if (m_params.sensitivity != 0) + { + newSensitivity = m_params.sensitivity; + } + if (newSensitivity < sensitivityRange.min_) + { + newSensitivity = sensitivityRange.min_; + } + ACaptureRequest_setEntry_i32(requests[burstCaptures - 1], ACAMERA_SENSOR_SENSITIVITY, 1, &newSensitivity); } } } +#endif // ALOGW("Will Stop Repeating Request"); // status = ACameraCaptureSession_stopRepeating(capture_session); @@ -1737,7 +1894,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted; capture_session_capture_cb.onCaptureBufferLost = 0; - int numberOfRequests = requests.size(); + int numberOfRequests = requests.size(); status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb, numberOfRequests, &requests[0], &sequenceId); AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status); @@ -1762,19 +1919,48 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque ACameraMetadata* pCopy = ACameraMetadata_copy(result); bool captureCompleted = false; + bool captureDispatchable = false; size_t expectedTimes = mCaptureRequests.size() - 1; + int64_t resultTimestamp = GetTimestamp(result); std::shared_ptr captureResult(pCopy, ACameraMetadata_free); - if (expectedTimes == 1) + + if (true) { m_locker.lock(); mCaptureResults.push_back(captureResult); - captureCompleted = !mOneFrame.empty(); + mCaptureResultMap[resultTimestamp] = captureResult; + + if (mOneFrame.size() >= expectedTimes) + { + bool allExisted = true; + for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame) + { + if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend()) + { + allExisted = false; + break; + } + } + if (allExisted) + { + captureCompleted = true; + } + } + + if (captureCompleted && !mCaptureDispatched) + { + mCaptureDispatched = true; + captureDispatchable = true; + } m_locker.unlock(); - if (captureCompleted) + if (captureCompleted && captureDispatchable) { - onOneCapture(mCharacteristics, captureResult, mFinalLdr, ts - m_startTime, mOneFrame); + XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted"); + camera_status_t status = 
ACameraCaptureSession_stopRepeating(capture_session); + + FireOneCapture(ts); } } else @@ -1782,9 +1968,14 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque m_locker.lock(); mCaptureResults.push_back(captureResult); captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes; + if (captureCompleted && !mCaptureDispatched) + { + mCaptureDispatched = true; + captureDispatchable = true; + } m_locker.unlock(); - if (captureCompleted) + if (captureCompleted && captureDispatchable) { FireBurstCapture(); } @@ -1793,11 +1984,57 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque } } +int64_t NdkCamera::GetTimestamp(const ACameraMetadata* result) +{ + ACameraMetadata_const_entry entry; + camera_status_t status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_TIMESTAMP, &entry); + + if (status == ACAMERA_OK && entry.count > 0) { + return entry.data.i64[0]; + } + + return 0; +} + +void NdkCamera::FireOneCapture(uint64_t ts) +{ +#ifdef OUTPUT_DBG_INFO +#if 0 + if (mWidth == 1920 && mOneFrame.size() > 1) + { + std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", ts / 1000); + std::vector params; + params.push_back(cv::IMWRITE_JPEG_QUALITY); + params.push_back(50); + + for (auto it = mOneFrame.cbegin(); it != mOneFrame.cend(); ++it) + { + std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt; + size_t idx = std::distance(mOneFrame.cbegin(), it); + std::shared_ptr result = mCaptureResults[idx]; + CAPTURE_RESULT captureResult = { 0 }; + EnumCameraResult(result.get(), captureResult); + + fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg"; + cv::imwrite(fileName, it->second, params); + } + } +#endif + + +#endif + + + onOneCapture(mCharacteristics, mCaptureResultMap[mOneFrame.back().first], mFinalLdr, ts - m_startTime, mOneFrame.back().second); +} + void NdkCamera::FireBurstCapture() { + camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session); + unsigned long long ts = GetMicroTimeStamp(); - size_t expectedTimes = mCaptureRequests.size() - 1; + size_t expectedTimes = getBurstCaptures(); std::vector > captureResults; uint32_t ldr; std::vector > captureFrames; @@ -1876,13 +2113,25 @@ void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetada focusDistance = *val.data.f; } */ + + // 添加AWB和色彩校正参数的复制 + ACameraMetadata_const_entry entry; + if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_GAINS, &entry) == ACAMERA_OK) { + ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, entry.count, entry.data.f); + } + + if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_TRANSFORM, &entry) == ACAMERA_OK) { + ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_TRANSFORM, entry.count, entry.data.f); + } } void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure) { - XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d PhotoTaken=%d", session, request, failure->reason, m_photoTaken ? 1 : 0); + bool isPreview = (request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request); + + XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d CameraId=%s PhotoTaken=%d Preview=%d", session, request, failure->reason, mCameraId.c_str(), m_photoTaken ? 1 : 0, isPreview ? 
1 : 0); - if (failure->sequenceId == mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId) + if (isPreview) { return; } @@ -1942,7 +2191,7 @@ int32_t NdkCamera::getOutputFormat() const int32_t NdkCamera::getBurstCaptures() const { - return mFinalBurstCaptures; + return m_params.burstCaptures; } void NdkCamera::CreateSession(ANativeWindow* previewWindow, @@ -2243,4 +2492,451 @@ void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captur val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val); captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0; + + val = {0}; + status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_DETECTION_RESULT, &val); + ALOGI("HDR Detection Result: %d", val.data.i32[0]); + + val = {0}; + status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_MODE, &val); + if (status == ACAMERA_OK && val.count > 0) { + int32_t appliedHdrMode = val.data.i32[0]; + ALOGI("Applied HDR Mode: %d", appliedHdrMode); + + // Check whether it matches the requested HDR mode + if (appliedHdrMode == MTK_HDR_FEATURE_HDR_MODE_AUTO || + appliedHdrMode == MTK_HDR_FEATURE_HDR_MODE_ON) { + ALOGI("HDR mode successfully applied"); + } + } + + + // Check whether HDR is active (the most important indicator) + // Read MTK_HDR_FEATURE_HDR_HAL_MODE from the capture result + val = {0}; + status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_HAL_MODE, &val); + if (status == ACAMERA_OK && val.count > 0) { + int32_t hdrHalMode = val.data.i32[0]; + ALOGI("HDR HAL Mode: %d", hdrHalMode); + + if (hdrHalMode != MTK_HDR_FEATURE_HDR_HAL_MODE_OFF) { + ALOGI("HDR is actively working on hardware level"); + } + } + + val = {0}; + status = ACameraMetadata_getConstEntry(result, MTK_3A_ISP_FUS_NUM, &val); + if (status == ACAMERA_OK && val.count > 0) { + int32_t fusionFrames = val.data.i32[0]; + ALOGI("Multi-frame fusion count: %d", fusionFrames); + + if (fusionFrames > 1) { + ALOGI("Multi-frame fusion is in use, which usually indicates HDR processing is active"); + } + } + + + int aa = 0; + +} + + +void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity)
+{ + // 1. Set up the basic camera parameters + camera_status_t status; + // __system_property_set("vendor.mfll.force", "1"); +#if 0 + int32_t tagCount = 0; + const uint32_t* tags = nullptr; + ACameraMetadata_getAllTags(characteristics, &tagCount, &tags); + for (int32_t i = 0; i < tagCount; i++) { + if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i]) + { + ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]); + } + } + + ACameraMetadata_const_entry entry; + status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]); + } + } + + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]); + } + } +#endif + + // 2. Set the MediaTek-specific MFNR parameters + // via vendor tag descriptors + // int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR + int32_t mfbMode = ais ? 2 : 1; // 1 Enable MFNR + uint8_t aeMode = MTK_CONTROL_AE_MODE_ON; + // status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode); + + // int32_t mfbMode = ais ? 2 : 1; // 1 Enable MFNR + status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status); + } + + int32_t aeTargetMode = 1; //MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX; + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode); + if (status != ACAMERA_OK) { + ALOGE("Failed to set MTK_3A_FEATURE_AE_TARGET_MODE: %d", status); + } + + // Use the long exposure level (3) + int32_t exposureLevel = MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG; // MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, 1,&exposureLevel); + if (status != ACAMERA_OK) { + ALOGE("Failed to set exposure level: %d", status); + } + + int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR; + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning); + + uint8_t reqRemosaicEnable = 1; + status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status); + } + + if (m_params.compensation != 0) + { + int32_t compensation = m_params.compensation; + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status); + } + } +} + +void NdkCamera::Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity)
+{ + // 1. Set up the basic camera parameters + camera_status_t status; +#if 0 + int32_t tagCount = 0; + const uint32_t* tags = nullptr; + ACameraMetadata_getAllTags(characteristics, &tagCount, &tags); + for (int32_t i = 0; i < tagCount; i++) { + if (MTK_NR_FEATURE_AVAILABLE_3DNR_MODES == tags[i]) + { + ALOGI("MTK_NR_FEATURE_AVAILABLE_3DNR_MODES Tag ID: 0x%x\n", tags[i]); + } + } + + ACameraMetadata_const_entry entry; + status = ACameraMetadata_getConstEntry(characteristics, MTK_NR_FEATURE_AVAILABLE_3DNR_MODES, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_NR_FEATURE_AVAILABLE_3DNR_MODES: 0x%x\n", entry.data.i32[i]); + } + } +#endif + + int32_t nrMode = MTK_NR_FEATURE_3DNR_MODE_ON; + status = ACaptureRequest_setEntry_i32(request, MTK_NR_FEATURE_3DNR_MODE, 1, &nrMode); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_NR_FEATURE_3DNR_MODE, status: %d", status); + } + + uint8_t reqRemosaicEnable = 1; + status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status); + } + + if (m_params.compensation != 0) + { + int32_t compensation = m_params.compensation; + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status); + } + } +} + +void NdkCamera::SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity)
+{ + // 1. Set up the basic camera parameters + camera_status_t status; + __system_property_set("vendor.forceset.hdrmode", "1"); +#if 1 + + // First check whether the camera supports HDR at all + ACameraMetadata_const_entry entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO, &entry); + if (status == ACAMERA_OK) { + bool hdrSupported = false; + for (int i = 0; i < entry.count; i++) { + ALOGI("Supported HDR mode: 0x%x", entry.data.i32[i]); + if (entry.data.i32[i] == MTK_HDR_FEATURE_HDR_MODE_AUTO || + entry.data.i32[i] == MTK_HDR_FEATURE_HDR_MODE_ON) { + hdrSupported = true; + } + } + + if (!hdrSupported) { + ALOGI("Warning: the camera supports neither AUTO nor ON HDR mode"); + return; + } + } else { + ALOGI("Warning: failed to query the list of supported HDR modes"); + } + + int32_t tagCount = 0; + const uint32_t* tags = nullptr; + ACameraMetadata_getAllTags(characteristics, &tagCount, &tags); + for (int32_t i = 0; i < tagCount; i++) { + if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i]) + { + ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]); + } + } + + entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]); + } + } + + +#endif + + entry = { 0 }; + status = ACameraMetadata_getConstEntry(characteristics, MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO: 0x%x\n", entry.data.i32[i]); + } + } + + // 2. Set the MediaTek-specific HDR parameters + // via vendor tag descriptors + uint8_t aeMode = MTK_CONTROL_AE_MODE_ON; + status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode); + + uint8_t sceneMode = ACAMERA_CONTROL_SCENE_MODE_HDR; + status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode); + if (status == ACAMERA_OK) { + ALOGI("Scene mode set to HDR"); + + // Enable scene-mode control + uint8_t sceneModeControl = ACAMERA_CONTROL_MODE_USE_SCENE_MODE; + ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &sceneModeControl); + } + + + int32_t hdrMode = MTK_HDR_FEATURE_HDR_MODE_AUTO; // 1 Enable HDR + ALOGI("Try to set MTK_HDR_FEATURE_HDR_MODE = %d", hdrMode); + status = ACaptureRequest_setEntry_i32(request, MTK_HDR_FEATURE_HDR_MODE, 1, &hdrMode); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_HDR_FEATURE_HDR_MODE, status: %d", status); + } + + int32_t halHdrMode = MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_CAPTURE; + status = ACaptureRequest_setEntry_i32(request, MTK_HDR_FEATURE_HDR_HAL_MODE, 1, &halHdrMode); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_HDR_FEATURE_HDR_HAL_MODE, status: %d", status); + } + + // int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_AIHDR; + // status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning); + + uint8_t reqRemosaicEnable = 1; + status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable); + if (status != ACAMERA_OK) + { + ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status); + } + + // Set the AE target mode for HDR + int32_t aeTargetMode = MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL; + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode); + + // Request a higher exposure level + int32_t exposureLevel = MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG; // choose the long exposure mode + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, 1, &exposureLevel); + + int32_t hdrModeParam = 1; // bias towards the highlights + status = ACaptureRequest_setEntry_i32(request, MTK_3A_HDR_MODE, 1, &hdrModeParam); + + if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0) + { + int32_t compensation = m_params.compensation; + if (compensation < aeCompensationRange.min_) + { + compensation = aeCompensationRange.min_; + } + if (compensation > aeCompensationRange.max_) + { + compensation = aeCompensationRange.max_; + } + ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation); + + if (compensation > 0) + { + // Tune the HDR mixed-ISO mode to lift the shadows + int32_t hdrMixedIso = 1; // enable mixed ISO + status = ACaptureRequest_setEntry_i32(request, MTK_3A_AE_HDR_MIXED_ISO, 1, &hdrMixedIso); + + // Lower the highlight-recovery strength to allow more overexposure - this setting is fine + float hlrRatio = 0.3f; + status = ACaptureRequest_setEntry_float(request, MTK_ISP_HLR_RATIO, 1, &hlrRatio); + + int32_t aeTargetMode = MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR; // multi-frame HDR mode + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode); + + int32_t ispBrightness = MTK_CONTROL_ISP_BRIGHTNESS_HIGH; // range is typically 0-10; larger values are brighter + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ISP_BRIGHTNESS, 1, &ispBrightness); + + } + } + + + int aa = 0; +} + +bool NdkCamera::SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request)
+{ + camera_status_t status; + +#if 1 + int32_t tagCount = 0; + const uint32_t* tags = nullptr; + ACameraMetadata_getAllTags(characteristics, &tagCount, &tags); + for (int32_t i = 0; i < tagCount; i++) { + if (MTK_3A_FEATURE_AE_TARGET_MODE == tags[i]) + { + ALOGI("MTK_3A_FEATURE_AE_TARGET_MODE Tag ID: 0x%x\n", tags[i]); + } + } + + ACameraMetadata_const_entry entry; + status = ACameraMetadata_getConstEntry(characteristics, MTK_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, &entry); + if (status == ACAMERA_OK) + { + for (int i = 0; i < entry.count; i++) + { + ALOGI("MTK_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES: %d\n", entry.data.i32[i]); + } + } + + +#endif + // MTK_CONTROL_AE_TARGET_FPS_RANGE + // 4. Build the target FPS range + int32_t fpsRange[2] = {60, 60}; + + // 5. Apply it to the request + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange); + if (status != ACAMERA_OK) { + ALOGE("Failed to set MTK_CONTROL_AE_TARGET_FPS_RANGE: %d", status); + } + + int32_t aeTargetMode = 1; // MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP; + status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode); + if (status != ACAMERA_OK) { + ALOGE("Failed to set MTK_3A_FEATURE_AE_TARGET_MODE: %d", status); + } + + + int32_t zsl = MTK_CONTROL_ENABLE_ZSL_TRUE; + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ENABLE_ZSL, 1, &zsl); + if (status != ACAMERA_OK) { + ALOGE("Failed to set MTK_CONTROL_ENABLE_ZSL: %d", status); + } + + int32_t brightness = MTK_CONTROL_ISP_BRIGHTNESS_LOW; + status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ISP_BRIGHTNESS, 1, &brightness); + if (status != ACAMERA_OK) { + ALOGE("Failed to set MTK_CONTROL_ISP_BRIGHTNESS_LOW: %d", status); + } + +#if 0 + + // Create the tonemap curve points - each point is an (x,y) pair mapping input to output luminance + // This curve compresses the highlights to reduce overexposure + const int numPoints = 5; + float curve[numPoints * 2] = { + 0.0f, 0.0f, // keep the shadows unchanged + 0.25f, 0.22f, // slightly darken the lower midtones + 0.5f, 0.45f, // darken the midtones more noticeably + 0.75f, 0.65f, // clearly darken the highlights + 1.0f, 0.85f // strongly darken the brightest part + }; + + // Red channel curve + ACameraMetadata_const_entry entry; + int result = ACaptureRequest_setEntry_float(request, + ACAMERA_TONEMAP_CURVE_RED, + numPoints * 2, + curve); + if (result != ACAMERA_OK) { + ALOGE("Failed to set red tonemap curve: %d", result); + return false; + } + + // Green channel curve (same curve) + result = ACaptureRequest_setEntry_float(request, + ACAMERA_TONEMAP_CURVE_GREEN, + numPoints * 2, + curve); + if (result != ACAMERA_OK) { + ALOGE("Failed to set green tonemap curve: %d", result); + return false; + } + + // Blue channel curve (same curve) + result = ACaptureRequest_setEntry_float(request, + ACAMERA_TONEMAP_CURVE_BLUE, + numPoints * 2, + curve); + if (result != ACAMERA_OK) { + ALOGE("Failed to set blue tonemap curve: %d", result); + return false; + } + + // Set the tonemap mode to contrast-curve mode + uint8_t tonemapMode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE; + result = ACaptureRequest_setEntry_u8(request, + ACAMERA_TONEMAP_MODE, + 1, + &tonemapMode); + if (result != ACAMERA_OK) { + ALOGE("Failed to set tonemap mode: %d", result); + return false; + } +#endif + + return true; } \ No newline at end of file diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h index d8e8bdf0..6f3fc2fc 100644 --- a/app/src/main/cpp/camera2/ndkcamera.h +++ b/app/src/main/cpp/camera2/ndkcamera.h @@ -23,6 +23,7 @@ #include #include "Camera2Helper.h" #include +#include #include /** @@ -39,6 +40,9 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000); #define WAIT_AF_LOCKED 4 #define PREVIEW_REQUEST_IDX 0 +#define CAPTURE_REQUEST_IDX 1 + +#define DEFAULT_WARMUP_TIME 250 // 250ms class CameraManager { @@ -81,10 +85,11 @@ public: unsigned int orientation:3; unsigned int zoom : 1; unsigned int wait3ALocked : 3; - unsigned int burstRawCapture : 2; + unsigned int burstRawCapture : 3; unsigned int customHdr : 1; unsigned int hdrStep : 3; - unsigned int reserved : 12; + unsigned int minFps : 4; + unsigned int reserved : 7; int64_t exposureTime; unsigned int sensitivity; int compensation; @@ -160,12 +165,12 @@ public: void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height); void CreateSession(ANativeWindow* previewWindow); - CaptureRequest* CreateRequest(bool isPreviewRequest); + CaptureRequest*
CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1); void DestroyRequest(CaptureRequest* request); void DestroySession(); - virtual bool on_image(cv::Mat& rgb); + virtual bool on_image(cv::Mat rgb); virtual void on_error(const std::string& msg); virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height); virtual void onDisconnected(ACameraDevice* device); @@ -183,6 +188,7 @@ public: void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult); void FireBurstCapture(); + void FireOneCapture(uint64_t ts); uint32_t GetLdr() const { @@ -195,10 +201,17 @@ public: } bool IsCameraAvailable(const std::string& cameraId); + int64_t GetTimestamp(const ACameraMetadata* result); static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height); static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult); +protected: + void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity); + void Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity); + void SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity); + bool SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request); + protected: std::mutex m_locker; std::set m_availableCameras; @@ -236,9 +249,11 @@ protected: bool mCaptureTriggered; bool mFocusTriggered; + bool mCaptureDispatched; + uint32_t mStableFrameCount; CAPTURE_RESULT mResult; - unsigned long long m_startTime; + uint64_t m_startTime; protected: @@ -259,32 +274,24 @@ protected: ACameraOutputTarget* mOutputTarget; ACaptureSessionOutput* mSessionOutput; - AImageReader* mImageReader2; - ANativeWindow* mImageWindow2; - ACameraOutputTarget* mOutputTarget2; - ACaptureSessionOutput* mSessionOutput2; - std::shared_ptr mCharacteristics; std::vector mCaptureRequests; + ACameraCaptureSession* capture_session; + std::shared_ptr mPreviewResults; std::vector > mCaptureResults; + std::map > mCaptureResultMap; uint32_t mLdr; uint32_t mFinalLdr; uint32_t mFinalBurstCaptures; int32_t mFinalOutputFormat; std::vector > mCaptureFrames; - cv::Mat mOneFrame; + // cv::Mat mOneFrame; + std::vector > mOneFrame; std::vector > mRawFrames; - - ACameraCaptureSession* capture_session; - - // AImageReader* image_reader; - // ANativeWindow* image_reader_surface; - // ACameraOutputTarget* image_reader_target; - // ACaptureRequest* capture_request; - // ACaptureSessionOutput* capture_session_output; + int64_t m_minTimestamp; }; diff --git a/app/src/main/cpp/media/RTSPRecorder.cpp b/app/src/main/cpp/media/RTSPRecorder.cpp new file mode 100644 index 00000000..71b40115 --- /dev/null +++ b/app/src/main/cpp/media/RTSPRecorder.cpp @@ -0,0 +1,428 @@ +// +// Created by Matthew on 2025/3/1. +// + +#include "RTSPRecorder.h" +#include +#include +#include +#include +extern "C" { +#include +#include +#include +#include +#include +} + + +#define LOG_TAG "libcurl" + +#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__) +#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__) +#define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) + +#include +#include + +void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) { + // Map FFmpeg log levels to Android log levels + int android_log_level; + switch (level) { + case AV_LOG_PANIC: + case AV_LOG_FATAL: + android_log_level = ANDROID_LOG_FATAL; + break; + case AV_LOG_ERROR: + android_log_level = ANDROID_LOG_ERROR; + break; + case AV_LOG_WARNING: + android_log_level = ANDROID_LOG_WARN; + break; + case AV_LOG_INFO: + android_log_level = ANDROID_LOG_INFO; + break; + case AV_LOG_VERBOSE: + android_log_level = ANDROID_LOG_VERBOSE; + break; + case AV_LOG_DEBUG: + case AV_LOG_TRACE: + android_log_level = ANDROID_LOG_DEBUG; + break; + default: + android_log_level = ANDROID_LOG_INFO; + break; + } + + // Format the log message + char log_message[1024]; + vsnprintf(log_message, sizeof(log_message), fmt, vl); + + // Send the log message to logcat + __android_log_print(android_log_level, "FFmpeg", "%s", log_message); +} + + +int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) { + // Copy streams and fix time_base + for (unsigned int i = 0; i < input_ctx->nb_streams; i++) { + AVStream *in_stream = input_ctx->streams[i]; + AVStream *out_stream = avformat_new_stream(output_ctx, NULL); + if (!out_stream) { + return AVERROR_UNKNOWN; + } + + // Copy codec parameters + int ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar); + if (ret < 0) { + return ret; + } + + // Fix time base + out_stream->time_base = in_stream->time_base; + + // Clear any existing flags + out_stream->codecpar->codec_tag = 0; + } + return 0; +} + +int write_mp4_header(AVFormatContext *output_ctx) { + AVDictionary *opts = NULL; + + // MP4 specific options + av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0); + av_dict_set(&opts, "brand", "mp42", 0); + + // Write header + int ret = avformat_write_header(output_ctx, &opts); + if (ret < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE]; + av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE); + fprintf(stderr, "Header write failed: %s (code: %d)\n", errbuf, ret); + } + + av_dict_free(&opts); + return ret; +} + + +void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle) +{ + AVFormatContext* inputFormatContext = nullptr; + AVFormatContext* outputFormatContext = nullptr; + AVPacket packet; + + // Open input RTMP stream + if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) { + fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl); + return; + } + + // Retrieve input stream information + if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) { + fprintf(stderr, "Could not find stream information\n"); + avformat_close_input(&inputFormatContext); + return; + } + + // Open output MP4 file + if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) { + fprintf(stderr, "Could not create output context\n"); + avformat_close_input(&inputFormatContext); + return; + } + + // Copy stream information from input to output + for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) { + AVStream* inStream = inputFormatContext->streams[i]; + AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr); + if (!outStream) { + fprintf(stderr, "Failed to allocate output stream\n"); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + + if 
(avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) { + fprintf(stderr, "Failed to copy codec parameters\n"); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + outStream->codecpar->codec_tag = 0; + } + + // Open output file + if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) { + if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) { + fprintf(stderr, "Could not open output file '%s'\n", outputPath); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + } + + // Write output file header + if (avformat_write_header(outputFormatContext, nullptr) < 0) { + fprintf(stderr, "Error occurred when writing header to output file\n"); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + + // Start a thread to stop the streaming after the specified duration + std::thread stop_thread([&]() { + std::this_thread::sleep_for(std::chrono::milliseconds(duration)); + av_read_pause(inputFormatContext); + }); + + // Read packets from input and write them to output + while (av_read_frame(inputFormatContext, &packet) >= 0) { + AVStream* inStream = inputFormatContext->streams[packet.stream_index]; + AVStream* outStream = outputFormatContext->streams[packet.stream_index]; + + packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base); + packet.pos = -1; + + if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) { + fprintf(stderr, "Error muxing packet\n"); + break; + } + + av_packet_unref(&packet); + } + + stop_thread.join(); + + // Write output file trailer + av_write_trailer(outputFormatContext); + + // Clean up + avformat_close_input(&inputFormatContext); + if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) { + avio_closep(&outputFormatContext->pb); + } + avformat_free_context(outputFormatContext); +} + + +void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle) +{ + AVFormatContext* inputFormatContext = nullptr; + AVFormatContext* outputFormatContext = nullptr; + AVPacket packet; + +#ifndef NDEBUG + + // Set the custom log callback + av_log_set_callback(ffmpeg_log_callback); + av_log_set_level(AV_LOG_WARNING); + +#endif + + std::string url = rtspUrl; + AVDictionary* options = NULL; + av_dict_set(&options, "rtsp_transport", "tcp", 0); + av_dict_set(&options, "stimeout", "5000000", 0); + if (!userName.empty()) + { + av_dict_set(&options, "username", userName.c_str(), 0); // Replace with actual username + av_dict_set(&options, "password", password.c_str(), 0); // Replace with actual password + + char auth[512] = { 0 }; + snprintf(auth, sizeof(auth), "%s:%s@", userName.c_str(), password.c_str()); + + url.insert(url.begin() + 7, auth, auth + strlen(auth)); + } + + // Open input RTSP stream + int res = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &options); + av_dict_free(&options); + if (res != 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE]; + av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE); + fprintf(stderr, "Could not open 
input: %s (error code: %d)\n", errbuf, res); + // fprintf(stderr, "Could not open input file '%s'\n", rtspUrl); + return; + } + + + // Retrieve input stream information + if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) { + // fprintf(stderr, "Could not find stream information\n"); + avformat_close_input(&inputFormatContext); + return; + } + + // Open output MP4 file + if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) { + fprintf(stderr, "Could not create output context\n"); + avformat_close_input(&inputFormatContext); + return; + } + + // Copy stream information from input to output + for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) { + AVStream* inStream = inputFormatContext->streams[i]; + const AVCodecParameters *in_codecpar = inStream->codecpar; + + // Skip audio streams + if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) { + continue; + } + + if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { + // Copy video stream as-is + const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id); + AVStream *out_stream = avformat_new_stream(outputFormatContext, codec); + if (!out_stream) { + return; + } + avcodec_parameters_copy(out_stream->codecpar, in_codecpar); + out_stream->codecpar->codec_tag = 0; + out_stream->time_base = (AVRational){1, 90000}; + out_stream->avg_frame_rate = inStream->avg_frame_rate; + } + else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) { + // Setup AAC audio stream + const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC); + if (!aac_encoder) { + fprintf(stderr, "AAC encoder not found\n"); + return; + } + + AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder); + if (!out_stream) { + return; + } + + // Set AAC parameters + out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO; + out_stream->codecpar->codec_id = AV_CODEC_ID_AAC; + out_stream->codecpar->sample_rate = in_codecpar->sample_rate; + out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP; + out_stream->codecpar->channels = in_codecpar->channels; + out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels); + out_stream->codecpar->bit_rate = 128000; + out_stream->codecpar->frame_size = 1024; // AAC frame size + out_stream->time_base = (AVRational){1, in_codecpar->sample_rate}; + } + } + + // Open output file + if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) { + if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) { + fprintf(stderr, "Could not open output file '%s'\n", outputPath); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + } + + AVDictionary *opts = NULL; + + // Set output format options + av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0); + av_dict_set(&opts, "brand", "mp42", 0); + + // Write output file header + res = avformat_write_header(outputFormatContext, &opts); + av_dict_free(&opts); + if (res < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 }; + av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE); + fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res); + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + return; + } + +#if 0 + // Start a thread to stop the streaming after the specified duration + std::thread stop_thread([&]() { + std::this_thread::sleep_for(std::chrono::milliseconds(duration)); + av_read_pause(inputFormatContext); + }); +#endif + + 
uint32_t framesToSkip = 16; + uint32_t framesSkipped = 0; + // Skip initial frames + while (framesSkipped < framesToSkip) { + if (av_read_frame(inputFormatContext, &packet) < 0) + break; + + if (packet.stream_index == 0) { // Video stream + framesSkipped++; + } + av_packet_unref(&packet); + } + + auto startTime = av_gettime(); + // int64_t durationNs = (int64_t)duration * 1000000; + int64_t durationNs = (int64_t)(duration + 32) * 1000; + // Read packets from input and write them to output + while (1) { + + if ((av_gettime() - startTime) >= durationNs) { + // printf("Duration limit reached (%d seconds)\n", ctx->duration_secs); + break; + } + + +#if 0 + AVStream* inStream = inputFormatContext->streams[packet.stream_index]; + AVStream* outStream = outputFormatContext->streams[packet.stream_index]; + + packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base); + packet.pos = -1; + + if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) { + fprintf(stderr, "Error muxing packet\n"); + break; + } +#endif + + if (av_read_frame(inputFormatContext, &packet) < 0) break; + + // Skip audio packets + if (inputFormatContext->streams[packet.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) + { + av_packet_unref(&packet); + continue; + } + + // Adjust packet timebase + AVStream *in_stream = inputFormatContext->streams[packet.stream_index]; + AVStream *out_stream = outputFormatContext->streams[packet.stream_index]; + av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base); + packet.pos = -1; + + res = av_write_frame(outputFormatContext, &packet); + + av_packet_unref(&packet); + + if (res < 0) + { + break; + } + + } + + // stop_thread.join(); + + // Write output file trailer + av_write_trailer(outputFormatContext); + + // Clean up + avformat_close_input(&inputFormatContext); + if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) { + avio_closep(&outputFormatContext->pb); + } + avformat_free_context(outputFormatContext); +} diff --git a/app/src/main/cpp/media/RTSPRecorder.h b/app/src/main/cpp/media/RTSPRecorder.h new file mode 100644 index 00000000..56d66568 --- /dev/null +++ b/app/src/main/cpp/media/RTSPRecorder.h @@ -0,0 +1,20 @@ +// +// Created by Matthew on 2025/3/1. +// + +#ifndef MICROPHOTO_RTSPRECORDER_H +#define MICROPHOTO_RTSPRECORDER_H + +#include +#include + +// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs); +void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle); +void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle); + +class RTSPRecorder { + +}; + + +#endif //MICROPHOTO_RTSPRECORDER_H diff --git a/app/src/main/cpp/media/RTSPToMP4.cpp b/app/src/main/cpp/media/RTSPToMP4.cpp new file mode 100644 index 00000000..26b096a2 --- /dev/null +++ b/app/src/main/cpp/media/RTSPToMP4.cpp @@ -0,0 +1,186 @@ +// +// Created by Matthew on 2025/2/28. 
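+// RTSPToMP4: reads an RTSP source with AMediaExtractor/AMediaCodec and writes its video track to an MP4 file through AMediaMuxer (NDK media APIs).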
+// + +#include "RTSPToMP4.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include + +int32_t getMaxInputSize(AMediaExtractor* extractor, size_t trackIndex) +{ + AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, trackIndex); + int32_t maxInputSize = 0; + if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &maxInputSize)) { + // LOGI("Max input size for track %zu: %d", trackIndex, maxInputSize); + } else { + // LOGE("Failed to get max input size for track %zu", trackIndex); + } + AMediaFormat_delete(format); + return maxInputSize; +} + +RTSPToMP4::RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs/* = 0*/) + : fd(-1), codec(nullptr), extractor(nullptr), muxer(nullptr), videoTrackIndex(-1), durationInMs(durationInMs), running(false) { + initExtractor(rtspUrl); + initCodec("video/avc"); + initMuxer(outputPath); +} + +RTSPToMP4::~RTSPToMP4() { + if (codec) AMediaCodec_delete(codec); + if (extractor) AMediaExtractor_delete(extractor); + if (muxer) AMediaMuxer_delete(muxer); + + if (fd != -1) + { + fdatasync(fd); + close(fd); + fd = -1; + } +} + +void RTSPToMP4::initCodec(const char* mime) { + codec = AMediaCodec_createDecoderByType(mime); + AMediaFormat* format = AMediaFormat_new(); + AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime); + // Set other format parameters as needed + // ... + AMediaCodec_configure(codec, format, nullptr, nullptr, 0); + AMediaFormat_delete(format); +} + +void RTSPToMP4::initExtractor(const char* rtspUrl) { + extractor = AMediaExtractor_new(); + media_status_t status = AMediaExtractor_setDataSource(extractor, rtspUrl); + if (status != AMEDIA_OK) { + // Handle error + // ... + } +} + +void RTSPToMP4::initMuxer(const char* outputPath) { + fd = open(outputPath, O_CREAT | O_WRONLY, 0644); + muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4); + + int numTracks = AMediaExtractor_getTrackCount(extractor); + if (numTracks <= 0) { + // LOGE("No tracks found in RTSP stream"); + AMediaExtractor_delete(extractor); + return; + } + + for (int i = 0; i < numTracks; ++i) { + AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, i); + const char* mime; + if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime) && strncmp(mime, "video/", 6) == 0) { + videoTrackIndex = AMediaMuxer_addTrack(muxer, format); + AMediaExtractor_selectTrack(extractor, i); + } + AMediaFormat_delete(format); + } + + if (videoTrackIndex == -1) { + // LOGE("No video track found in RTSP stream"); + AMediaExtractor_delete(extractor); + AMediaMuxer_delete(muxer); + return; + } + + int32_t maxInputSize = getMaxInputSize(extractor, videoTrackIndex); + if (maxInputSize <= 0) { + // LOGE("Invalid max input size"); + // releaseMediaExtractor(extractor); + sampleData.resize(1920 * 1080 * 4, 0); + return; + } + + sampleData.resize(maxInputSize, 0); +} + +void RTSPToMP4::startDecodingAndMuxing() { + AMediaCodec_start(codec); + size_t bufferSize = sampleData.size(); + uint8_t* buffer = &sampleData[0]; + int64_t sampleTime = 0; + int64_t startTime = 0; + bool firstSampleData = true; + + int64_t durationTime = (durationInMs == 0) ? 
std::numeric_limits::max() : (int64_t)durationInMs * 1000; + + + while (running) { + // Extract data from RTSP stream + ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, buffer, bufferSize); + if (sampleSize < 0) { + break; // End of stream + } + + sampleTime = AMediaExtractor_getSampleTime(extractor); + if (firstSampleData) + { + startTime = sampleTime; + firstSampleData = false; + } + + sampleTime -= startTime; + + // Feed data to codec + size_t inputBufferIndex; + uint8_t* inputBuffer = AMediaCodec_getInputBuffer(codec, inputBufferIndex, &bufferSize); + memcpy(inputBuffer, buffer, sampleSize); + AMediaCodec_queueInputBuffer(codec, inputBufferIndex, 0, sampleSize, sampleTime, 0); + + // Retrieve decoded frames and write to muxer + AMediaCodecBufferInfo bufferInfo; + ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &bufferInfo, 0); + if (outputBufferIndex >= 0) { + + bufferInfo.offset = 0; + bufferInfo.size = sampleSize; + bufferInfo.presentationTimeUs = sampleTime; + bufferInfo.flags = AMediaExtractor_getSampleFlags(extractor); + + uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(codec, outputBufferIndex, &bufferSize); + AMediaMuxer_writeSampleData(muxer, videoTrackIndex, outputBuffer, &bufferInfo); + AMediaCodec_releaseOutputBuffer(codec, outputBufferIndex, false); + } + + AMediaExtractor_advance(extractor); + + if (sampleTime > durationTime) + { + break; + } + } + + AMediaCodec_stop(codec); + AMediaMuxer_stop(muxer); + + if (fd != -1) + { + fdatasync(fd); + close(fd); + fd = -1; + } +} + +void RTSPToMP4::start() { + // Add video track to muxer + AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, 0); + videoTrackIndex = AMediaMuxer_addTrack(muxer, format); + running = true; + AMediaMuxer_start(muxer); + + startDecodingAndMuxing(); +} + +void RTSPToMP4::stop() { + running = false; +} diff --git a/app/src/main/cpp/media/RTSPToMP4.h b/app/src/main/cpp/media/RTSPToMP4.h new file mode 100644 index 00000000..6759a8fd --- /dev/null +++ b/app/src/main/cpp/media/RTSPToMP4.h @@ -0,0 +1,38 @@ +// +// Created by Matthew on 2025/2/28. +// + +#ifndef MICROPHOTO_RTSPTOMP4_H +#define MICROPHOTO_RTSPTOMP4_H + +#include +#include +#include +#include + +class RTSPToMP4 { +public: + RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs = 0); + ~RTSPToMP4(); + void start(); + void stop(); + +private: + void initCodec(const char* mime); + void initExtractor(const char* rtspUrl); + void initMuxer(const char* outputPath); + void startDecodingAndMuxing(); + + int fd; + AMediaCodec* codec; + AMediaExtractor* extractor; + AMediaMuxer* muxer; + int videoTrackIndex; + uint64_t durationInMs; + bool running; + + std::vector sampleData; +}; + + +#endif //MICROPHOTO_RTSPTOMP4_H diff --git a/app/src/main/cpp/media/Streaming.cpp b/app/src/main/cpp/media/Streaming.cpp new file mode 100644 index 00000000..4a7cb898 --- /dev/null +++ b/app/src/main/cpp/media/Streaming.cpp @@ -0,0 +1,547 @@ +// +// Created by Matthew on 2025/3/11. 
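+// Streaming: FFmpeg-based forwarders; RtspForwarder re-publishes an RTSP input to an RTSP output over TCP, applying the h264_mp4toannexb bitstream filter to H.264 streams.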
+// + +#include "Streaming.h" + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +extern "C" { +#include +#include +#include +#include +#include +} + +extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl); + +#if 0 +StreamForwarder::~StreamForwarder() { + stop(); + if (inputCtx) { + avformat_close_input(&inputCtx); + } + if (outputCtx) { + if (outputCtx->pb) { + avio_closep(&outputCtx->pb); + } + avformat_free_context(outputCtx); + } +} + +bool StreamForwarder::initialize(const std::string& inputUrl, const std::string& outputUrl) { + if (!openInput(inputUrl)) { + return false; + } + + if (!openOutput(outputUrl)) { + return false; + } + + return true; +} + +bool StreamForwarder::openInput(const std::string& inputUrl) { + inputCtx = avformat_alloc_context(); + if (!inputCtx) { + return false; + } + + if (avformat_open_input(&inputCtx, inputUrl.c_str(), nullptr, nullptr) < 0) { + return false; + } + + if (avformat_find_stream_info(inputCtx, nullptr) < 0) { + return false; + } + + return true; +} + +bool StreamForwarder::openOutput(const std::string& outputUrl) { + int ret = avformat_alloc_output_context2(&outputCtx, nullptr, "flv", outputUrl.c_str()); + if (ret < 0) { + return false; + } + + // Copy streams from input to output + for (unsigned int i = 0; i < inputCtx->nb_streams; i++) { + AVStream* inStream = inputCtx->streams[i]; + AVStream* outStream = avformat_new_stream(outputCtx, inStream->codec->codec); + if (!outStream) { + return false; + } + + ret = avcodec_copy_context(outStream->codec, inStream->codec); + if (ret < 0) { + return false; + } + } + + // Open output file + if (!(outputCtx->oformat->flags & AVFMT_NOFILE)) { + ret = avio_open(&outputCtx->pb, outputUrl.c_str(), AVIO_FLAG_WRITE); + if (ret < 0) { + return false; + } + } + + // Write header + ret = avformat_write_header(outputCtx, nullptr); + if (ret < 0) { + return false; + } + + return true; +} + +void StreamForwarder::setFrameCallback(std::function callback) { + frameCallback = callback; +} + +void StreamForwarder::start() { + isRunning = true; + forwardPackets(); +} + +void StreamForwarder::stop() { + isRunning = false; +} + +void StreamForwarder::forwardPackets() { + AVPacket packet; + AVFrame* frame = av_frame_alloc(); + + while (isRunning) { + if (av_read_frame(inputCtx, &packet) < 0) { + break; + } + + // Process video frames if callback is set + if (frameCallback && packet.stream_index == 0) { // Assuming video is stream 0 + AVCodecContext* codecCtx = inputCtx->streams[packet.stream_index]->codec; + int ret = avcodec_send_packet(codecCtx, &packet); + if (ret < 0) { + continue; + } + + while (ret >= 0) { + ret = avcodec_receive_frame(codecCtx, frame); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } else if (ret < 0) { + goto end; + } + + processFrame(frame); + } + } + + // Forward packet + av_packet_rescale_ts(&packet, + inputCtx->streams[packet.stream_index]->time_base, + outputCtx->streams[packet.stream_index]->time_base); + + int ret = av_interleaved_write_frame(outputCtx, &packet); + if (ret < 0) { + break; + } + + av_packet_unref(&packet); + } + + end: + av_frame_free(&frame); + av_write_trailer(outputCtx); +} + +void StreamForwarder::processFrame(AVFrame* frame) { + if (frameCallback) { + frameCallback(frame->data[0], frame->linesize[0], + frame->width, frame->height); + } +} +#endif + + +RtspForwarder::RtspForwarder(const std::string& input, const std::string& output) + : inputUrl(input), outputUrl(output), 
isRunning(false) +{ +} + +bool RtspForwarder::isStreaming() const +{ + return isRunning; +} + +bool RtspForwarder::start() +{ + run(); + return true; +} + +bool RtspForwarder::stop() +{ + isRunning = false; + return true; +} + +int RtspForwarder::run() +{ +#ifndef NDEBUG + + // Set the custom log callback + av_log_set_callback(ffmpeg_log_callback); + av_log_set_level(AV_LOG_DEBUG); + +#endif + isRunning = true; + AVFormatContext* inputFormatContext = nullptr; + AVFormatContext* outputFormatContext = nullptr; + int ret; + int videoStreamIndex = -1; + int64_t startTime = AV_NOPTS_VALUE; + AVBSFContext* bsf_ctx = nullptr; + + std::string url = inputUrl; + if (!m_userName.empty()) + { + char auth[512] = { 0 }; + snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str()); + + url.insert(url.begin() + 7, auth, auth + strlen(auth)); + } + + // Input options + AVDictionary* inputOptions = nullptr; + av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0); + av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout + // av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer + + std::cout << "Opening input: " << url << std::endl; + + // Open input + ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions); + av_dict_free(&inputOptions); + if (ret < 0) { + std::cerr << "Could not open input: " << av_err2str(ret) << std::endl; + return ret; + } + + // Get stream info + ret = avformat_find_stream_info(inputFormatContext, nullptr); + if (ret < 0) { + // std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl; + avformat_close_input(&inputFormatContext); + return ret; + } + + // Find video stream + for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) { + if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { + videoStreamIndex = i; + break; + } + } + + if (videoStreamIndex == -1) { + // std::cerr << "No video stream found" << std::endl; + avformat_close_input(&inputFormatContext); + return -1; + } + + // Create stream mapping + std::vector streamMapping(inputFormatContext->nb_streams, -1); + int outputStreamIdx = 0; + + + // Allocate output context + ret = avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str()); + if ((ret < 0) || !outputFormatContext) { + std::cerr << "Could not create output context" << std::endl; + avformat_close_input(&inputFormatContext); + return false; + } + + // FIXED VERSION - remove the redundant stream creation + for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) { + AVStream* inStream = inputFormatContext->streams[i]; + const AVCodecParameters *in_codecpar = inStream->codecpar; + + // Skip non-video streams if needed + if (in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO) { + streamMapping[i] = -1; + continue; + } + + // Create only ONE stream per input stream + const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id); + AVStream *outStream = avformat_new_stream(outputFormatContext, codec); + if (!outStream) { + return false; + } + + ret = avcodec_parameters_copy(outStream->codecpar, in_codecpar); + outStream->codecpar->codec_tag = 0; + outStream->time_base = (AVRational){1, 90000}; + outStream->avg_frame_rate = inStream->avg_frame_rate; + + // Map input stream to output stream + streamMapping[i] = outputStreamIdx++; + } + + const AVBitStreamFilter* filter = av_bsf_get_by_name("h264_mp4toannexb"); + if (filter) + { + for (unsigned i = 0; i < outputFormatContext->nb_streams; i++) { + AVStream* 
stream = outputFormatContext->streams[i]; + if (stream->codecpar->codec_id == AV_CODEC_ID_H264) { + ret = av_bsf_alloc(filter, &bsf_ctx); + if (ret < 0) { + std::cerr << "Failed to allocate bitstream filter context: " << av_err2str(ret) << std::endl; + return false; + } + + // Copy parameters from input to bsf + ret = avcodec_parameters_copy(bsf_ctx->par_in, stream->codecpar); + if (ret < 0) { + std::cerr << "Failed to copy parameters to bsf: " << av_err2str(ret) << std::endl; + return false; + } + + // Initialize the bsf context + ret = av_bsf_init(bsf_ctx); + if (ret < 0) { + std::cerr << "Failed to initialize bitstream filter: " << av_err2str(ret) << std::endl; + return false; + } + + // Update output parameters + ret = avcodec_parameters_copy(stream->codecpar, bsf_ctx->par_out); + if (ret < 0) { + std::cerr << "Failed to copy parameters from bsf: " << av_err2str(ret) << std::endl; + return false; + } + break; // Only apply to the first H.264 stream + } + } + } + + AVDictionary* outputOptions = nullptr; + av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0); + av_dict_set(&outputOptions, "rtsp_flags", "filter_src", 0); + av_dict_set(&outputOptions, "timeout", "5000000", 0); + av_dict_set(&outputOptions, "allowed_media_types", "video", 0); + av_dict_set(&outputOptions, "buffer_size", "1024000", 0); // 1MB buffer + av_dict_set(&outputOptions, "fflags", "nobuffer", 0); // Reduce latency + av_dict_set(&outputOptions, "muxdelay", "0.1", 0); // Reduce delay + av_dict_set(&outputOptions, "max_delay", "500000", 0); + av_dict_set(&outputOptions, "preset", "ultrafast", 0); + av_dict_set(&outputOptions, "tune", "zerolatency", 0); + av_dict_set(&outputOptions, "rtsp_flags", "prefer_tcp", 0); + + // Open output + if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) { + + // Output options + + // ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE); + ret = avio_open2(&outputFormatContext->pb, outputFormatContext->url, AVIO_FLAG_WRITE, NULL, &outputOptions); + + if (ret < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 }; + av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE); + std::cerr << "Could not open output URL: " << errbuf << std::endl; + avformat_close_input(&inputFormatContext); + avformat_free_context(outputFormatContext); + av_dict_free(&outputOptions); + return ret; + } + } + + // Write header + ret = avformat_write_header(outputFormatContext, &outputOptions); + av_dict_free(&outputOptions); + if (ret < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 }; + av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE); + std::cerr << "Error writing header: " << errbuf << std::endl; + avformat_close_input(&inputFormatContext); + if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) + avio_closep(&outputFormatContext->pb); + avformat_free_context(outputFormatContext); + return ret; + } + + // Main loop - read and write packets + AVPacket packet; + AVMediaType medaiType; + while (isRunning) { + ret = av_read_frame(inputFormatContext, &packet); + if (ret < 0) { + if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) { + std::cerr << "End of stream or timeout, reconnecting in " + << reconnectDelayMs << "ms" << std::endl; + std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs)); + avformat_close_input(&inputFormatContext); + ret = avformat_open_input(&inputFormatContext, inputUrl.c_str(), nullptr, &inputOptions); + if (ret < 0) continue; + ret = avformat_find_stream_info(inputFormatContext, nullptr); + if (ret < 0) continue; + continue; + } + break; 
+ } + + // Later when writing packets: + int original_stream_index = packet.stream_index; + if (streamMapping[original_stream_index] >= 0) { + packet.stream_index = streamMapping[original_stream_index]; + // Write packet... + } else { + // Skip this packet + av_packet_unref(&packet); + continue; + } + + // Skip audio packets + medaiType = inputFormatContext->streams[original_stream_index]->codecpar->codec_type; + if (medaiType == AVMEDIA_TYPE_AUDIO || medaiType == AVMEDIA_TYPE_DATA) + { + av_packet_unref(&packet); + continue; + } + + +#if 0 + // Fix timestamps if enabled + if (fixTimestamps) { + // Handle timestamp issues similar to FFmpeg warning + AVStream* inStream = inputFormatContext->streams[packet.stream_index]; + AVStream* outStream = outputFormatContext->streams[packet.stream_index]; + + if (packet.pts == AV_NOPTS_VALUE) { + // Generate PTS if missing + if (startTime == AV_NOPTS_VALUE) { + startTime = av_gettime(); + } + packet.pts = av_rescale_q(av_gettime() - startTime, + AV_TIME_BASE_Q, + inStream->time_base); + packet.dts = packet.pts; + } + + // Rescale timestamps to output timebase + packet.pts = av_rescale_q_rnd(packet.pts, + inStream->time_base, + outStream->time_base, + static_cast(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.dts = av_rescale_q_rnd(packet.dts, + inStream->time_base, + outStream->time_base, + static_cast(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); + packet.duration = av_rescale_q(packet.duration, + inStream->time_base, + outStream->time_base); + } + + // Write packet to output + ret = av_interleaved_write_frame(outputFormatContext, &packet); + av_packet_unref(&packet); + if (ret < 0) { + std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl; + break; + } +#endif + + AVStream *in_stream = inputFormatContext->streams[original_stream_index]; + AVStream *out_stream = outputFormatContext->streams[packet.stream_index]; + av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base); + +// CRITICAL: Fix timestamp issues + if (packet.dts != AV_NOPTS_VALUE && packet.pts != AV_NOPTS_VALUE && packet.dts > packet.pts) { + packet.dts = packet.pts; + } + +// Handle missing timestamps + if (packet.pts == AV_NOPTS_VALUE) { + if (startTime == AV_NOPTS_VALUE) { + startTime = av_gettime(); + } + packet.pts = av_rescale_q(av_gettime() - startTime, + AV_TIME_BASE_Q, + out_stream->time_base); + packet.dts = packet.pts; + } + + packet.pos = -1; + +// Apply bitstream filter if it's H.264 + if (bsf_ctx && out_stream->codecpar->codec_id == AV_CODEC_ID_H264) { + ret = av_bsf_send_packet(bsf_ctx, &packet); + if (ret < 0) { + std::cerr << "Error sending packet to bitstream filter: " << av_err2str(ret) << std::endl; + break; + } + + while (ret >= 0) { + ret = av_bsf_receive_packet(bsf_ctx, &packet); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + // Need more input or end of file + break; + } else if (ret < 0) { + std::cerr << "Error receiving packet from bitstream filter: " << av_err2str(ret) << std::endl; + break; + } + + // Write the filtered packet + ret = av_interleaved_write_frame(outputFormatContext, &packet); + av_packet_unref(&packet); + if (ret < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 }; + av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE); + std::cerr << "Error writing frame: " << errbuf << std::endl; + break; + } + } + } else { + // Write the packet without filtering + ret = av_interleaved_write_frame(outputFormatContext, &packet); + av_packet_unref(&packet); + if (ret < 0) { + char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 
0 }; + av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE); + std::cerr << "Error writing frame: " << errbuf << std::endl; + break; + } + } + + } + + cleanup: + // Free the bitstream filter context + if (bsf_ctx) { + av_bsf_free(&bsf_ctx); + } + + // Write trailer + av_write_trailer(outputFormatContext); + + // Cleanup + avformat_close_input(&inputFormatContext); + if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) + avio_closep(&outputFormatContext->pb); + avformat_free_context(outputFormatContext); + + return ret; +} diff --git a/app/src/main/cpp/media/Streaming.h b/app/src/main/cpp/media/Streaming.h new file mode 100644 index 00000000..2e5f6884 --- /dev/null +++ b/app/src/main/cpp/media/Streaming.h @@ -0,0 +1,90 @@ +// +// Created by Matthew on 2025/3/11. +// + +#ifndef MICROPHOTO_STREAMING_H +#define MICROPHOTO_STREAMING_H + +#include +#include +#include +#include +#include +#include + + +#include + +extern "C" { +#include +#include +#include +#include +} + +class Streaming +{ +public: + virtual ~Streaming() {} + virtual bool start() { return false; } + virtual bool stop() { return false; } + virtual bool isStreaming() const { return false; } + + void setAuth(const std::string& userName, const std::string& password) + { + m_userName = userName; + m_password = password; + } +protected: + std::string m_userName; + std::string m_password; +}; + + +#if 0 +class StreamForwarder : public Streaming +{ +private: + AVFormatContext* inputCtx = nullptr; + AVFormatContext* outputCtx = nullptr; + bool isRunning = false; + +public: + StreamForwarder() = default; + virtual ~StreamForwarder(); + + bool initialize(const std::string& inputUrl, const std::string& outputUrl); + virtual void start(); + virtual void stop(); + +private: + bool openInput(const std::string& inputUrl); + bool openOutput(const std::string& outputUrl); + void forwardPackets(); + void setFrameCallback(std::function callback); +}; +#endif + + +class RtspForwarder : public Streaming { +private: + std::string inputUrl; + std::string outputUrl; + std::atomic isRunning; + + // Options + int reconnectDelayMs = 5000; + bool fixTimestamps = true; + +public: + RtspForwarder(const std::string& input, const std::string& output); + + virtual bool start(); + virtual bool stop(); + virtual bool isStreaming() const; + + + int run(); +}; + +#endif //MICROPHOTO_STREAMING_H diff --git a/app/src/main/cpp/netcamera/HangYuCtrl.cpp b/app/src/main/cpp/netcamera/HangYuCtrl.cpp new file mode 100644 index 00000000..c852346f --- /dev/null +++ b/app/src/main/cpp/netcamera/HangYuCtrl.cpp @@ -0,0 +1,330 @@ +// +// Created by Matthew on 2025/3/4. 
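+// HangYuCtrl: vendor control for HangYu cameras over their HTTP/XML interface (Basic auth): resolution, OSD, snapshots and time sync.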
+// + +#include "HangYuCtrl.h" +#include "netcamera.h" +#include "httpclient.h" +#include + +#include + +#include +#include + +HangYuCtrl::~HangYuCtrl() +{ + +} + +bool HangYuCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) +{ + //流类型范围1-4,1为主流 + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res != 0 || resData.empty()) + { + return 0; + } + + std::string xmlString(resData.begin(), resData.end()); + + size_t widthStart = xmlString.find(""); + size_t widthEnd = xmlString.find(""); + if (widthStart != std::string::npos && widthEnd != std::string::npos) { + widthStart += std::string("").length(); + xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX)); + } + + size_t heightStart = xmlString.find(""); + size_t heightEnd = xmlString.find(""); + if (heightStart != std::string::npos && heightEnd != std::string::npos) { + heightStart += std::string("").length(); + xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY)); + } + + res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + + if (res != 0) + { + return 0; + } + return true; +} + +bool HangYuCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos) +{ + // /LAPI/V1.0/Channels//Media/OSDs/Contents + //左上OSD + + bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos); + size_t posi = osdstring.find("$$DATETIME$$"); + if (posi != std::string::npos) { + size_t endPos = posi + 12; + while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) { + endPos++; + } + osdstring.erase(posi, endPos - posi); + } + + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel); + std::vector resData; + std::replace(osdstring.begin(), osdstring.end(), '\n', '^'); + string xmlString = "" + string(hasDateTime ? "true" : "false") + "801false"+ osdstring+ "8" + string(hasDateTime ? "24" : "0") + ""; + int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + return res; +} + +void HangYuCtrl::EnableOsd(bool enable, uint8_t channel) +{ + //航煜 只能显示时间和一个OSD + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res != 0 || resData.empty()) + { + return; + } + + std::string xmlString(resData.begin(), resData.end()); + + std::string enableStartTag = ""; + std::string enableEndTag = ""; + + size_t pos = 0; + while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) { + size_t startPos = pos + enableStartTag.length(); + size_t endPos = xmlString.find(enableEndTag, startPos); + if (endPos == std::string::npos) { + break; + } + std::string newValue = enable ? 
"true" : "false"; + xmlString.replace(startPos, endPos - startPos, newValue); + pos = endPos + enableEndTag.length(); + } + + res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + + if (res != 0) + { +// return; + } +} + +std::string HangYuCtrl::GetStreamingUrl(uint8_t channel) +{ + // /LAPI/V1.0/Channels//Media/Video/Streams//LiveStreamURL?TransType=&TransProtocol= + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = 0; + for (int idx = 0; idx < 10; idx++) + { + res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res == 0 && !resData.empty()) + { + break; + } + } + if (res != 0 || resData.empty()) + { + return ""; + } + + resData.push_back(0); + const char* start = strstr((const char*)&resData[0], ""); + if (start == NULL) + { + return ""; + } + start += 9; + const char* end = strstr(start, ""); + if (end == NULL) + { + return ""; + } + + return std::string(start, end); +} + +bool HangYuCtrl::UpdateTime(time_t ts) +{ + // /LAPI/V1.0/System/Time + + // + // + + std::string reqData = ""; + + std::string url = "http://" + m_ip + "/System/Time"; + std::vector resData; + int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData); + + if (res != 0) + { + return false; + } + + return true; +} + +bool HangYuCtrl::TakePhoto(uint8_t streamID, std::vector& img) +{ + bool res = false; + + std::vector data; + + // /Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", m_ip.c_str(), (uint32_t)streamID); + + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode); + if (0 == nRet) + { + bool qualityDowngraded = false; + std::string originalConfig; + if (img.size() < 1000) + { + qualityDowngraded = DowngradeQuality(originalConfig); + XYLOG(XYLOG_SEVERITY_INFO,"Reduce Img Quality"); + } + nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode); + if (!originalConfig.empty()) + { + UpdateQuality(originalConfig); + } + + std::vector header = {0xFF, 0xD8, 0xFF, 0xE0}; // JPEG + std::vector::iterator it = std::search(img.begin(), img.end(), header.begin(), header.end()); + if (it != img.end() && it != img.begin()) + { + img.erase(img.begin(), it); +#ifndef NDEBUG + int aa = 0; +#endif + } + } + + return nRet == 0; +} + +bool HangYuCtrl::DowngradeQuality(std::string& originalConfig) +{ + bool res = false; + char url[64] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str()); + + std::vector data; + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data); + if (0 == nRet) + { + std::string str = ByteArrayToString(&data[0], data.size()); + originalConfig = str; + if (replaceAll(str, "middle", "low") == 0) + { + res = (replaceAll(str, "high", "middle") != 0); + } + else + { + res = true; + } + + if (!res) + { + return res; + } + + data.clear(); + if (res) + { + nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data); + return 0 == nRet; + } + } + + return false; +} + +bool HangYuCtrl::UpdateQuality(const 
std::string& originalConfig) +{ + std::vector data; + char url[64] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str()); + int nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, originalConfig.c_str(), data); + return 0 == nRet; +} + +bool HangYuCtrl::UpgradeQuality() +{ + bool res = false; + char url[64] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str()); + + std::vector data; + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data); + if (0 == nRet) + { + std::string str = ByteArrayToString(&data[0], data.size()); + if (replaceAll(str, "low", "middle") == 0) + { + res = (replaceAll(str, "middle", "high") != 0); + } + else + { + res = true; + } + + if (!res) + { + return res; + } + + data.clear(); + if (res) + { + nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data); + return 0 == nRet; + } + } + + return false; +} + +bool HangYuCtrl::QueryQuality(std::string& qualityContents) +{ + char url[64] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str()); + + std::vector data; + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data); + if (0 == nRet && !data.empty()) + { + qualityContents = ByteArrayToString(&data[0], data.size()); + } + return (0 == nRet); +} + +bool HangYuCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) +{ + return false; +} \ No newline at end of file diff --git a/app/src/main/cpp/netcamera/HangYuCtrl.h b/app/src/main/cpp/netcamera/HangYuCtrl.h new file mode 100644 index 00000000..be5b2a84 --- /dev/null +++ b/app/src/main/cpp/netcamera/HangYuCtrl.h @@ -0,0 +1,34 @@ +// +// Created by Matthew on 2025/3/4. +// + +#ifndef __MICROPHOTO_HANGYUCTRL_H__ +#define __MICROPHOTO_HANGYUCTRL_H__ + +#include "VendorCtrl.h" + +class HangYuCtrl : public VendorCtrl +{ +public: + using VendorCtrl::VendorCtrl; + virtual ~HangYuCtrl(); + + virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos); + virtual void EnableOsd(bool enable, uint8_t channel); + virtual std::string GetStreamingUrl(uint8_t channel); + virtual bool UpdateTime(time_t ts); + virtual bool TakePhoto(uint8_t streamID, std::vector& img); + virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path); + virtual bool HasAuthOnStreaming() const { return true; } + virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY); + +private: + bool QueryQuality(std::string& qualityContents); + bool DowngradeQuality(std::string& originalConfig); + bool UpdateQuality(const std::string& originalConfig); + bool UpgradeQuality(); + +}; + + +#endif //__MICROPHOTO_HANGYUCTRL_H__ diff --git a/app/src/main/cpp/netcamera/HikonCtrl.cpp b/app/src/main/cpp/netcamera/HikonCtrl.cpp new file mode 100644 index 00000000..eaa194d5 --- /dev/null +++ b/app/src/main/cpp/netcamera/HikonCtrl.cpp @@ -0,0 +1,204 @@ +// +// Created by Matthew on 2025/3/4. 
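+// HikonCtrl: vendor control for Hikon cameras; largely mirrors HangYuCtrl, except snapshots use the ISAPI picture endpoint with Digest auth.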
+// + +#include "HikonCtrl.h" +#include "netcamera.h" +#include "httpclient.h" +#include + +#include + +#include +#include + +HikonCtrl::~HikonCtrl() +{ + +} + +bool HikonCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) +{ + //流类型范围1-4,1为主流 + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res != 0 || resData.empty()) + { + return 0; + } + + std::string xmlString(resData.begin(), resData.end()); + + size_t widthStart = xmlString.find(""); + size_t widthEnd = xmlString.find(""); + if (widthStart != std::string::npos && widthEnd != std::string::npos) { + widthStart += std::string("").length(); + xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX)); + } + + size_t heightStart = xmlString.find(""); + size_t heightEnd = xmlString.find(""); + if (heightStart != std::string::npos && heightEnd != std::string::npos) { + heightStart += std::string("").length(); + xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY)); + } + + res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + + if (res != 0) + { + return 0; + } + return true; +} + +bool HikonCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos) +{ + // /LAPI/V1.0/Channels//Media/OSDs/Contents + //左上OSD + + bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos); + size_t posi = osdstring.find("$$DATETIME$$"); + if (posi != std::string::npos) { + size_t endPos = posi + 12; + while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) { + endPos++; + } + osdstring.erase(posi, endPos - posi); + } + + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel); + std::vector resData; + std::replace(osdstring.begin(), osdstring.end(), '\n', '^'); + string xmlString = "" + string(hasDateTime ? "true" : "false") + "801false"+ osdstring+ "8" + string(hasDateTime ? "24" : "0") + ""; + int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + return res; +} + +void HikonCtrl::EnableOsd(bool enable, uint8_t channel) +{ + //航煜 只能显示时间和一个OSD + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res != 0 || resData.empty()) + { + return; + } + + std::string xmlString(resData.begin(), resData.end()); + + std::string enableStartTag = ""; + std::string enableEndTag = ""; + + size_t pos = 0; + while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) { + size_t startPos = pos + enableStartTag.length(); + size_t endPos = xmlString.find(enableEndTag, startPos); + if (endPos == std::string::npos) { + break; + } + std::string newValue = enable ? 
"true" : "false"; + xmlString.replace(startPos, endPos - startPos, newValue); + pos = endPos + enableEndTag.length(); + } + + res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData); + + if (res != 0) + { +// return; + } + + +} + +std::string HikonCtrl::GetStreamingUrl(uint8_t channel) +{ + // /LAPI/V1.0/Channels//Media/Video/Streams//LiveStreamURL?TransType=&TransProtocol= + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + + int res = 0; + for (int idx = 0; idx < 10; idx++) + { + res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res == 0 && !resData.empty()) + { + break; + } + } + if (res != 0 || resData.empty()) + { + return ""; + } + + resData.push_back(0); + const char* start = strstr((const char*)&resData[0], ""); + if (start == NULL) + { + return ""; + } + start += 9; + const char* end = strstr(start, ""); + if (end == NULL) + { + return ""; + } + + return std::string(start, end); +} + +bool HikonCtrl::UpdateTime(time_t ts) +{ + // /LAPI/V1.0/System/Time + + // + // + + std::string reqData = ""; + + std::string url = "http://" + m_ip + "/System/Time"; + std::vector resData; + int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData); + + if (res != 0) + { + return false; + } + + return true; +} + +bool HikonCtrl::TakePhoto(uint8_t streamID, std::vector& img) +{ + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/ISAPI/Streaming/channels/1/picture?", m_ip.c_str()); + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode); + return nRet == 0; +} + +bool HikonCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) +{ + return false; +} \ No newline at end of file diff --git a/app/src/main/cpp/netcamera/HikonCtrl.h b/app/src/main/cpp/netcamera/HikonCtrl.h new file mode 100644 index 00000000..0acb9d15 --- /dev/null +++ b/app/src/main/cpp/netcamera/HikonCtrl.h @@ -0,0 +1,34 @@ +// +// Created by Matthew on 2025/3/4. +// + +#ifndef __MICROPHOTO_HIKONCTRL_H__ +#define __MICROPHOTO_HIKONCTRL_H__ + +#include "VendorCtrl.h" + +class HikonCtrl : public VendorCtrl +{ +public: + using VendorCtrl::VendorCtrl; + virtual ~HikonCtrl(); + + virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos); + virtual void EnableOsd(bool enable, uint8_t channel); + virtual std::string GetStreamingUrl(uint8_t channel); + virtual bool UpdateTime(time_t ts); + virtual bool TakePhoto(uint8_t streamID, std::vector& img); + virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path); + virtual bool HasAuthOnStreaming() const { return true; } + virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY); + +private: + bool QueryQuality(std::string& qualityContents); + bool DowngradeQuality(std::string& originalConfig); + bool UpdateQuality(const std::string& originalConfig); + bool UpgradeQuality(); + +}; + + +#endif //__MICROPHOTO_HIKONCTRL_H__ diff --git a/app/src/main/cpp/netcamera/VendorCtrl.cpp b/app/src/main/cpp/netcamera/VendorCtrl.cpp new file mode 100644 index 00000000..b54195ea --- /dev/null +++ b/app/src/main/cpp/netcamera/VendorCtrl.cpp @@ -0,0 +1,27 @@ +// +// Created by Matthew on 2025/3/4. 
+// +#include "VendorCtrl.h" +#include + +VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) : + m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle) +{ + +} +std::string VendorCtrl::CvtJSONToString(const Json::Value& data) +{ + Json::StreamWriterBuilder builder; +#ifndef NDEBUG + builder["indentation"] = "\t"; // assume default for comments is None + builder["emitUTF8"] = true; +#else + builder["indentation"] = ""; +#endif + return Json::writeString(builder, data); +} + +bool VendorCtrl::IsTimeout() const +{ + return m_lastErrorCode == CURLE_OPERATION_TIMEDOUT; +} \ No newline at end of file diff --git a/app/src/main/cpp/netcamera/VendorCtrl.h b/app/src/main/cpp/netcamera/VendorCtrl.h new file mode 100644 index 00000000..d3d33060 --- /dev/null +++ b/app/src/main/cpp/netcamera/VendorCtrl.h @@ -0,0 +1,50 @@ +// +// Created by Matthew on 2025/3/4. +// + +#ifndef MICROPHOTO_VENDORCTRL_H +#define MICROPHOTO_VENDORCTRL_H + +#include +#include +#include + +#define LEFT_TOP 0 +#define RIGHT_TOP 1 +#define LEFT_BOTTOM 2 +#define RIGHT_BOTTOM 3 + +class VendorCtrl { +public: + VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true); + virtual ~VendorCtrl() {} + + virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos) = 0; + virtual void EnableOsd(bool enable, uint8_t channel) = 0; + virtual std::string GetStreamingUrl(uint8_t channel) = 0; + virtual bool UpdateTime(time_t ts) = 0; + virtual bool TakePhoto(uint8_t streamID, std::vector& img) = 0; + virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path) = 0; + virtual bool HasAuthOnStreaming() const { return false; } + virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) = 0; + + + void UpdateNetHandle(net_handle_t netHandle) { m_netHandle = netHandle; } + int GetLastError() const { return m_lastErrorCode; } + bool IsTimeout() const; + +protected: + + std::string CvtJSONToString(const Json::Value& data); + +protected: + std::string m_ip; + std::string m_userName; + std::string m_password; + uint8_t m_channel; + net_handle_t m_netHandle; + int m_lastErrorCode; +}; + + +#endif //MICROPHOTO_VENDORCTRL_H diff --git a/app/src/main/cpp/netcamera/YuShiCtrl.cpp b/app/src/main/cpp/netcamera/YuShiCtrl.cpp new file mode 100644 index 00000000..7b7f0df4 --- /dev/null +++ b/app/src/main/cpp/netcamera/YuShiCtrl.cpp @@ -0,0 +1,237 @@ +// +// Created by Matthew on 2025/3/4. 
+// + +#include "YuShiCtrl.h" +#include "httpclient.h" +#include "netcamera.h" + +#include + +YuShiCtrl::~YuShiCtrl() +{ + +} + +bool YuShiCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) +{ + return false; +} + +bool YuShiCtrl::SetOsd(uint8_t channel, std::string osd, uint8_t pos) +{ + // /LAPI/V1.0/Channels//Media/OSDs/Contents + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel); + std::vector resData; + + string jsonstring; + switch (pos) { + case LEFT_TOP: + { + OSDJson(0, 1, osd, 0, 0, true, jsonstring); + break; + } + case RIGHT_TOP: + { + OSDJson(1, 1, osd, 9900, 0, false, jsonstring); + break; + } + case LEFT_BOTTOM: + { + OSDJson(2, 1, osd, 0, 9900, false, jsonstring); + break; + } + case RIGHT_BOTTOM: + { + OSDJson(3, 1, osd, 9900, 9900, false, jsonstring); + break; + } + + } + int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, jsonstring.c_str(), resData); + return res; +} + +void YuShiCtrl::EnableOsd(bool enable, uint8_t channel) +{ + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel); + std::vector resData; + int res =DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + std::string jsonString(resData.begin(), resData.end()); + + Json::CharReaderBuilder reader; + Json::Value root; + std::string errors; + std::istringstream s(jsonString); + + if (!Json::parseFromStream(reader, s, &root, &errors)) { + XYLOG(XYLOG_SEVERITY_ERROR, "Failed to parse JSON:%s", errors.c_str()); + return; + } + + Json::Value& data = root["Response"]["Data"]; + if (data.isNull()) { + XYLOG(XYLOG_SEVERITY_ERROR,"Data not found in JSON"); + return; + } + + Json::Value& contentList = data["ContentList"]; + for (auto& content : contentList) { + content["Enabled"] = enable ? 
1 : 0; + } + + Json::StreamWriterBuilder writer; + std::string putJsonString = Json::writeString(writer, data); + DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, putJsonString.c_str(), resData); + + +} + +std::string YuShiCtrl::GetStreamingUrl(uint8_t channel) +{ + // /LAPI/V1.0/Channels//Media/Video/Streams//LiveStreamURL?TransType=&TransProtocol= + char url[128] = { 0 }; + snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel); + + std::vector resData; + int res = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData); + if (res != 0 || resData.empty()) + { + return ""; + } + + resData.push_back(0); + Json::CharReaderBuilder builder; + std::unique_ptr reader(builder.newCharReader()); + + Json::Value json; + const char* doc = (const char*)&(resData[0]); + if (reader->parse(doc, doc + resData.size() - 1, &json, NULL)) + { + if (json.isMember("Response")) + { + Json::Value& jsonRes = json["Response"]; + if (jsonRes.isMember("Data")) + { + Json::Value& jsonData = jsonRes["Data"]; + if (jsonData.isMember("URL")) + { + return std::string(jsonData["URL"].asCString()); + } + } + } + } + + return ""; +} + +bool YuShiCtrl::UpdateTime(time_t ts) +{ + // /LAPI/V1.0/System/Time + +#if 0 + Json::Value jsonData(Json::objectValue); + + jsonData["TimeZone"] = "GMT+08:00"; + jsonData["DeviceTime"] = (int64_t)ts; + jsonData["DateFormat"] = 0; // YYYY-MM-DD + jsonData["HourFormat"] = 1; // 24H +#endif + + std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}"; + + std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time"; + std::vector resData; + int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData); + + if (res != 0) + { + return false; + } + + return true; +} + +bool YuShiCtrl::TakePhoto(uint8_t streamID, std::vector& img) +{ + // Yu Shi + char url[128] = { 0 }; + int streamSid = 0; // should put into config + snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", m_ip.c_str(), (uint32_t)streamID, streamSid); + + int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode); + return nRet == 0; +} + +bool YuShiCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) { + + return false; +} + +void YuShiCtrl::OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString) +{ + Json::Value root; + root["Num"] = 1; + + Json::Value contentList(Json::arrayValue); + + Json::Value content; + content["ID"] = id; + content["Enabled"] = enabled; + + int row = 1; + for (char ch : osdString) { + if (ch == '\n') { + row++; + } + } + + content["Num"] = row; + Json::Value contentInfo(Json::arrayValue); + size_t start = 0; + size_t end = osdString.find('\n'); + + if(timeOn) + { + // If the time should be displayed at this position + Json::Value info; + info["ContentType"] = 2; + info["Value"] = ""; + contentInfo.append(info); + } + + for (int i = 0; i < row; i++) + { + std::string line; + if (end == std::string::npos) { + line = osdString.substr(start); + } else { + line = osdString.substr(start, end - start); + start = end + 1; + end = osdString.find('\n', start); + } + + Json::Value info; + info["ContentType"] = 1; + info["Value"] = line; + contentInfo.append(info); + } + 
content["ContentInfo"] = contentInfo; + + Json::Value area; + Json::Value topLeft; + topLeft["X"] = x; //9900 + topLeft["Y"] = y; + area["TopLeft"] = topLeft; + content["Area"] = area; + + contentList.append(content); + + root["ContentList"] = contentList; + + Json::StreamWriterBuilder writer; + jsonString = Json::writeString(writer, root); +} \ No newline at end of file diff --git a/app/src/main/cpp/netcamera/YuShiCtrl.h b/app/src/main/cpp/netcamera/YuShiCtrl.h new file mode 100644 index 00000000..dbc4c07a --- /dev/null +++ b/app/src/main/cpp/netcamera/YuShiCtrl.h @@ -0,0 +1,30 @@ +// +// Created by Matthew on 2025/3/4. +// + +#ifndef MICROPHOTO_YUSHICTRL_H +#define MICROPHOTO_YUSHICTRL_H + +#include "VendorCtrl.h" + +class YuShiCtrl : public VendorCtrl +{ +public: + using VendorCtrl::VendorCtrl; + virtual ~YuShiCtrl(); + + virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos); + virtual void EnableOsd(bool enable, uint8_t channel); + virtual std::string GetStreamingUrl(uint8_t streamID); + virtual bool UpdateTime(time_t ts); + virtual bool TakePhoto(uint8_t streamID, std::vector& img); + virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path); + virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY); + +private: + void OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString); + +}; + + +#endif //MICROPHOTO_YUSHICTRL_H diff --git a/app/src/main/cpp/netcamera/httpclient.cpp b/app/src/main/cpp/netcamera/httpclient.cpp index 20b3142d..f99d5ed4 100644 --- a/app/src/main/cpp/netcamera/httpclient.cpp +++ b/app/src/main/cpp/netcamera/httpclient.cpp @@ -10,6 +10,7 @@ static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid) std::vector* data = (std::vector*)lpVoid; if( NULL == data || NULL == buffer ) { + XYLOG(XYLOG_SEVERITY_ERROR,"OnWriteData callback -1"); return -1; } uint8_t* begin = (uint8_t *)buffer; @@ -27,11 +28,12 @@ static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype pur { int errcode = errno; printf("android_setsocknetwork errno=%d", errcode); + XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode); } return res == 0 ? 
CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR; } -int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector& data) +int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector& data, int* curlResVal/* = NULL*/) { CURLcode nRet; std::string auth; @@ -61,8 +63,10 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char if (netHandle != NETWORK_UNSPECIFIED) { +#if 0 curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback); curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle); +#endif } curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1); @@ -83,6 +87,10 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10); nRet = curl_easy_perform(curl); + if (curlResVal != NULL) + { + *curlResVal = nRet; + } long responseCode = 0; if (CURLE_OK == nRet) @@ -108,7 +116,8 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char } else { - XYLOG(XYLOG_SEVERITY_WARNING, "Net Photo failure, nRet=%d", (int)nRet); + curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &responseCode); + XYLOG(XYLOG_SEVERITY_WARNING, "Net Photo failure, nRet=%d, code=%d", (int)nRet, (int)responseCode); // printf("GET err=%d", nRet); } curl_easy_cleanup(curl); @@ -116,7 +125,7 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char return ((0 == nRet) && (responseCode == 200)) ? 0 : 1; } -int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data) +int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector& data, int* curlResVal/* = NULL*/) { std::string auth; @@ -145,8 +154,10 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char if (netHandle != NETWORK_UNSPECIFIED) { +#if 0 curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback); curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle); +#endif } if(contents != NULL) @@ -163,6 +174,10 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10); CURLcode nRet = curl_easy_perform(curl); + if (curlResVal != NULL) + { + *curlResVal = nRet; + } if (CURLE_OK != nRet) { printf("GET err=%d", nRet); @@ -197,6 +212,7 @@ bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photo if (fp != NULL) { fwrite(&data[0], data.size(), 1, fp); + fdatasync(fileno(fp)); fclose(fp); res = true; } @@ -235,10 +251,11 @@ int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned Json::StreamWriterBuilder writer; std::string sendbuf = Json::writeString(writer, outdata); - char respContent[1024]; + std::vector respContent; DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent); - XYLOG(XYLOG_SEVERITY_INFO, "sendlen= %zu, respContent=%s", sendbuf.size(), respContent); + // respContent.push_back(0); + // XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), (const char*)&respContent[0]); return 0; } diff --git a/app/src/main/cpp/netcamera/httpclient.h b/app/src/main/cpp/netcamera/httpclient.h index 496c8515..1d24d87b 100644 --- a/app/src/main/cpp/netcamera/httpclient.h +++ 
b/app/src/main/cpp/netcamera/httpclient.h @@ -18,7 +18,7 @@ bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr); -int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector& data); -int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data); +int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector& data, int* curlResVal = NULL); +int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector& data, int* curlResVal = NULL); #endif // __HTTP_CLIENT__ \ No newline at end of file diff --git a/app/src/main/cpp/serial/WeatherComm.cpp b/app/src/main/cpp/serial/WeatherComm.cpp index dabd6b61..b755d58d 100644 --- a/app/src/main/cpp/serial/WeatherComm.cpp +++ b/app/src/main/cpp/serial/WeatherComm.cpp @@ -112,37 +112,6 @@ int set_port_attr (int fd, int baudrate, int databit, const char *stopbit, char return (tcsetattr (fd, TCSANOW, &opt)); } -static void setInt(int cmd, int value) -{ - int fd = open("/dev/mtkgpioctrl", O_RDONLY); - IOT_PARAM param; - param.cmd = cmd; - param.value = value; - // LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value); - if( fd > 0 ) - { - int res = ioctl(fd, IOT_PARAM_WRITE, ¶m); - // LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result); - close(fd); - } - return; -} -static void setRS485Enable(bool z) { - setInt(CMD_SET_485_EN_STATE, z ? 1 : 0); -} - -static void set485WriteMode() { - setInt(CMD_SET_485_STATE, 1); -} - -static void set485ReadMode() { - setInt(CMD_SET_485_STATE, 0); -} -static void set12VEnable(bool z) { - setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0); -} - - /********************************************************************************* * 气象数据处理 * **********************************************************************************/ diff --git a/app/src/main/cpp/serial/WeatherComm.h b/app/src/main/cpp/serial/WeatherComm.h index 80c47f98..cd189441 100644 --- a/app/src/main/cpp/serial/WeatherComm.h +++ b/app/src/main/cpp/serial/WeatherComm.h @@ -8,10 +8,6 @@ #include #include "GPIOControl.h" -#define MAX_STRING_LEN 32 -#define IOT_PARAM_WRITE 0xAE -#define IOT_PARAM_READ 0xAF - #define LOGE(fmt, args...) 
__android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args) // 串口参数 @@ -34,14 +30,6 @@ typedef struct unsigned char m_au8RecvBuf[128];/* */ } SIO_PARAM_SERIAL_DEF; -typedef struct -{ - int cmd; - int value; - int result; - long value2; - char str[MAX_STRING_LEN]; -}IOT_PARAM; void PortDataProcess( void ); int serial_port_comm(); diff --git a/app/src/main/java/com/xypower/mpapp/BridgeProvider.java b/app/src/main/java/com/xypower/mpapp/BridgeProvider.java index 87ad2a67..8fee79c1 100644 --- a/app/src/main/java/com/xypower/mpapp/BridgeProvider.java +++ b/app/src/main/java/com/xypower/mpapp/BridgeProvider.java @@ -46,6 +46,9 @@ public class BridgeProvider extends ContentProvider { private final static String PATH_RECOG_PIC = "/recogPic"; + private final static String PATH_REQUEST_PWR_CTRL = "/requestPwrCtrl"; + private final static String PATH_RELEASE_PWR_CTRL = "/releasePwrCtrl"; + public BridgeProvider() { Log.i(TAG, "BridgeProvider"); } @@ -85,6 +88,9 @@ public class BridgeProvider extends ContentProvider { matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1); matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2); matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3); + matcher.addURI(AUTHORITY, PATH_REQUEST_PWR_CTRL, 4); + matcher.addURI(AUTHORITY, PATH_RELEASE_PWR_CTRL, 5); + Cursor cursor = null; int matched = matcher.match(uri); @@ -98,6 +104,12 @@ public class BridgeProvider extends ContentProvider { case 3: cursor = recoganizePicture(uri, selection, selectionArgs); break; + case 4: + cursor = requestPowerControl(uri, selection, selectionArgs); + break; + case 5: + cursor = recoganizePicture(uri, selection, selectionArgs); + break; default: break; } @@ -169,6 +181,48 @@ public class BridgeProvider extends ContentProvider { return matrixCursor; } + private Cursor requestPowerControl(Uri uri, String selection, String[] selectionArgs) { + String decodedSelection = stringFromBase64(selection); + int type = 0; + if (!TextUtils.isEmpty(decodedSelection)) { + Uri u = Uri.parse("http://a.com/?" + decodedSelection); + String val = u.getQueryParameter("type"); + try { + type = Integer.parseInt(val); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + + long nativeHandle = MicroPhotoService.requestPowerControl(type); + + String[] columns = { "pwrCtrl" }; + MatrixCursor matrixCursor = new MatrixCursor(columns, 1); + matrixCursor.addRow(new Object[] { Long.valueOf(nativeHandle) }); + return matrixCursor; + } + + private Cursor releasePowerControl(Uri uri, String selection, String[] selectionArgs) { + String decodedSelection = stringFromBase64(selection); + long nativeHandle = 0; + if (!TextUtils.isEmpty(decodedSelection)) { + Uri u = Uri.parse("http://a.com/?" + decodedSelection); + String val = u.getQueryParameter("handle"); + try { + nativeHandle = Long.parseLong(val); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + + boolean res = MicroPhotoService.releasePowerControl(nativeHandle); + + String[] columns = { "result" }; + MatrixCursor matrixCursor = new MatrixCursor(columns, 1); + matrixCursor.addRow(new Object[] { Integer.valueOf(res ? 
1 : 0) }); + return matrixCursor; + } + private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) { String decodedSelection = stringFromBase64(selection); diff --git a/app/src/main/java/com/xypower/mpapp/FloatingWindow.java b/app/src/main/java/com/xypower/mpapp/FloatingWindow.java deleted file mode 100644 index 3ce4025c..00000000 --- a/app/src/main/java/com/xypower/mpapp/FloatingWindow.java +++ /dev/null @@ -1,222 +0,0 @@ -package com.xypower.mpapp; - -import android.app.Service; -import android.content.Context; -import android.content.Intent; -import android.graphics.Rect; -import android.os.IBinder; -import android.text.Editable; -import android.text.TextWatcher; -import android.util.Log; -import android.view.LayoutInflater; -import android.view.View; -import android.view.WindowManager; -import android.view.inputmethod.InputMethodManager; -import android.widget.Button; -import android.widget.EditText; -import android.widget.TextView; - -public class FloatingWindow extends Service { - - private Context mContext; - private WindowManager mWindowManager; - private View mView; - - @Override - public IBinder onBind(Intent intent) { - return null; - } - - @Override - public void onCreate() { - super.onCreate(); - mContext = this; - } - - @Override - public int onStartCommand(Intent intent, int flags, int startId) { - mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE); - - allAboutLayout(intent); - moveView(); - - return super.onStartCommand(intent, flags, startId); - } - - @Override - public void onDestroy() { - - try { - if (mView != null) { - mWindowManager.removeView(mView); - } - } catch (Exception ex) { - // ex.printStackTrace(); - Log.e("FW", "Exception " + ex.getMessage()); - } - - super.onDestroy(); - } - - WindowManager.LayoutParams mWindowsParams; - private void moveView() { - /* - DisplayMetrics metrics = mContext.getResources().getDisplayMetrics(); - int width = (int) (metrics.widthPixels * 1f); - int height = (int) (metrics.heightPixels * 1f); - - mWindowsParams = new WindowManager.LayoutParams( - width,//WindowManager.LayoutParams.WRAP_CONTENT, - height,//WindowManager.LayoutParams.WRAP_CONTENT, - //WindowManager.LayoutParams.TYPE_SYSTEM_ALERT, - - (Build.VERSION.SDK_INT <= 25) ? 
WindowManager.LayoutParams.TYPE_PHONE : WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY - , - - //WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL, - WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL - | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN // Not displaying keyboard on bg activity's EditText - | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON - | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD - | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED - | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON, - //WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, //Not work with EditText on keyboard - PixelFormat.TRANSLUCENT); - - - mWindowsParams.gravity = Gravity.TOP | Gravity.LEFT; - //params.x = 0; - mWindowsParams.y = 100; - mWindowManager.addView(mView, mWindowsParams); - - mView.setOnTouchListener(new View.OnTouchListener() { - private int initialX; - private int initialY; - private float initialTouchX; - private float initialTouchY; - - long startTime = System.currentTimeMillis(); - @Override - public boolean onTouch(View v, MotionEvent event) { - if (System.currentTimeMillis() - startTime <= 300) { - return false; - } - if (isViewInBounds(mView, (int) (event.getRawX()), (int) (event.getRawY()))) { - editTextReceiveFocus(); - } else { - editTextDontReceiveFocus(); - } - - switch (event.getAction()) { - case MotionEvent.ACTION_DOWN: - initialX = mWindowsParams.x; - initialY = mWindowsParams.y; - initialTouchX = event.getRawX(); - initialTouchY = event.getRawY(); - break; - case MotionEvent.ACTION_UP: - break; - case MotionEvent.ACTION_MOVE: - mWindowsParams.x = initialX + (int) (event.getRawX() - initialTouchX); - mWindowsParams.y = initialY + (int) (event.getRawY() - initialTouchY); - mWindowManager.updateViewLayout(mView, mWindowsParams); - break; - } - return false; - } - }); - - */ - } - - private boolean isViewInBounds(View view, int x, int y) { - Rect outRect = new Rect(); - int[] location = new int[2]; - view.getDrawingRect(outRect); - view.getLocationOnScreen(location); - outRect.offset(location[0], location[1]); - return outRect.contains(x, y); - } - - private void editTextReceiveFocus() { - if (!wasInFocus) { - mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH; - mWindowManager.updateViewLayout(mView, mWindowsParams); - wasInFocus = true; - } - } - - private void editTextDontReceiveFocus() { - if (wasInFocus) { - mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH; - mWindowManager.updateViewLayout(mView, mWindowsParams); - wasInFocus = false; - hideKeyboard(mContext, edt1); - } - } - - private boolean wasInFocus = true; - private EditText edt1; - private void allAboutLayout(Intent intent) { - - LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE); - mView = layoutInflater.inflate(R.layout.ovelay_window, null); - - edt1 = (EditText) mView.findViewById(R.id.edt1); - final TextView tvValue = (TextView) mView.findViewById(R.id.tvValue); - Button btnClose = (Button) mView.findViewById(R.id.btnClose); - - edt1.setOnClickListener(new View.OnClickListener() { - @Override - public void onClick(View v) { - mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH; - // mWindowsParams.softInputMode = WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE; - mWindowManager.updateViewLayout(mView, mWindowsParams); - wasInFocus 
= true; - showSoftKeyboard(v); - } - }); - - edt1.addTextChangedListener(new TextWatcher() { - @Override - public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { - - } - - @Override - public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { - tvValue.setText(edt1.getText()); - } - - @Override - public void afterTextChanged(Editable editable) { - - } - }); - - btnClose.setOnClickListener(new View.OnClickListener() { - @Override - public void onClick(View view) { - stopSelf(); - } - }); - - } - - - private void hideKeyboard(Context context, View view) { - if (view != null) { - InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE); - imm.hideSoftInputFromWindow(view.getWindowToken(), 0); - } - } - - public void showSoftKeyboard(View view) { - if (view.requestFocus()) { - InputMethodManager imm = (InputMethodManager) - getSystemService(Context.INPUT_METHOD_SERVICE); - imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT); - } - } - -} diff --git a/app/src/main/java/com/xypower/mpapp/HeartBeatResponseReceiver.java b/app/src/main/java/com/xypower/mpapp/HeartBeatResponseReceiver.java new file mode 100644 index 00000000..a8beab8d --- /dev/null +++ b/app/src/main/java/com/xypower/mpapp/HeartBeatResponseReceiver.java @@ -0,0 +1,19 @@ +package com.xypower.mpapp; + +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.util.Log; + +public class HeartBeatResponseReceiver extends BroadcastReceiver { + + @Override + public void onReceive(Context context, Intent intent) { + String action = intent.getAction(); + if ("com.systemui.ACTION_HEARTBEAT_RESPONSE".equals(action)) { + long timestamp = intent.getLongExtra("timestamp", 0); + Log.d("MpApp","系统广播监听 timestamp:"+timestamp); + MicroPhotoService.infoLog("收到heartbeat广播 timestamp:" + timestamp); + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/xypower/mpapp/MainActivity.java b/app/src/main/java/com/xypower/mpapp/MainActivity.java index 99ea14fc..a6baaffc 100644 --- a/app/src/main/java/com/xypower/mpapp/MainActivity.java +++ b/app/src/main/java/com/xypower/mpapp/MainActivity.java @@ -1,32 +1,32 @@ package com.xypower.mpapp; import android.Manifest; +import android.app.Activity; +import android.app.AlarmManager; +import android.app.PendingIntent; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageManager; import android.location.Location; import android.location.LocationListener; -import android.location.LocationManager; +import android.net.Uri; import android.os.Build; -import android.os.FileObserver; import android.os.Handler; -import android.os.Looper; -import android.os.Message; import android.os.Messenger; +import android.os.PowerManager; import android.os.StrictMode; -import androidx.annotation.NonNull; import androidx.appcompat.app.ActionBar; import androidx.appcompat.app.AlertDialog; import androidx.core.app.ActivityCompat; import androidx.appcompat.app.AppCompatActivity; import android.os.Bundle; +import android.os.SystemClock; +import android.provider.Settings; import android.telephony.SubscriptionManager; -import android.telephony.TelephonyManager; import android.text.TextUtils; -import android.text.method.ScrollingMovementMethod; import android.util.Log; import android.view.KeyEvent; import android.view.View; @@ -35,11 +35,10 @@ import android.widget.Toast; import 
com.dev.devapi.api.SysApi; import com.xypower.common.CameraUtils; +import com.xypower.common.FilesUtils; import com.xypower.common.MicroPhotoContext; import com.xypower.mpapp.databinding.ActivityMainBinding; import com.xypower.mpapp.utils.LocationUtil; -import com.xypower.mpapp.utils.RandomReader; - import java.io.File; import java.lang.reflect.Method; @@ -56,17 +55,12 @@ public class MainActivity extends AppCompatActivity { private Messenger mMessenger = null; + private long mConfigModificationTime = 0; + @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) { - int activeSubId = SubscriptionManager.getActiveDataSubscriptionId(); - if (activeSubId == -1) { - MicroPhotoContext.selectSimCard(getApplicationContext(), 1); - } - } - Log.d(TAG, "Start inflate"); binding = ActivityMainBinding.inflate(getLayoutInflater()); Log.d(TAG, "Finish inflate"); @@ -74,68 +68,164 @@ public class MainActivity extends AppCompatActivity { // getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN); getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM); - ActionBar actionBar = getSupportActionBar(); + try { + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) { + int activeSubId = SubscriptionManager.getActiveDataSubscriptionId(); + if (activeSubId == -1) { + MicroPhotoContext.selectSimCard(getApplicationContext(), 1); + } + } - Date date = new Date(BuildConfig.BUILD_TIMESTAMP); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); - actionBar.setTitle(actionBar.getTitle().toString() + " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date)); + ActionBar actionBar = getSupportActionBar(); + + Date date = new Date(BuildConfig.BUILD_TIMESTAMP); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); + String caption = "MP"; + switch (MicroPhotoService.getCustomAppId()) { + case 1: + caption = "RP"; + break; + case 2: + caption = "N938"; + break; + default: + break; + } + caption += " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date); + sdf = new SimpleDateFormat("MM-dd HH:mm:ss"); + caption += " / " + sdf.format(new Date()); + actionBar.setTitle(caption); - StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); - StrictMode.setThreadPolicy(policy); + StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); + StrictMode.setThreadPolicy(policy); - initListener(); + initListener(); - Context appContext = getApplicationContext(); - String appPath = MicroPhotoContext.buildMpAppDir(appContext); - File appPathFile = new File(appPath); - if (!appPathFile.exists()) { - try { - appPathFile.mkdirs(); - } catch (Exception ex) { - ex.printStackTrace(); + Context appContext = getApplicationContext(); + String appPath = MicroPhotoContext.buildMpAppDir(appContext); + File appPathFile = new File(appPath); + if (!appPathFile.exists()) { + try { + appPathFile.mkdirs(); + } catch (Exception ex) { + ex.printStackTrace(); + } } - } - if (!MicroPhotoContext.hasMpAppConfig(appContext)) { + if (!MicroPhotoContext.hasMpAppConfig(appContext)) { + + String mstPath = MicroPhotoContext.buildMpResAppDir(appContext); + File mstPathFile = new File(mstPath); + File mpdataFile = new File(mstPathFile, "mpdata"); - String mstPath = MicroPhotoContext.buildMasterAppDir(appContext); - File 
mstPathFile = new File(mstPath); - File mpdataFile = new File(mstPathFile, "mpdata"); + if (mpdataFile.exists()) { + File dataFile = new File(appPathFile, "data"); + if (dataFile.exists()) { + try { + FilesUtils.delete(dataFile); + } catch (Exception ex) { + ex.printStackTrace(); + } + } - if (mpdataFile.exists()) { - File dataFile = new File(appPathFile, "data"); - if (dataFile.exists()) { try { - dataFile.delete(); + mpdataFile.renameTo(dataFile); } catch (Exception ex) { ex.printStackTrace(); } } + else { + Intent resIntent = getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPRES); + if (resIntent != null) { + resIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + resIntent.putExtra("initres", 1); + String sn = MicroPhotoService.getSerialNumber(); + if (!TextUtils.isEmpty(sn)) { + resIntent.putExtra("sn", sn); + } + startActivity(resIntent); + } + } + } + + Intent intent = getIntent(); + final int noDelay = intent.getIntExtra("noDelay", 0); + int rebootFlag = intent.getIntExtra("reboot", 0); + String reason = intent.getStringExtra("reason"); + if (!TextUtils.isEmpty(reason)) { + Log.w(TAG, "App Started with reason: " + reason); + } + if (rebootFlag == 1) { + Log.i(TAG, "After Reboot"); + } + Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay); + + MicroPhotoContext.AppConfig appConfig = loadConfigInfo(); + + binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning); + binding.btnStopServ.setEnabled(MicroPhotoService.isRunning); + + if (MicroPhotoService.isRunning) { + Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class); try { - mpdataFile.renameTo(dataFile); + // stopService(intent2); } catch (Exception ex) { ex.printStackTrace(); } } - } - Intent intent = getIntent(); - final int noDelay = intent.getIntExtra("noDelay", 0); - int rebootFlag = intent.getIntExtra("reboot", 0); - String reason = intent.getStringExtra("reason"); - if (!TextUtils.isEmpty(reason)) { - Log.w(TAG, "App Started with reason: " + reason); - } - if (rebootFlag == 1) { - Log.i(TAG, "After Reboot"); + if (MicroPhotoContext.hasMpAppConfig(appContext)) { + final Runnable runnable = new Runnable() { + @Override + public void run() { + + if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) { + if (binding.btnStartServ.isEnabled()) { + Log.i(TAG, "Perform AutoStart"); + binding.btnStartServ.performClick(); + } + } + } + }; + + long timeout = 500; + if (SystemClock.elapsedRealtime() < 180000) { + // In 3 minutes + timeout = 10000; // in 10 seconds + } + Handler handler = new Handler(); + handler.postDelayed(runnable, timeout); + Log.i(TAG, "Set AutoStart after " + Long.toString(timeout) + "ms"); + } + } catch (Exception ex) { + ex.printStackTrace(); } + } - Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay); + @Override + protected void onDestroy() { + super.onDestroy(); + } + @Override + protected void onResume() { + super.onResume(); + try { + File file = MicroPhotoContext.getMpAppConfigFile(getApplicationContext()); + if (file.lastModified() > mConfigModificationTime) { + loadConfigInfo(); + } + } catch (Exception ex) { + ex.printStackTrace(); + } + } + + protected MicroPhotoContext.AppConfig loadConfigInfo() { + final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(getApplicationContext()); + mConfigModificationTime = appConfig.modificationTime; - final MicroPhotoContext.AppConfig appConfig = 
MicroPhotoContext.getMpAppConfig(appContext); if (TextUtils.isEmpty(appConfig.cmdid)) { appConfig.cmdid = MicroPhotoService.getSerialNumber(); binding.cmdid.setText(appConfig.cmdid); @@ -153,9 +243,15 @@ public class MainActivity extends AppCompatActivity { } } - if (appConfig.networkProtocol < binding.networkProtocol.getCount()) { - binding.networkProtocol.setSelection(appConfig.networkProtocol); + protocolStr = appConfig.networkProtocol + "-"; + for (int idx = 0; idx < binding.networkProtocol.getCount(); idx++) { + String item = binding.networkProtocol.getItemAtPosition(idx).toString(); + if (item.startsWith(protocolStr)) { + binding.networkProtocol.setSelection(idx); + break; + } } + if (appConfig.encryption < binding.encryptions.getCount()) { binding.encryptions.setSelection(appConfig.encryption); } @@ -166,41 +262,7 @@ public class MainActivity extends AppCompatActivity { binding.network.setSelection(appConfig.network); } - binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning); - binding.btnStopServ.setEnabled(MicroPhotoService.isRunning); - - if (MicroPhotoService.isRunning) { - Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class); - try { - stopService(intent2); - } catch (Exception ex) { - ex.printStackTrace(); - } - } - - - - if (MicroPhotoContext.hasMpAppConfig(appContext)) { - Runnable runnable = new Runnable() { - @Override - public void run() { - - if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) { - if (binding.btnStartServ.isEnabled()) { - binding.btnStartServ.performClick(); - } - } - } - }; - - Handler handler = new Handler(); - handler.postDelayed(runnable, 500); - } - } - - @Override - protected void onDestroy() { - super.onDestroy(); + return appConfig; } protected void initListener() { @@ -234,6 +296,7 @@ public class MainActivity extends AppCompatActivity { startMicroPhotoService(appContext, curAppConfig, mMessenger); + Log.i(TAG, "Service auto-started"); binding.btnStartServ.setEnabled(false); binding.btnStopServ.setEnabled(true); } @@ -337,6 +400,7 @@ public class MainActivity extends AppCompatActivity { @Override public void onClick(View view) { + MicroPhotoService.infoLog("Call stopTerminalService Manually"); MicroPhotoService.stopTerminalService(getApplicationContext()); binding.btnStartServ.setEnabled(true); @@ -370,8 +434,53 @@ public class MainActivity extends AppCompatActivity { binding.btnRestartApp.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { - Context context = v.getContext().getApplicationContext(); - MicroPhotoService.restartApp(context, context.getPackageName(), "Manual Restart From MainActivity"); + restartSelfWithStartActivity(); + // restartSelfWithAlarmManager(); + } + + private void restartSelfWithStartActivity() { + + final Context context = getApplicationContext(); + + Intent intent = new Intent(context, MainActivity.class); + intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); + + int noDelay = 1; + intent.putExtra("noDelay", noDelay); + intent.putExtra("reason", "Manual Restart From MainActivity"); + + context.startActivity(intent); + + final Handler handler = new Handler(); + finish(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + System.exit(0); + } + }, 0); + } + + private void restartSelfWithAlarmManager() { + Intent intent = new Intent(MainActivity.this, MainActivity.class); + intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | 
Intent.FLAG_ACTIVITY_NEW_TASK); + + int noDelay = 1; + intent.putExtra("noDelay", noDelay); + intent.putExtra("reason", "Manual Restart From MainActivity"); + + // Create PendingIntent + PendingIntent pendingIntent = PendingIntent.getActivity( + MainActivity.this, 12312, intent, PendingIntent.FLAG_UPDATE_CURRENT/* | PendingIntent.FLAG_IMMUTABLE*/); + + AlarmManager alarmManager = (AlarmManager) MainActivity.this.getSystemService(Context.ALARM_SERVICE); + + if (alarmManager != null) { + alarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 200, pendingIntent); + } + + MainActivity.this.finish(); + System.exit(0); } }); @@ -403,8 +512,6 @@ public class MainActivity extends AppCompatActivity { binding.btnCameraInfo.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { - MicroPhotoService.setOtgState(true); - MicroPhotoService.setCam3V3Enable(true); Runnable runnable = new Runnable() { @@ -415,7 +522,6 @@ public class MainActivity extends AppCompatActivity { Log.d(TAG, cameraInfo); MicroPhotoService.setCam3V3Enable(false); - MicroPhotoService.setOtgState(false); MicroPhotoService.infoLog(cameraInfo); Toast.makeText(view.getContext(), cameraInfo, Toast.LENGTH_LONG).show(); @@ -592,4 +698,6 @@ public class MainActivity extends AppCompatActivity { } + + } \ No newline at end of file diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java index 188f85ec..ccd9b111 100644 --- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java +++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java @@ -20,6 +20,10 @@ import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.ImageDecoder; import android.graphics.Matrix; +import android.hardware.usb.UsbConstants; +import android.hardware.usb.UsbDevice; +import android.hardware.usb.UsbInterface; +import android.hardware.usb.UsbManager; import android.location.Location; import android.location.LocationListener; import android.location.LocationManager; @@ -33,7 +37,6 @@ import android.net.NetworkInfo; import android.net.NetworkRequest; import android.net.Uri; import android.net.wifi.WifiManager; -import android.os.BatteryManager; import android.os.Build; import android.os.Bundle; import android.os.Handler; @@ -50,13 +53,14 @@ import androidx.core.app.NotificationCompat; import androidx.core.content.FileProvider; import androidx.localbroadcastmanager.content.LocalBroadcastManager; -import android.provider.MediaStore; +import android.telephony.CellSignalStrength; import android.telephony.SignalStrength; import android.telephony.SubscriptionManager; import android.telephony.TelephonyManager; import android.text.TextUtils; import android.text.format.DateFormat; import android.util.Log; +import android.view.Gravity; import android.widget.RemoteViews; import android.widget.Toast; @@ -71,43 +75,49 @@ import com.xypower.mpapp.v2.Camera2VideoActivity; import com.xypower.mpapp.video.RawActivity; import java.io.BufferedReader; +import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import java.io.InputStreamReader; -import java.io.RandomAccessFile; import java.io.Reader; import java.lang.reflect.Method; import java.net.Inet4Address; import java.net.InetAddress; -import java.net.URI; -import java.nio.channels.FileLock; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import 
java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; public class MicroPhotoService extends Service { public static final String TAG = "MPLOG"; // Used to load the 'microphoto' library on application startup. static { + loadLibrary("microphoto"); } public static final int MSG_WHAT_LOG = 10; - public final static int MSG_WHAT_SENDING_HB = 40; - public final static int MSG_WHAT_MAX = 1000; + public final static int MEDIA_TYPE_PHOTO = 0; + public final static int MEDIA_TYPE_VIDEO = 1; + + public final static int MEDIA_TYPE_LOG = 2; + + public final static int MEDIA_TYPE_STREAMING = 0x10; + public final static int MEDIA_TYPE_STREAMING_OFF = 0x11; + + public final static int BROADCAST_REQUEST_CODE_HEARTBEAT = 1; public final static int BROADCAST_REQUEST_CODE_TAKING_PHOTO = 2; - public final static int BROADCAST_REQUEST_CODE_GPS = 2; + public final static int BROADCAST_REQUEST_CODE_GPS = 3; + public final static int BROADCAST_REQUEST_CODE_STOP_SERVICE = 4; - public static final int NOTIFICATION_ID_FOREGROUND_SERVICE = 8466503; + public static final int NOTIFICATION_ID_FOREGROUND_SERVICE = 8466508; public static final String ACTION_MSG_BROADCAST = "ACT_MSG_BROADCAST"; public static final String ACTION_START = "com.xypower.mpapp.ACT_START"; @@ -121,21 +131,28 @@ public class MicroPhotoService extends Service { private static final String ACTION_IMP_PUBKRY = "com.xypower.mpapp.ACT_IMP_PUBKEY"; private static final String ACTION_TAKE_PHOTO_MANUALLY = "com.xypower.mpapp.ACT_TP_M"; + + private static final String ACTION_CAMERA_CTRL = "com.xypower.mpapp.ACT_CAM_CTRL"; + private static final String ACTION_HEARTBEAT_MANUALLY = "com.xypower.mpapp.ACT_HB_M"; private static final String ACTION_UPDATE_CONFIGS = "com.xypower.mpapp.ACT_UPD_CFG"; public static final String ACTION_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED"; + private static final String EXTRA_PARAM_CHANNEL = "Channel"; private static final String EXTRA_PARAM_PRESET = "Preset"; private static final String EXTRA_PARAM_PHOTO_OR_VIDEO = "PhotoOrVideo"; + + private static final String EXTRA_PARAM_MEDIA_TYPE = "MediaType"; + + private static final String EXTRA_PARAM_URL = "Url"; private static final String EXTRA_PARAM_SCHEDULES = "Schedules"; private static final String EXTRA_PARAM_SCHEDULE = "Schedule_"; private static final String EXTRA_PARAM_TAKING_TIME = "TakingTime"; private static final String EXTRA_PARAM_TIME = "Time"; - // private static final String EXTRA_PARAM_TIMER_UID = "TimerUid"; - // private static final String EXTRA_PARAM_TIMEOUT = "Timeout"; - // private static final String EXTRA_PARAM_TIMES = "Times"; - // private static final String EXTRA_PARAM_ELASPED_TIMES = "ElapsedTimes"; + private static final String FOREGROUND_CHANNEL_ID = "fg_mpapp"; + private HeartBeatResponseReceiver mHeartBeatReceiver; + public static class STATE_SERVICE { public static final int CONNECTED = 10; public static final int NOT_CONNECTED = 0; @@ -153,7 +170,6 @@ public class MicroPhotoService extends Service { protected long mNativeHandle = 0; private AlarmReceiver mAlarmReceiver = null; private AlarmReceiver mLocalMsgReceiver = null; - private ScreenActionReceiver mScreenaAtionReceiver = null; private NetworkChangedReceiver mNetworkChangedReceiver = null; private long mGpsTimeout = 60000; // 1 minute @@ -163,15 +179,10 @@ public class MicroPhotoService extends Service { private ServiceHandler mHander = null; private Messenger mMessenger = null; - private static AtomicInteger mPendingIntentFeed = new AtomicInteger(); - private String 
mModelName = null; public static boolean isRunning = false; - FileOutputStream mAppRunningFile; - FileLock mAppLock; - private ConnectivityManager mConnectivityManager = null; private ConnectivityManager.NetworkCallback mNetworkCallback = null; @@ -186,6 +197,14 @@ public class MicroPhotoService extends Service { public MicroPhotoService() { } + private static void sleep(long ms) { + try { + Thread.sleep(ms); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + public void convertDngToPng(String dngFile, String pngFile) { ImageDecoder.Source src = ImageDecoder.createSource(new File(dngFile)); Bitmap bmp = null; @@ -250,35 +269,42 @@ public class MicroPhotoService extends Service { public void onCreate() { super.onCreate(); - try { - final String appPath = MicroPhotoContext.buildMpAppDir(this); - - File lockerFile = new File(appPath); - lockerFile = new File(lockerFile, "data/alive"); - try { - lockerFile.mkdirs(); - } catch (Exception ex) { - ex.printStackTrace(); - } - lockerFile = new File(lockerFile, "running"); - mAppRunningFile = new FileOutputStream(lockerFile); + Log.i(TAG, "MicroPhotoService::onCreate"); - for (int idx = 0; idx < 3; idx++) { + Context context = getApplicationContext(); + try { + if (usingEthernet()) { try { - mAppLock = mAppRunningFile.getChannel().tryLock(); - if (mAppLock != null && mAppLock.isValid()) { - infoLog("App Locked"); - break; - }else { - infoLog("Lock App Failed"); + File filesDir = context.getFilesDir(); + File ethShellFile = new File(filesDir, "eth.sh"); + if (ethShellFile.exists()) { + ethShellFile.delete(); } - try { - Thread.sleep(16); - } catch (Exception ex) { + FilesUtils.copyAndNormalizeTextAssetsFile(context, "eth.sh", ethShellFile.getAbsolutePath()); + + // sed -i 's/\r$//' eth.sh + + File ethToolFile = new File(filesDir, "ethtool"); + if (ethToolFile.exists()) { + ethToolFile.delete(); } + String srcFileName = android.os.Process.is64Bit() ? 
"ethtool" : "ethtool-v7a"; + FilesUtils.copyAssetsFile(context, srcFileName, ethToolFile.getAbsolutePath()); + ethToolFile.setExecutable(true); } catch (Exception ex) { ex.printStackTrace(); } + + mConnectivityManager = (ConnectivityManager)context.getSystemService(Context.CONNECTIVITY_SERVICE); + + Network[] nws = mConnectivityManager.getAllNetworks(); + for (Network nw : nws) { + NetworkInfo ni = mConnectivityManager.getNetworkInfo(nw); + if (ni.getType() == ConnectivityManager.TYPE_ETHERNET) { + updateEhernet(mNativeHandle, nw.getNetworkHandle(), true); + } + + } } } catch (Exception ex) { ex.printStackTrace(); @@ -286,12 +312,14 @@ public class MicroPhotoService extends Service { mHander = new ServiceHandler(); - mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); + mNotificationManager = (NotificationManager) context.getSystemService(NOTIFICATION_SERVICE); mStateService = STATE_SERVICE.NOT_CONNECTED; - DeviceUtil.getPhoneState(this.getApplicationContext()); - - mScreenaAtionReceiver = new ScreenActionReceiver(); + try { + DeviceUtil.getPhoneState(context); + } catch (Exception ex) { + ex.printStackTrace(); + } // 注册广播接受者 { @@ -301,8 +329,18 @@ public class MicroPhotoService extends Service { intentFilter.addAction(ACTION_UPDATE_CONFIGS); intentFilter.addAction(ACTION_IMP_PUBKRY); intentFilter.addAction(ACTION_TAKE_PHOTO_MANUALLY); + intentFilter.addAction(ACTION_CAMERA_CTRL); + intentFilter.addAction(ACTION_HEARTBEAT_MANUALLY); intentFilter.addAction(ACTION_GPS_TIMEOUT); intentFilter.addAction(ACTION_RESTART); + intentFilter.addAction(Intent.ACTION_TIME_CHANGED); + if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.P) { + if (usingEthernet()) { + intentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); + intentFilter.addAction(UsbManager.ACTION_USB_DEVICE_ATTACHED); + intentFilter.addAction(UsbManager.ACTION_USB_DEVICE_DETACHED); + } + } getApplicationContext().registerReceiver(mAlarmReceiver, intentFilter, Context.RECEIVER_EXPORTED | Context.RECEIVER_VISIBLE_TO_INSTANT_APPS); } { @@ -317,7 +355,7 @@ public class MicroPhotoService extends Service { intentFilter.addAction(ACTION_STOP); intentFilter.addAction(ACTION_UPDATE_CONFIGS); - LocalBroadcastManager.getInstance(getApplicationContext()).registerReceiver (mLocalMsgReceiver, intentFilter); + LocalBroadcastManager.getInstance(context).registerReceiver (mLocalMsgReceiver, intentFilter); } { @@ -329,39 +367,25 @@ public class MicroPhotoService extends Service { getApplicationContext().registerReceiver(mNetworkChangedReceiver, filter); } - /* - AlarmManager alarmManager = (AlarmManager) getApplicationContext().getSystemService(ALARM_SERVICE); - - while (true) { - AlarmManager.AlarmClockInfo aci = alarmManager.getNextAlarmClock(); - if (aci == null) { - break; - } + { + mHeartBeatReceiver = new HeartBeatResponseReceiver(); + IntentFilter filter = new IntentFilter(); + filter.addAction("com.systemui.ACTION_HEARTBEAT_RESPONSE"); + getApplicationContext().registerReceiver(mHeartBeatReceiver, filter); } - */ - - enableGps(true); - requestPosition(); - } - @Override - public void onDestroy() { - try { - - if (mAppLock != null) { - mAppLock.close(); - } - if (mAppRunningFile != null) { - mAppRunningFile.close(); - } + enableGps(true); + requestPosition(); } catch (Exception ex) { ex.printStackTrace(); } - + } + @Override + public void onDestroy() { mStateService = STATE_SERVICE.NOT_CONNECTED; - Log.w(TAG, "MicroPhotoService::onDestroy called"); + infoLog("MicroPhotoService::onDestroy called"); if 
(mNativeHandle != 0) { uninit(mNativeHandle); @@ -370,8 +394,14 @@ public class MicroPhotoService extends Service { } getApplicationContext().unregisterReceiver(mAlarmReceiver); - getApplicationContext().unregisterReceiver(mScreenaAtionReceiver); getApplicationContext().unregisterReceiver(mNetworkChangedReceiver); + getApplicationContext().unregisterReceiver(mHeartBeatReceiver); + + if (mConnectivityManager != null) { + if (mNetworkCallback != null) { + mConnectivityManager.unregisterNetworkCallback(mNetworkCallback); + } + } LocalBroadcastManager.getInstance(getApplicationContext()).unregisterReceiver(mLocalMsgReceiver); @@ -428,6 +458,42 @@ public class MicroPhotoService extends Service { mService = service; } public void onReceive(Context context, Intent intent) { + try { + ProcessReceivedAction(context, intent); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + private void restartSelfImpl(Context context, String reason) { + Intent intent = context.getPackageManager().getLaunchIntentForPackage(context.getPackageName()); + int noDelay = 1; + intent.putExtra("noDelay", noDelay); + if (!TextUtils.isEmpty(reason)) { + intent.putExtra("reason", reason); + } + + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + context.startActivity(intent); + Log.i(TAG, "Restart Self"); + System.exit(0); + } + + private boolean isMpMasterAlive(Context context) { + if (Build.TIME < MicroPhotoContext.BUILD_TIME_WO_SID_20250418) { + // 2025-04-18 old firmware + // Check Log file time + File file = new File(MicroPhotoContext.buildMasterAppDir(context) + "logs/mlog.txt"); + if (file.exists()) { + return ((System.currentTimeMillis() - file.lastModified()) < 1800000); + } else { + return false; + } + } + + return MicroPhotoContext.isAppAlive(context, MicroPhotoContext.PACKAGE_NAME_MPMASTER, MicroPhotoContext.SERVICE_NAME_MPMASTER); + } + + private void ProcessReceivedAction(final Context context, final Intent intent) { String action = intent.getAction(); if (TextUtils.equals(ACTION_HEARTBEAT, action)) { long ts = System.currentTimeMillis(); @@ -436,7 +502,7 @@ public class MicroPhotoService extends Service { infoLog("HB Timer Fired ACTION=" + action + " ExpTS=" + Long.toString(expectedHbTime)); Runnable runnable = new Runnable() { public void run() { - mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel()); + mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel(), true); } }; Thread th = new Thread(runnable); @@ -453,16 +519,36 @@ public class MicroPhotoService extends Service { mService.detectGpsStatus(); + /* ConnectivityManager connectivityManager = (ConnectivityManager)context.getSystemService(Context.CONNECTIVITY_SERVICE); if (connectivityManager != null) { if (!connectivityManager.isDefaultNetworkActive()) { infoLog("DefaultNetwork is NOT Active"); } } + */ } catch (Exception ex) { + ex.printStackTrace(); + } + try { + boolean mpmstAlive = isMpMasterAlive(context); + if (!mpmstAlive) { + Intent launchIntent = context.getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPMASTER); + launchIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + if (launchIntent != null) { + context.startActivity(launchIntent); + } + } + } catch (Exception ex) { + ex.printStackTrace(); } + Intent heartintent = new Intent("com.xy.xsetting.action"); + heartintent.putExtra("cmd", "heartbeat"); + heartintent.setPackage("com.android.systemui"); + context.sendBroadcast(heartintent); + } else if 
(TextUtils.equals(ACTION_TAKE_PHOTO, action)) { long ts = intent.getLongExtra(EXTRA_PARAM_TIME, 0); int cnt = intent.getIntExtra(EXTRA_PARAM_SCHEDULES, 0); @@ -472,9 +558,22 @@ public class MicroPhotoService extends Service { int channel = (int) ((val & 0xFFFF000L) >> 12); int preset = (int) ((val & 0xFF0L) >> 4); - boolean photoOrVideo = ((val & 0xFL) == 0); + int mediaType = (int)(val & 0xFL); - if (channel >= 256) + if (channel == 0x200) + { + // Heartbeat + long expectedHbTime = intent.getLongExtra("HeartbeatTime", ts); + infoLog("HB Timer Fired ACTION=" + action + " ExpTS=" + Long.toString(expectedHbTime)); + Runnable runnable = new Runnable() { + public void run() { + mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel(), true); + } + }; + Thread th = new Thread(runnable); + th.start(); + } + else if (channel >= 256) { infoLog("SERIAL Timer Fired: CH=" + (channel - 256) + " PR=" + preset); } @@ -482,42 +581,96 @@ public class MicroPhotoService extends Service { { infoLog("IMG Timer Fired: CH=" + channel + " PR=" + preset); } - mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, ts, photoOrVideo); + mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, ts, null, mediaType); + + if (channel == 0x200) + { + // Try to turn off GPS + try { + mService.detectGpsStatus(); + } catch (Exception ex) { + ex.printStackTrace(); + } + + try { + boolean mpmstAlive = isMpMasterAlive(context); + if (!mpmstAlive) { + if (Build.TIME >= MicroPhotoContext.BUILD_TIME_WO_SID_20250418) { + int pid = MicroPhotoContext.getProcessIdOfService(context, MicroPhotoContext.PACKAGE_NAME_MPMASTER, MicroPhotoContext.SERVICE_NAME_MPMASTER); + if (pid != 0) { + android.os.Process.killProcess(pid); + } + } + Intent launchIntent = context.getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPMASTER); + if (launchIntent != null) { + launchIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); + mService.startActivity(launchIntent); + } + } + } catch (Exception ex) { + ex.printStackTrace(); + } + } } } // Register Next Photo Timer - Date date = new Date(); - long startTime = (date.getTime() + 999) / 1000 + 1; // Add one second - mService.updateCaptureSchedule(startTime); + long startTime = System.currentTimeMillis() / 1000; + if (startTime < ts) { + // if current time is earlier schedule time + // we will register schedule time after ts + startTime = ts; + } + mService.updateCaptureSchedule(startTime + 1); } else if (TextUtils.equals(ACTION_HEARTBEAT_MANUALLY, action)) { - Log.i(TAG, "HB Timer Fired ACTION=" + action); - mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel()); + String actionType = intent.getStringExtra("type"); + if (TextUtils.isEmpty(actionType) || TextUtils.equals(actionType, "hb")) { + Log.i(TAG, "HB Timer Fired ACTION=" + action); + mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel(), false); + } else if (TextUtils.equals(actionType, "basicInfo")) { + mService.sendBasicInfo(mService.mNativeHandle); + } else if (TextUtils.equals(actionType, "workStatus")) { + mService.sendWorkStatus(mService.mNativeHandle); + } else if (TextUtils.equals(actionType, "fault")) { + String faultCode = intent.getStringExtra("faultCode"); + String faultInfo = intent.getStringExtra("faultInfo"); + mService.sendFault(mService.mNativeHandle, faultCode, faultInfo); + } } else if (TextUtils.equals(ACTION_TAKE_PHOTO_MANUALLY, action)) { int channel = intent.getIntExtra(EXTRA_PARAM_CHANNEL, 0); 
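                // Illustrative sketch of a hypothetical caller (not part of this change):
                // a manual capture can be requested by broadcasting the action handled here,
                // using the EXTRA_PARAM_* keys defined above, e.g.
                //   Intent i = new Intent("com.xypower.mpapp.ACT_TP_M"); // ACTION_TAKE_PHOTO_MANUALLY
                //   i.setPackage("com.xypower.mpapp");
                //   i.putExtra("Channel", 1);    // EXTRA_PARAM_CHANNEL
                //   i.putExtra("Preset", 0xFF);  // EXTRA_PARAM_PRESET
                //   i.putExtra("MediaType", 0);  // MEDIA_TYPE_PHOTO; 1 = MEDIA_TYPE_VIDEO
                //   context.sendBroadcast(i);
                // The extras are read back just below via the same keys.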
int preset = intent.getIntExtra(EXTRA_PARAM_PRESET, 0xFF); + String url = intent.getStringExtra(EXTRA_PARAM_URL); // long ts = intent.getLongExtra(EXTRA_PARAM_TIME, 0); - boolean photoOrVideo = intent.getBooleanExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, true); + + int mediaType = 0; + if (intent.hasExtra(EXTRA_PARAM_MEDIA_TYPE)) { + mediaType = intent.getIntExtra(EXTRA_PARAM_MEDIA_TYPE, 0); + } else if (intent.hasExtra(EXTRA_PARAM_PHOTO_OR_VIDEO)) { + boolean photoOrVideo = intent.getBooleanExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, true); + mediaType = photoOrVideo ? 0 : 1; + } long ts = System.currentTimeMillis() / 1000; Log.i(TAG, "Take Photo CH=" + channel + " PR=" + preset + " Mannually"); - mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, 0, photoOrVideo); - } else if (TextUtils.equals(ACTION_MSG_BROADCAST, action)) { - - int what = intent.getIntExtra("what", 0); - int data = intent.getIntExtra("data", 0); - - if (what == MSG_WHAT_SENDING_HB) { - mService.sendHeartbeat(mService.mNativeHandle, mService.getSignalLevel()); - } + mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, 0, url, mediaType); + } else if (TextUtils.equals(ACTION_CAMERA_CTRL, action)) { + int channel = intent.getIntExtra("channel", 0); + int preset = intent.getIntExtra("preset", 0); + int cmd = intent.getIntExtra("cmd", 0); + mService.sendCameraCtrl(mService.mNativeHandle, channel, preset, cmd); } else if (TextUtils.equals(ACTION_UPDATE_CONFIGS, action)) { - int restart = intent.getIntExtra("restart", 0); + final int restart = intent.getIntExtra("restart", 0); + final int channelToClean = intent.getIntExtra("channelToClean", -1); Log.i(TAG, "UPD CFG Fired ACTION=" + action + " restart=" + restart); - if (restart != 0) { - MicroPhotoService.restartApp(context, context.getPackageName(), "Config Updated"); - } else if (mService.mNativeHandle != 0) { - mService.reloadConfigs(mService.mNativeHandle); - } + new Thread(new Runnable() { + @Override + public void run() { + mService.reloadConfigs(mService.mNativeHandle, channelToClean); + if (restart != 0) { + restartSelfImpl(context, "Cfg Updated"); + } + } + }).start(); } else if (TextUtils.equals(ACTION_VIDEO_FINISHED, action)) { final boolean photoOrVideo = intent.getBooleanExtra("photoOrVideo", false); final boolean result = intent.getBooleanExtra("result", false); @@ -628,17 +781,57 @@ public class MicroPhotoService extends Service { Log.i(TAG, "After disable GPS"); mService.mPreviousGpsTimer = null; } else if (TextUtils.equals(ACTION_RESTART, action)) { - String reason = intent.getStringExtra("reason"); MicroPhotoService.infoLog("Recv RESTART APP cmd, reason=" + (TextUtils.isEmpty(reason) ? 
"" : reason)); - try { - Thread.sleep(100); - } catch (Exception ex) { - ex.printStackTrace(); + sleep(100); + restartSelfImpl(context, reason); + } else if (TextUtils.equals(Intent.ACTION_TIME_CHANGED, action)) { + mService.notifyTimeUpdated(mService.mNativeHandle); + Date date = new Date(); + long startTime = (date.getTime() + 999) / 1000; + mService.updateCaptureSchedule(startTime); + } else if (TextUtils.equals(ConnectivityManager.CONNECTIVITY_ACTION, action)) { + // mService.onNetworkChanged(intent); + } else if (TextUtils.equals(UsbManager.ACTION_USB_DEVICE_ATTACHED, action)) { + UsbDevice device = intent.getParcelableExtra(UsbManager.EXTRA_DEVICE); + if (isEthernetAdapter(device)) { + // 尝试设置以太网IP + // mService.onNetworkChanged(intent); } - MicroPhotoService.restartApp(context.getApplicationContext(), MicroPhotoContext.PACKAGE_NAME_MPAPP, "Restart Cmd"); + } else if (TextUtils.equals(UsbManager.ACTION_USB_DEVICE_DETACHED, action)) { + } } + + private boolean isEthernetAdapter(UsbDevice device) { + // 根据设备VID/PID或类型判断是否为以太网适配器 + // 例如常见的以太网适配器类型为USB Class 0x02 (Communications) 或 0x0a (CDC Data) + int usbDevClass = device.getDeviceClass(); + if (usbDevClass == UsbConstants.USB_CLASS_COMM || + usbDevClass == UsbConstants.USB_CLASS_CDC_DATA) { + return true; + } + + int interfaceCount = device.getInterfaceCount(); + for (int i = 0; i < interfaceCount; i++) { + UsbInterface usbInterface = device.getInterface(i); + + // CDC通讯类 (0x02)或CDC数据类 + if (usbInterface.getInterfaceClass() == UsbConstants.USB_CLASS_COMM || + usbInterface.getInterfaceClass() == UsbConstants.USB_CLASS_CDC_DATA) { + return true; + } + + // 特定CDC-ECM/NCM子类判断 + if (usbInterface.getInterfaceClass() == 0x02 && + (usbInterface.getInterfaceSubclass() == 0x06 || // ECM + usbInterface.getInterfaceSubclass() == 0x0D)) { // NCM + return true; + } + } + + return false; + } } // Will be called fron native @@ -668,6 +861,9 @@ public class MicroPhotoService extends Service { private void registerHeartbeatTimer(long timeoutMs) { + if (true) { + return; + } // 创建延迟意图 long triggerTime = System.currentTimeMillis() + timeoutMs; triggerTime -= (triggerTime % 1000); @@ -699,7 +895,13 @@ public class MicroPhotoService extends Service { val = schedules.get(idx).longValue(); channel = ((val & 0XFFFF000) >> 12); intent.putExtra(EXTRA_PARAM_SCHEDULE + idx, schedules.get(idx).longValue()); - if (channel > 0xFF) + if (channel == 0x200) + { + channelStr.append("(HB) "); + intent.putExtra("HeartbeatDuration", (int)((val & 0XFF0) >> 4) * 60000); + intent.putExtra("HeartbeatTime", scheduleTime * 1000); + } + else if (channel > 0xFF) { channel &= 0xFF; channelStr.append("(" + channel + "-" + Long.toString (((val & 0XFF0) >> 4), 16).toUpperCase() + "/SERIAL) "); @@ -844,6 +1046,8 @@ public class MicroPhotoService extends Service { @Override public int onStartCommand(Intent intent, int flags, int startId) { + Log.d(TAG, "MicroPhotoService::onStartCommand"); + if (intent == null) { stopForeground(true); stopSelf(); @@ -858,7 +1062,6 @@ public class MicroPhotoService extends Service { connect(); - getApplicationContext().registerReceiver(mScreenaAtionReceiver, mScreenaAtionReceiver.getFilter()); if (intent.hasExtra("messenger")) { mMessenger = intent.getParcelableExtra("messenger"); } @@ -913,11 +1116,6 @@ public class MicroPhotoService extends Service { break; case ACTION_STOP: - try { - getApplicationContext().unregisterReceiver(mScreenaAtionReceiver); - } catch (Exception ex) { - - } stopForeground(true); stopSelf(); break; @@ -929,6 +1127,128 @@ 
public class MicroPhotoService extends Service { return START_NOT_STICKY; } + long getDefaultNetworkHandle() { + long defaultNetHandle = 0; + if (mConnectivityManager != null) { + Network network = mConnectivityManager.getActiveNetwork(); + if (network != null) { + NetworkInfo networkInfo = mConnectivityManager.getNetworkInfo(network); + int type = networkInfo.getType(); + if (type == ConnectivityManager.TYPE_MOBILE || type == ConnectivityManager.TYPE_VPN || + type == ConnectivityManager.TYPE_MOBILE_DUN || type == ConnectivityManager.TYPE_MOBILE_HIPRI) { + defaultNetHandle = network.getNetworkHandle(); + } + } + } + + return defaultNetHandle; + } + + protected void onNetworkChanged(final Intent intent) { + try { + new Thread(new Runnable() { + @Override + public void run() { + for (int idx = 0; idx < 8; idx++) { + if (checkNetworkInterfaces("eth0")) { + break; + } + sleep(40); + } + setStaticNetwork("eth0", "192.168.68.91", "192.168.68.91", "192.168.68.0", 24); + } + + private boolean checkNetworkInterfaces(String iface) { + Network[] networks = mConnectivityManager.getAllNetworks(); + + for (Network network : networks) { + LinkProperties lp = mConnectivityManager.getLinkProperties(network); + if (lp != null) { + if (TextUtils.equals(iface, lp.getInterfaceName())) { + return true; + } + } + } + + return false; + } + + }).start(); + + + } catch (Exception ex) { + ex.printStackTrace(); + } + } + + protected class EhternetCallback extends ConnectivityManager.NetworkCallback { + @Override + public void onLost(Network network) { + infoLog("Network Lost " + network.toString()); + updateEhernet(mNativeHandle, network.getNetworkHandle(), false); + updateDefaultNetwork(); + } + @Override + public void onAvailable(final Network network) { + String ip = ""; + try { + NetworkInfo ni = mConnectivityManager.getNetworkInfo(network); + LinkProperties lp = mConnectivityManager.getLinkProperties(network); + if (lp != null) { + + final String iface = lp.getInterfaceName(); + List addresses = lp.getLinkAddresses(); + if (addresses != null && addresses.size() > 0) { + for (LinkAddress linkAddress : addresses) { + InetAddress inetAddress = linkAddress.getAddress(); + if (inetAddress != null && inetAddress instanceof Inet4Address) { + ip = inetAddress.getHostAddress(); + break; + } + } + } + } + } catch (Exception ex) { + ex.printStackTrace(); + } + + infoLog("Network Available " + network.toString() + " IP=" + ip + " Handle=" + Long.toString(network.getNetworkHandle())); + updateEhernet(mNativeHandle, network.getNetworkHandle(), true); + updateDefaultNetwork(); + } + + private void updateDefaultNetwork() { + MicroPhotoService thisService = MicroPhotoService.this; + long defaultNetHandle = thisService.getDefaultNetworkHandle(); + if (defaultNetHandle != 0) { + thisService.updateActiveNetwork(thisService.mNativeHandle, defaultNetHandle, true); + } + } + + @Override + public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) { + String interfaceName = linkProperties.getInterfaceName(); + if (interfaceName != null && + (interfaceName.startsWith("eth") || + interfaceName.startsWith("usb") || + interfaceName.startsWith("rndis"))) { + // 检测到以太网接口,可以尝试设置 IP + // configureEthernetIp(interfaceName); + + Log.d("onLinkPropertiesChanged", "Ethernet hardware detected"); + } + } + + @Override + public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) { + // 检查是否是以太网,即使没有完整的网络功能 + if (networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET)) { + // 
以太网硬件检测到 + Log.d("EthernetMonitor", "Ethernet hardware detected"); + } + } + } + private void startTerminalService(Intent intent) { if (MicroPhotoService.this.mNativeHandle != 0) { @@ -982,8 +1302,9 @@ public class MicroPhotoService extends Service { ex.printStackTrace(); } + long defaultNetHandle = service.getDefaultNetworkHandle(); service.mNativeHandle = init(appPath, server, port, cmdid, protocol, networkProtocol, - encryptData, 0, service.getSignalLevel(), versionCode, + encryptData, defaultNetHandle, service.getSignalLevel(), versionCode, BuildConfig.BUILD_TIMESTAMP, simcard, tfCardPath, nativeLibraryDir); if (service.mNativeHandle != 0) { @@ -994,6 +1315,29 @@ public class MicroPhotoService extends Service { long startTime = (date.getTime() + 999) / 1000; service.updateCaptureSchedule(startTime); + try { + if (usingEthernet()) { + mNetworkCallback = new EhternetCallback(); + NetworkRequest request = new NetworkRequest.Builder() + .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET) + .addTransportType(NetworkCapabilities.TRANSPORT_ETHERNET) + .build(); + + mConnectivityManager.registerNetworkCallback(request, mNetworkCallback); + + Network[] nws = mConnectivityManager.getAllNetworks(); + for (Network nw : nws) { + NetworkInfo ni = mConnectivityManager.getNetworkInfo(nw); + if (ni.getType() == ConnectivityManager.TYPE_ETHERNET) { + updateEhernet(mNativeHandle, nw.getNetworkHandle(), true); + } + + } + } + } catch (Exception ex) { + ex.printStackTrace(); + } + if (mPreviousLocation != null) { service.updatePosition(mNativeHandle, mPreviousLocation.getLongitude(), mPreviousLocation.getLatitude(), mPreviousLocation.getAccuracy(), mPreviousLocation.getTime() / 1000); @@ -1025,7 +1369,7 @@ public class MicroPhotoService extends Service { alarmIntent.setPackage(context.getPackageName()); alarmIntent.setAction(ACTION_STOP); - PendingIntent pendingIntent = PendingIntent.getBroadcast(context.getApplicationContext(), mPendingIntentFeed.getAndIncrement(), alarmIntent, 0); + PendingIntent pendingIntent = PendingIntent.getBroadcast(context.getApplicationContext(), BROADCAST_REQUEST_CODE_STOP_SERVICE, alarmIntent, PendingIntent.FLAG_UPDATE_CURRENT); AlarmManager alarmManager = (AlarmManager) context.getApplicationContext().getSystemService(ALARM_SERVICE); alarmManager.setExactAndAllowWhileIdle(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 100, pendingIntent); @@ -1042,11 +1386,6 @@ public class MicroPhotoService extends Service { uninit(mNativeHandle); mNativeHandle = 0; - try { - getApplicationContext().unregisterReceiver(mScreenaAtionReceiver); - } catch (Exception ex) { - - } stopForeground(true); stopSelf(); } @@ -1123,11 +1462,13 @@ public class MicroPhotoService extends Service { new Runnable() { public void run() { // Log.d(TAG, "Bluetooth Low Energy device is connected!!"); - Toast.makeText(getApplicationContext(), "MP Connected!", Toast.LENGTH_SHORT).show(); + Toast toast = Toast.makeText(getApplicationContext(), "MP Connected!", Toast.LENGTH_SHORT); + toast.setGravity(Gravity.TOP, 0, 0); + toast.show(); mStateService = STATE_SERVICE.CONNECTED; startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification()); } - }, 10000); + }, 8000); } @@ -1181,7 +1522,7 @@ public class MicroPhotoService extends Service { } notificationBuilder .setContent(remoteViews) - .setSmallIcon(R.mipmap.ic_launcher) + .setSmallIcon(R.drawable.ic_notification_mp) .setCategory(NotificationCompat.CATEGORY_SERVICE) .setOnlyAlertOnce(true) .setOngoing(true) @@ -1288,69 +1629,52 
@@ public class MicroPhotoService extends Service { SignalStrength ss = telephonyManager.getSignalStrength(); if (ss != null) { - return ss.getLevel(); + int ssVal = -1; + List cellSignalStrengths = null; + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.Q) { + cellSignalStrengths = ss.getCellSignalStrengths(); + } else { + int asu = ss.getGsmSignalStrength(); + if (asu != 99) { + ssVal = -113 + 2 * asu; + } + } + for (CellSignalStrength cellSignalStrength : cellSignalStrengths) { + ssVal = cellSignalStrength.getDbm(); + break; + } + return (ssVal << 8) | (ss.getLevel() & 0xFF); } } catch (Exception ex) { } return -1; } - public String getSystemInfo() { - - boolean isXyPlatform = mModelName.startsWith("tb8788"); - - StringBuilder sb = new StringBuilder(); - - IntentFilter intentFilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); - Intent intent = getApplicationContext().registerReceiver(null, intentFilter); - - int batteryStatus = intent.getIntExtra(BatteryManager.EXTRA_STATUS, -1); - int isCahrging = ((batteryStatus == BatteryManager.BATTERY_STATUS_CHARGING) || - (batteryStatus == BatteryManager.BATTERY_STATUS_FULL)) ? 1 : 0; - - int level = intent.getIntExtra("level", 0); ///电池剩余电量 - int scale = intent.getIntExtra("scale", 0); ///获取电池满电量数值 - // intent.getStringExtra("technology"); ///获取电池技术支持 - // intent.getIntExtra("status",BatteryManager.BATTERY_STATUS_UNKNOWN); ///获取电池状态 - // intent.getIntExtra("plugged", 0); ///获取电源信息 - // intent.getIntExtra("health",BatteryManager.BATTERY_HEALTH_UNKNOWN); ///获取电池健康度 - int bv = intent.getIntExtra("voltage", 0); /// mv - int temp = intent.getIntExtra("temperature", 0); ///获取电池温度 - - BatteryManager manager = (BatteryManager) getSystemService(BATTERY_SERVICE); - // manager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CHARGE_COUNTER); - int bca = manager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CURRENT_AVERAGE); - int bc = manager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CURRENT_NOW); - level = manager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CAPACITY); - - float bcaVal = (bca < 0) ? ((-bca)/1000000000) : (bca / 1000000000); - - sb.append("&BC=" + Float.toString(bcaVal)); - sb.append("&BV=" + Float.toString(((float)bv) / 1000)); - sb.append("&BP=" + level); - sb.append("&BS=" + scale); - sb.append("&CS=" + isCahrging); - - ConnectivityManager cm = (ConnectivityManager)getApplicationContext().getSystemService(Context.CONNECTIVITY_SERVICE); - boolean isMetered = cm.isActiveNetworkMetered(); + public String getFlowInfo() { - sb.append("&NS=" + (isMetered ? 
"1" : "0")); + Date dt = new Date(); + dt.setDate(1); + dt.setHours(0); + dt.setMinutes(0); + dt.setSeconds(0); - final TelephonyManager telephonyManager = (TelephonyManager) getApplicationContext().getSystemService(Context.TELEPHONY_SERVICE); - - SignalStrength ss = telephonyManager.getSignalStrength(); - // List css = ss.getCellSignalStrengths(); - - if (ss != null) { - int signalLevel = ss.getLevel(); - sb.append("&Signal4G=" + signalLevel); - sb.append("&Signal2G=" + signalLevel); - sb.append("&SL=" + signalLevel); + long startTime = dt.getTime(); + if (dt.getMonth() == 12) { + dt.setYear(dt.getYear() + 1); + dt.setMonth(1); + } else { + dt.setMonth(dt.getMonth() + 1); } + long endTime = dt.getTime() - 1; + NetworkUtils.Usage usage = NetworkUtils.getApplicationQuerySummary(this.getApplicationContext(), startTime, endTime, getApplicationInfo().uid); - // SysApi.getCpuRate(); + Date now = new Date(); + Date todayStart = new Date(now.getYear(), now.getMonth(), now.getDate(), 0, 0, 0); + long todayStartTime = todayStart.getTime(); + long todayEndTime = now.getTime(); + NetworkUtils.Usage todayUsage = NetworkUtils.getApplicationQuerySummary(this.getApplicationContext(), todayStartTime, todayEndTime, getApplicationInfo().uid); - return sb.toString(); + return "DRX=" + Long.toString(todayUsage.mobleRxBytes) + "&DTX=" + Long.toString(todayUsage.mobleTxBytes) + "&RX=" + Long.toString(usage.mobleRxBytes) + "&TX=" + Long.toString(usage.mobleTxBytes); } public boolean installApp(final String path, long delayedTime) { @@ -1377,43 +1701,42 @@ public class MicroPhotoService extends Service { public void run() { if (rebootType == 0) { Context context = MicroPhotoService.this.getApplicationContext(); - restartApp(context, context.getPackageName(), reason); + restartSelf(context, reason); } else { Log.w(TAG, "Recv REBOOT command"); SysApi.reboot(MicroPhotoService.this.getApplicationContext()); + new Thread(new Runnable() { + @Override + public void run() { + sleep(5000); + try { + Process process = Runtime.getRuntime().exec("/system/xbin/su root"); + DataOutputStream os = new DataOutputStream(process.getOutputStream()); + os.writeBytes("/system/bin/reboot\n"); + os.writeBytes("exit\n"); // 重要:退出su shell + os.flush(); + int exitValue = process.waitFor(); + } catch (Exception ex) { + ex.printStackTrace(); + } + } + }).start(); } } }; mHander.postDelayed(runnable, timeout > 0 ? 
timeout : 1000); } - public static void restartApp(Context context, String packageName, String reason) { - - Intent intent = new Intent(context, MainActivity.class); - int noDelay = 1; - intent.putExtra("noDelay", noDelay); - if (!TextUtils.isEmpty(reason)) { - intent.putExtra("reason", reason); - } - - intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); - context.startActivity(intent); - System.exit(0); - - - /* - Intent intent = context.getPackageManager().getLaunchIntentForPackage(context.getPackageName()); - int noDelay = 1; - intent.putExtra("noDelay", noDelay); - PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0); - AlarmManager mgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE); - - mgr.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + 500, restartIntent); - - System.exit(0); + public static void restartSelf(Context context, String reason) { - */ + Intent intent = new Intent(); + intent.setAction(ACTION_RESTART); + intent.setPackage(context.getPackageName()); + intent.putExtra("noDelay", 1); + intent.putExtra("reason", reason); + intent.putExtra("packageName", context.getPackageName()); + context.sendBroadcast(intent); } public void enableGps(boolean enabled) { @@ -1427,7 +1750,7 @@ public class MicroPhotoService extends Service { Intent intent = new Intent(); intent.setAction(ACTION_GPS_TIMEOUT); - mPreviousGpsTimer = PendingIntent.getBroadcast(this, mPendingIntentFeed.getAndIncrement(), intent, 0); + mPreviousGpsTimer = PendingIntent.getBroadcast(this, BROADCAST_REQUEST_CODE_GPS, intent, 0); alarmManager.setExactAndAllowWhileIdle(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + mGpsTimeout, mPreviousGpsTimer); } catch (Exception ex) { @@ -1471,7 +1794,7 @@ public class MicroPhotoService extends Service { String[] params = new String[]{""}; File workDir = context.getFilesDir(); - int exitCode = 0; + int exitCode = -1; try { Process process = Runtime.getRuntime().exec(cmd, params, workDir.getAbsoluteFile()); @@ -1503,78 +1826,144 @@ public class MicroPhotoService extends Service { return exitCode; } - public void setStaticNetwork(String iface, String ip, String netmask, String gateway) - { - if (mConnectivityManager == null || mNetworkCallback == null) { - mConnectivityManager = (ConnectivityManager)getSystemService(Context.CONNECTIVITY_SERVICE); - mNetworkCallback = new ConnectivityManager.NetworkCallback() { + public void setStaticNetwork(String iface, String ip, String gateway, String ipPrefix, int ipPrefixLength) { + int exitValue = -1; + boolean success = false; - @Override - public void onLost(Network network) { - infoLog("Network Lost " + network.toString()); - updateEhernet(mNativeHandle, network.getNetworkHandle(), false); - } - @Override - public void onAvailable(final Network network) { - String ip = ""; - try { - NetworkInfo ni = mConnectivityManager.getNetworkInfo(network); - LinkProperties lp = mConnectivityManager.getLinkProperties(network); - if (lp != null) { - List addresses = lp.getLinkAddresses(); - if (addresses != null && addresses.size() > 0) { - for (LinkAddress linkAddress : addresses) { - InetAddress inetAddress = linkAddress.getAddress(); - if (inetAddress != null && inetAddress instanceof Inet4Address) { - ip = inetAddress.getHostAddress(); - break; - } - } - } - } - } catch (Exception ex) { - ex.printStackTrace(); + + try { + File ethShellFile = new File(getFilesDir(), "eth.sh"); + if (ethShellFile.exists()) { + Process process = 
null;
+                DataOutputStream os = null;
+                BufferedReader inputReader = null;
+
+                try {
+                    process = Runtime.getRuntime().exec("/system/xbin/su");
+                    os = new DataOutputStream(process.getOutputStream());
+                    os.writeBytes("/system/bin/sh " + ethShellFile.getAbsolutePath() + "\n");
+                    os.writeBytes("exit\n"); // Important: exit the su shell
+
+                    os.flush();
+                    exitValue = process.waitFor();
+
+                    inputReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+                    String line = null;
+                    StringBuilder error = new StringBuilder();
+                    while ((line = inputReader.readLine()) != null) {
+                        error.append(line);
+                        error.append("\n");
+                    }
+
+                    if (exitValue == 0) {
+                        infoLog("Add route successfully Code=" + exitValue);
+                    } else {
+                        infoLog(error.toString());
                    }
+                } catch (Exception ex) {
+
+                } finally {
+                    FilesUtils.closeFriendly(os);
+                    FilesUtils.closeFriendly(inputReader);
+                    if (process != null) {
+                        process.destroy();
+                    }
+                }
+            } else {
+                Process process = Runtime.getRuntime().exec("/system/xbin/su root");
+                DataOutputStream os = new DataOutputStream(process.getOutputStream());
+
+                os.writeBytes("/system/bin/ip link set eth0 down\n");
+                os.writeBytes("/system/bin/sleep 1\n"); // Add delay
+                os.writeBytes("/system/bin/ip addr flush dev eth0\n"); // Clear existing config
+                os.writeBytes("/system/bin/ip addr add 192.168.68.91/24 broadcast 192.168.68.255 dev eth0\n");
+                os.writeBytes("/system/bin/ip link set eth0 up\n");
+                os.writeBytes("/system/bin/sleep 3\n");
+                os.writeBytes("/system/bin/ip route delete 192.168.68.0/24 table 20 2>/dev/null || true\n");
+                os.writeBytes("/system/bin/ip route add 192.168.68.0/24 dev eth0 proto static scope link table 20\n");
+                os.writeBytes("/system/bin/ip route flush cache\n");
+                // os.writeBytes("echo 'nameserver 8.8.8.8' > /etc/resolv.conf\n");
+                os.writeBytes("/system/bin/ip rule del to 192.168.68.0/24 2>/dev/null || true\n");
+                os.writeBytes("/system/bin/ip rule add from all to 192.168.68.0/24 lookup 20 prio 1000\n");
+                os.writeBytes("/system/bin/ip route flush cache\n");
+
+                // Verify routes were added
+                os.writeBytes("if ! /system/bin/ip rule | grep '192.168.68.0/24'; then\n");
+                os.writeBytes(" echo 'Route rule failed to apply, retrying...'\n");
+                os.writeBytes(" sleep 1\n");
+                os.writeBytes(" /system/bin/ip rule add from all to 192.168.68.0/24 lookup 20 prio 1000\n");
+                os.writeBytes(" /system/bin/ip route flush cache\n");
+                os.writeBytes("fi\n");
+
+                os.writeBytes("exit\n"); // Important: exit the su shell
+                os.flush();
+                exitValue = process.waitFor();
+                if (exitValue != 0) {
+                }
-                infoLog("Network Available " + network.toString() + " IP=" + ip);
-                updateEhernet(mNativeHandle, network.getNetworkHandle(), true);
+                BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+                String line = null;
+                StringBuilder error = new StringBuilder();
+                while ((line = reader.readLine()) != null) {
+                    error.append(line);
+                    error.append("\n");
                }
-            };
-            NetworkRequest request = new NetworkRequest.Builder()
-                    .addTransportType(NetworkCapabilities.TRANSPORT_ETHERNET)
-                    .build();
+                Log.e(TAG, error.toString());
+
+                sleep(100);
+                for (int idx = 0; idx < 10; idx++) {
+                    Process routeProcess3 = Runtime.getRuntime().exec("/system/xbin/su root");
+                    DataOutputStream os3 = new DataOutputStream(routeProcess3.getOutputStream());
+                    // os3.writeBytes("/system/bin/ip rule del to 192.168.68.0/24 2>/dev/null || true\n");
+                    os3.writeBytes("/system/bin/ip rule add from all to 192.168.68.0/24 lookup 20 prio 1000\n");
+                    os3.writeBytes("CMD_EXIT_CODE=$?\n"); // Save the command's exit code
+                    os3.writeBytes("echo \"CMD_RESULT:$CMD_EXIT_CODE\"\n"); // Echo the marker and the exit code
+                    os3.writeBytes("/system/bin/ip route flush cache\n");
+                    os3.writeBytes("exit\n"); // Important: exit the su shell
+                    os3.flush();
+
+                    int commandExitCode = -1;
+                    BufferedReader reader2 = new BufferedReader(new InputStreamReader(routeProcess3.getErrorStream()));
+                    StringBuilder error2 = new StringBuilder();
+                    while ((line = reader2.readLine()) != null) {
+                        if (line.startsWith("CMD_RESULT:")) {
+                            commandExitCode = Integer.parseInt(line.substring(11));
+                            Log.d("RouteConfig", "Command exit code: " + commandExitCode);
+                            break;
+                        }
+                    }
-            mConnectivityManager.registerNetworkCallback(request, mNetworkCallback);
+                    exitValue = routeProcess3.waitFor();
+                    if (exitValue == 0 || commandExitCode == 2) {
+                        infoLog("Add route successfully Code=" + exitValue);
+                        break;
+                    } else {
-            Network[] nws = mConnectivityManager.getAllNetworks();
-            for (Network nw : nws) {
-                NetworkInfo ni = mConnectivityManager.getNetworkInfo(nw);
-                if (ni.getType() == ConnectivityManager.TYPE_ETHERNET) {
-                    updateEhernet(mNativeHandle, nw.getNetworkHandle(), true);
+                    }
+                    sleep(500);
                }
            }
+
+        } catch (Exception e) {
+            Log.e(TAG, "Failed to set interface down: " + e.getMessage());
        }
+    }
-        if (!TextUtils.equals("0.0.0.0", ip)) {
-            Intent intent = new Intent();
-            intent.putExtra("cmd", "setnet");
-            intent.putExtra("staticip", true);
-            intent.putExtra("iface", iface);
-            intent.putExtra("ip", ip);
-            intent.putExtra("netmask", netmask);
-            if (!TextUtils.isEmpty(gateway)) {
-                intent.putExtra("gateway", gateway);
-            }
-            // intent.putExtra("dns1", "8.8.8.8");
-            // intent.putExtra("dns2", "192.168.19.1");
-            sendBroadcast(getApplicationContext(), intent);
+    public int executeCommand(String cmd) {
+        int resCode = -1;
+        try {
+            Process downProcess = Runtime.getRuntime().exec(cmd);
+            resCode = downProcess.waitFor();
+        } catch (Exception ex) {
+            ex.printStackTrace();
        }
+        return resCode;
    }

-    public static void sendBroadcast(Context context, Intent intent)
-    {
+    public static void sendBroadcast(Context context, Intent intent) {
        intent.setAction("com.xy.xsetting.action");
intent.setPackage("com.android.systemui"); intent.addFlags(Intent.FLAG_RECEIVER_FOREGROUND); @@ -1596,22 +1985,33 @@ cellSignalStrengthGsm.getDbm(); protected native long[] getPhotoTimeData(long handler, long startTime); protected native long[] getPhotoTimeData2(long handler); // protected native long[] getNextScheduleItem(long handler); - protected native boolean notifyToTakePhoto(long handler, int channel, int preset, long scheduleTime, boolean photoOrVideo); + protected native boolean notifyToTakePhoto(long handler, int channel, int preset, long scheduleTime, String url, int mediaType); - protected native boolean sendHeartbeat(long handler, int signalLevel); - protected native boolean reloadConfigs(long handler); + protected native void notifyTimeUpdated(long handler); + + protected native boolean sendHeartbeat(long handler, int signalLevel, boolean scheduled); + protected native boolean sendBasicInfo(long handler); + protected native boolean sendWorkStatus(long handler); + protected native boolean sendFault(long handler, String faultCode, String faultInfo); + protected native boolean reloadConfigs(long handler, int channelToClean); protected native void updatePosition(long handler, double lon, double lat, double radius, long ts); protected native boolean updateEhernet(long handler, long nativeNetworkHandle, boolean available); + protected native boolean updateActiveNetwork(long handler, long nativeNetworkHandle, boolean available); protected native boolean uninit(long handler); protected native void recordingFinished(long handler, boolean photoOrVideo, boolean result, String path, long videoId); protected native void captureFinished(long handler, boolean photoOrVideo, boolean result, Bitmap bm, long videoId); protected native void burstCaptureFinished(long handler, boolean result, int numberOfCaptures, String pathsJoinedByTab, boolean frontCamera, int rotation, long photoId); public static native long takePhoto(int channel, int preset, boolean photoOrVideo, String configFilePath, String path); + + protected native void sendCameraCtrl(long handler, int channel, int preset, int cmd); + public static native void releaseDeviceHandle(long deviceHandle); public static native boolean sendExternalPhoto(long deviceHandle, String path, long photoInfo); public static native void infoLog(String log); + public static native boolean usingEthernet(); + public static native void setOtgState(boolean enabled); public static native void setCam3V3Enable(boolean enabled); public static native String getSerialNumber(); @@ -1629,6 +2029,11 @@ cellSignalStrengthGsm.getDbm(); public static native boolean exportPublicKeyFile(int index, String outputPath); public static native boolean exportPrivateFile(int index, String outputPath); + public static native long requestPowerControl(int type); + public static native boolean releasePowerControl(long powerControlHandle); + + public static native int getCustomAppId(); + ////////////////////////GPS//////////////////// // private static final String GPS_LOCATION_NAME = android.location.LocationManager.GPS_PROVIDER; private LocationManager mLocationManager; diff --git a/app/src/main/java/com/xypower/mpapp/ScreenActionReceiver.java b/app/src/main/java/com/xypower/mpapp/ScreenActionReceiver.java deleted file mode 100644 index e563744d..00000000 --- a/app/src/main/java/com/xypower/mpapp/ScreenActionReceiver.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.xypower.mpapp; - -import android.content.BroadcastReceiver; -import android.content.Context; -import 
android.content.Intent; -import android.content.IntentFilter; -import android.os.Build; -import android.util.Log; -import android.widget.Toast; - -public class ScreenActionReceiver extends BroadcastReceiver { - - private String TAG = "ScreenActionReceiver"; - - @Override - public void onReceive(Context context, Intent intent) { - - //LOG - StringBuilder sb = new StringBuilder(); - sb.append("Action: " + intent.getAction() + "\n"); - // sb.append("URI: " + intent.toUri(Intent.URI_INTENT_SCHEME).toString() + "\n"); - String log = sb.toString(); - Log.d(TAG, log); - Toast.makeText(context, log, Toast.LENGTH_SHORT).show(); - - String action = intent.getAction(); - try { - - if (Intent.ACTION_SCREEN_ON.equals(action)) { - Log.d(TAG, "screen is on..."); - Toast.makeText(context, "screen ON", Toast.LENGTH_SHORT); - - //Run the locker - - context.startService(new Intent(context, FloatingWindow.class)); - } else if (Intent.ACTION_SCREEN_OFF.equals(action)) { - Log.d(TAG, "screen is off..."); - Toast.makeText(context, "screen OFF", Toast.LENGTH_SHORT); - - } else if (Intent.ACTION_USER_PRESENT.equals(action)) { - Log.d(TAG, "screen is unlock..."); - Toast.makeText(context, "screen UNLOCK", Toast.LENGTH_SHORT); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - context.startForegroundService(new Intent(context, FloatingWindow.class)); - } else { - context.startService(new Intent(context, FloatingWindow.class)); - } - - } else if (Intent.ACTION_BOOT_COMPLETED.equals(action)) { - Log.d(TAG, "boot completed..."); - Toast.makeText(context, "BOOTED..", Toast.LENGTH_SHORT); - //Run the locker -/* Intent i = new Intent(context, FloatingWindow.class); - context.startService(i); - -*/ - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - // context.startForegroundService(new Intent(context, FloatingWindow.class)); - } else { - // context.startService(new Intent(context, FloatingWindow.class)); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - - } - - public IntentFilter getFilter(){ - final IntentFilter filter = new IntentFilter(); - filter.addAction(Intent.ACTION_SCREEN_OFF); - filter.addAction(Intent.ACTION_SCREEN_ON); - return filter; - } - -} \ No newline at end of file diff --git a/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java b/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java index 8bc22f94..681dff3d 100644 --- a/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java +++ b/app/src/main/java/com/xypower/mpapp/v2/Camera2VideoActivity.java @@ -769,7 +769,10 @@ public class Camera2VideoActivity extends AppCompatActivity { @Override public void run() { Log.i("OSD", "Record Stop " + Long.toString(mDuration)); - mGPUCameraRecorder.stop(); + if (mGPUCameraRecorder != null) { + mGPUCameraRecorder.stop(); + } + int aa = 0; } @@ -809,6 +812,7 @@ public class Camera2VideoActivity extends AppCompatActivity { .cameraId(Integer.toString(mCameraId)) .mute(true) .duration(mDuration * 1000) + .rotation(mOrientation) .build(); Log.i("OSD", "mGPUCameraRecorder created"); diff --git a/app/src/main/java/com/xypower/mpapp/video/VideoFragment.java b/app/src/main/java/com/xypower/mpapp/video/VideoFragment.java index 8d894e6d..d9571a81 100644 --- a/app/src/main/java/com/xypower/mpapp/video/VideoFragment.java +++ b/app/src/main/java/com/xypower/mpapp/video/VideoFragment.java @@ -3,7 +3,6 @@ package com.xypower.mpapp.video; import android.Manifest; import android.app.Activity; import android.app.Dialog; -import android.content.ComponentName; import 
android.content.Context; import android.content.DialogInterface; import android.content.Intent; @@ -18,7 +17,6 @@ import android.graphics.PorterDuff; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.SurfaceTexture; -import android.graphics.drawable.BitmapDrawable; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; @@ -36,8 +34,6 @@ import androidx.appcompat.app.AlertDialog; import androidx.core.app.ActivityCompat; import androidx.fragment.app.DialogFragment; import androidx.fragment.app.Fragment; -import androidx.legacy.app.FragmentCompat; -import androidx.legacy.app.FragmentCompat; import androidx.localbroadcastmanager.content.LocalBroadcastManager; import android.os.Environment; @@ -54,7 +50,6 @@ import android.view.TextureView; import android.view.View; import android.view.ViewGroup; import android.widget.Button; -import android.widget.Toast; import com.xypower.mpapp.MicroPhotoService; import com.xypower.mpapp.R; @@ -76,7 +71,7 @@ import java.util.concurrent.TimeUnit; * Use the {@link VideoFragment#newInstance} factory method to * create an instance of this fragment. */ -public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener, FragmentCompat.OnRequestPermissionsResultCallback { +public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener { public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH"; public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED"; diff --git a/app/src/main/res/drawable/ic_notification_mp.xml b/app/src/main/res/drawable/ic_notification_mp.xml new file mode 100644 index 00000000..0fd18e06 --- /dev/null +++ b/app/src/main/res/drawable/ic_notification_mp.xml @@ -0,0 +1,5 @@ + + + diff --git a/app/src/main/res/layout-land/activity_main.xml b/app/src/main/res/layout-land/activity_main.xml index 36b42e29..384045f7 100644 --- a/app/src/main/res/layout-land/activity_main.xml +++ b/app/src/main/res/layout-land/activity_main.xml @@ -205,13 +205,13 @@ app:layout_constraintTop_toTopOf="@+id/btnStartServ" />