Compare commits

..

2 Commits

Author SHA1 Message Date
Matthew e9a5df4b5a Vendor interface encapsulation (供应商接口封装) 4 months ago
Matthew 28a09335f5 Streaming media implementation (流媒体实现) 4 months ago

@ -5,7 +5,7 @@ plugins {
// 10,00,000 major-minor-build
def AppMajorVersion = 1
def AppMinorVersion = 3
def AppBuildNumber = 196
def AppBuildNumber = 40
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
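As a quick check of the encoding described in the comment above: the new values give a version name of 1.3.40 and a version code of 1*100000 + 3*1000 + 40 = 103040, whereas the previous build number 196 gave 1.3.196 and 103196.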
@ -98,7 +98,7 @@ android {
def abi = output.getFilter(com.android.build.OutputFile.ABI)
if (abi == null) abi = "all"
if (abi.contains("v7a")) prevFileName = "N938"
def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk"
def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk"
outputFileName = fileName
}
}
@ -126,10 +126,10 @@ android {
dependencies {
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.legacy:legacy-support-v13:1.0.0'
// implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.0.0'
// implementation "androidx.core:core:1.10.0" // 使
implementation 'androidx.fragment:fragment:1.3.6'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
implementation 'com.google.android.material:material:1.8.0'
implementation project(path: ':common')

@ -1,8 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:sharedUserId="com.xypower.mp"
tools:ignore="Deprecated">
xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
@ -12,10 +10,9 @@
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.MANAGE_NETWORK_POLICY"
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.READ_PRIVILEGED_PHONE_STATE"
tools:ignore="ProtectedPermissions" />
@ -58,7 +55,6 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
<uses-permission android:name="android.permission.USB_PERMISSION" />
<uses-permission
android:name="android.permission.DEVICE_POWER"
tools:ignore="ProtectedPermissions" />
@ -67,23 +63,14 @@
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.START_ACTIVITIES_FROM_BACKGROUND"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
tools:ignore="ProtectedPermissions" /> <!-- WiFi AP startTethering -->
<uses-permission
android:name="android.permission.TETHER_PRIVILEGED"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.CONNECTIVITY_INTERNAL"
tools:ignore="ProtectedPermissions" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="com.mediatek.camera.feature.mfnr" />
<uses-permission android:name="android.hardware.usb.accessory" />
<uses-feature android:name="android.hardware.usb.host" />
<uses-feature
android:name="android.hardware.telephony"
android:required="false" />
<queries>
<provider
@ -99,9 +86,6 @@
<intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent>
<intent>
<action android:name="android.intent.action.TIME_CHANGED" />
</intent>
<package android:name="com.xypower.mplive" />
</queries>
@ -174,10 +158,11 @@
<category android:name="android.intent.category.default" />
</intent-filter>
</service>
<service android:name=".FloatingWindow" />
<receiver
android:name=".MicroPhotoService$AlarmReceiver"
android:exported="true" >
</receiver>
android:exported="true" />
<receiver
android:name=".BootBroadcastReceiver"
android:enabled="true"
@ -191,7 +176,17 @@
</intent-filter>
</receiver>
<receiver android:name=".NetworkChangedReceiver" />
<receiver
android:name=".ScreenActionReceiver"
android:exported="true">
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED" />
<action android:name="android.intent.action.SCREEN_ON" />
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.USER_UNLOCKED" />
</intent-filter>
</receiver>
<receiver
android:name="com.xypower.common.UpdateReceiver"
android:enabled="true"
@ -204,14 +199,7 @@
<data android:scheme="package" />
</intent-filter>
</receiver>
<receiver
android:name=".HeartBeatResponseReceiver"
android:enabled="true"
android:exported="true">
<intent-filter >
<action android:name="com.systemui.ACTION_HEARTBEAT_RESPONSE" />
</intent-filter>
</receiver>
<activity
android:name=".MainActivity"
android:exported="true"

@ -1,227 +0,0 @@
#!/system/bin/sh
# ==============================================
# Configuration parameters - modify as needed
# ==============================================
ETH_IP="192.168.68.91" # Ethernet IP address
ETH_NETMASK="24" # Subnet mask (CIDR format)
ETH_NETWORK="192.168.68.0" # Network address
ETH_BROADCAST="192.168.68.255" # Broadcast address
ETH_GATEWAY="192.168.68.1" # Default gateway
ROUTE_TABLE="20" # Routing table number
MAX_INIT_WAIT=150 # Maximum seconds to wait for ethernet interface
MAX_UP_WAIT=10 # Maximum seconds to wait for interface to come UP
MAX_ROUTE_WAIT=5 # Maximum seconds to wait for routing rules
# For debugging only - comment out in production
# set -x
ANDROID_VERSION=$(getprop ro.build.version.release 2>/dev/null | cut -d '.' -f1)
# Record script start time
SCRIPT_START=$(date +%s)
# Cleanup function - handles unexpected interruptions
cleanup() {
echo "Script interrupted, cleaning up..." >&2
# Add additional cleanup code here if needed
exit 1
}
trap cleanup INT TERM
# Get script directory for finding tools like ethtool
SCRIPT_PATH="$0"
# Ensure path is absolute
case "$SCRIPT_PATH" in
/*) ;; # Already absolute path
*) SCRIPT_PATH="$PWD/$SCRIPT_PATH" ;;
esac
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
echo "Script directory detected as: $SCRIPT_DIR"
# Only configure rp_filter for eth0 interface
echo 0 > /proc/sys/net/ipv4/conf/eth0/rp_filter 2>/dev/null || true
# Wait for eth0 interface to appear
WAITED=0
while [ $WAITED -lt $MAX_INIT_WAIT ]; do
if [ -d "/sys/class/net/eth0" ]; then
echo "eth0 found after $WAITED seconds"
break
fi
echo "Wait eth0... ($WAITED/$MAX_INIT_WAIT)"
sleep 0.1
WAITED=$((WAITED+1))
done
# Check if eth0 exists
if ! [ -d "/sys/class/net/eth0" ]; then
echo "Error: eth0 not exists" >&2
exit 1
fi
# Check physical connection status
if [ -f "/sys/class/net/eth0/carrier" ]; then
CARRIER=$(cat /sys/class/net/eth0/carrier)
echo "Physical connection status: $CARRIER (1=connected, 0=disconnected)"
if [ "$CARRIER" != "1" ]; then
echo "Warning: Ethernet physical connection may have issues, please check the cable" >&2
fi
fi
# Clear previous configuration
/system/bin/ip link set eth0 down
/system/bin/ip addr flush dev eth0
/system/bin/ip route flush dev eth0
/system/bin/ip route flush table $ROUTE_TABLE
/system/bin/ip rule del to $ETH_NETWORK/$ETH_NETMASK 2>/dev/null || true
# Configure physical layer with ethtool (while interface is DOWN)
if [ -x "$SCRIPT_DIR/ethtool" ]; then
echo "Using ethtool from script directory: $SCRIPT_DIR/ethtool"
"$SCRIPT_DIR/ethtool" -s eth0 speed 10 duplex full autoneg off
# Try alternative path next
elif [ -x "/data/data/com.xypower.mpapp/files/ethtool" ]; then
echo "Configuring eth0 to 10Mbps full duplex..."
/data/data/com.xypower.mpapp/files/ethtool -s eth0 speed 10 duplex full autoneg off
else
echo "Warning: ethtool not found, falling back to sysfs configuration" >&2
# Try sysfs configuration as fallback
if [ -f "/sys/class/net/eth0/speed" ]; then
echo "off" > /sys/class/net/eth0/autoneg 2>/dev/null || true
echo "10" > /sys/class/net/eth0/speed 2>/dev/null || true
echo "full" > /sys/class/net/eth0/duplex 2>/dev/null || true
fi
fi
# ====================================================
# MTK Android 9 IP configuration with loss prevention
# ====================================================
# Configure IP address first while interface is DOWN
echo "Setting IP address while interface is DOWN..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
PRE_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration before UP: $PRE_UP_IP (1=configured, 0=missing)"
# Enable interface and wait for UP
echo "Bringing up interface..."
/system/bin/ip link set eth0 up
if [ "$ANDROID_VERSION" = "9" ]; then
sleep 3
else
# Use standard configuration for other devices
sleep 1
fi
# Check if IP was lost after interface UP (common issue on MTK devices)
POST_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration after UP: $POST_UP_IP (1=retained, 0=lost)"
# IP address lost detection and recovery
if [ "$PRE_UP_IP" = "1" ] && [ "$POST_UP_IP" = "0" ]; then
echo "Warning: IP address was lost after bringing interface up - MTK issue detected"
echo "Reapplying IP configuration..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
# Check if reapplied configuration worked
FIXED_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP reapplication result: $FIXED_IP (1=success, 0=still missing)"
# If standard method fails, try MTK-specific approaches
if [ "$FIXED_IP" = "0" ]; then
echo "Standard IP configuration failed, trying MTK-specific methods"
# Try ifconfig if available (works better on some MTK devices)
if command -v ifconfig >/dev/null 2>&1; then
echo "Using ifconfig method..."
ifconfig eth0 $ETH_IP netmask 255.255.255.0 up
sleep 1
fi
# Try Android's netd service if available
if [ -x "/system/bin/ndc" ]; then
echo "Using MTK netd service..."
/system/bin/ndc network interface setcfg eth0 $ETH_IP 255.255.255.0 up
sleep 1
fi
fi
fi
# Use loop to wait for interface UP instead of fixed sleep
WAITED=0
while [ $WAITED -lt $MAX_UP_WAIT ]; do
# Check both link status and IP configuration
IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "1" ]; then
echo "Interface is UP with correct IP after $WAITED seconds"
break
fi
echo "Waiting for interface UP with IP... ($WAITED/$MAX_UP_WAIT)"
# If interface is UP but IP is missing, reapply IP
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "0" ]; then
echo "Interface UP but IP missing, reapplying IP..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
fi
sleep 0.5
WAITED=$((WAITED+1))
done
# Final status check
FINAL_IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
FINAL_IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$FINAL_IF_STATUS" != "1" ] || [ "$FINAL_IP_STATUS" != "1" ]; then
echo "Warning: Failed to achieve stable interface state with IP" >&2
echo "Final interface status: $FINAL_IF_STATUS (1=UP, 0=DOWN)"
echo "Final IP status: $FINAL_IP_STATUS (1=configured, 0=missing)"
/system/bin/ip addr show eth0
else
echo "Successfully configured eth0 with IP $ETH_IP"
fi
# First add to main routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link
# Then add to specified routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link table $ROUTE_TABLE
ADD_ROUTE_STATUS=$?
if [ $ADD_ROUTE_STATUS -eq 0 ]; then
echo "Add route successfully"
else
echo "Failed to add route: $ADD_ROUTE_STATUS" >&2
fi
# Only clear ARP and neighbor cache for eth0
/system/bin/ip neigh flush dev eth0
# Add routing rules - only flush cache once after rule is added
/system/bin/ip rule add from all to $ETH_NETWORK/$ETH_NETMASK lookup $ROUTE_TABLE prio 1000
/system/bin/ip route flush cache dev eth0
# Only enable forwarding for eth0 interface
echo 1 > /proc/sys/net/ipv4/conf/eth0/forwarding 2>/dev/null || true
# Wait for routing rules to take effect - using loop check instead of fixed wait
WAITED=0
while [ $WAITED -lt $MAX_ROUTE_WAIT ]; do
if /system/bin/ip rule | grep -q "$ETH_NETWORK/$ETH_NETMASK"; then
echo "Routing rules are now effective after $WAITED seconds"
break
fi
echo "Waiting for routing rules to take effect... ($WAITED/$MAX_ROUTE_WAIT)"
sleep 0.5
WAITED=$((WAITED+1))
done
# Display execution time
SCRIPT_END=$(date +%s)
TOTAL_TIME=$((SCRIPT_END - SCRIPT_START))
echo "Total script execution time: $TOTAL_TIME seconds"
exit 0

Binary file not shown.

Binary file not shown.

@ -21,19 +21,17 @@ if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
# add_definitions(-DUSING_PTZ)
add_definitions(-DUSING_PLZ)
endif()
# OUTPUT_DBG_INFO:
add_definitions(-DOUTPUT_DBG_INFO)
# OUTPUT_SOCKET_DBG_INFO Depends ON OUTPUT_DBG_INFO
# TerminalService.cpp
# add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO
# Database.cpp
# add_definitions(-DOUTPUT_DB_DBG_INFO)
add_definitions(-DUSING_FFMPEG)
add_definitions(-DOUTPUT_DB_DBG_INFO)
IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG)
@ -63,8 +61,8 @@ add_definitions(-DENABLE_3V3_ALWAYS)
add_definitions(-DCURL_STATICLIB)
add_definitions(-DUSING_HDRPLUS)
add_definitions(-DUSING_EXEC_HDRP=0)
#set(USING_EXEC_HDRP 1)
add_definitions(-DUSING_EXEC_HDRP=1)
set(USING_EXEC_HDRP 1)
# include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED )
@ -95,8 +93,6 @@ set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
find_package(ncnn REQUIRED)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include)
@ -158,11 +154,11 @@ add_definitions(-DDISABLE_RTTI)
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp )
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp )
#SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
#SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp
@ -180,7 +176,6 @@ SET(HDRPLUS2_SOURCES
hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(HDRPLUS2_SOURCES )
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
@ -190,6 +185,8 @@ SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
@ -197,6 +194,7 @@ SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
# SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp)
include_directories(${YAMC_INC_DIR})
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
include_directories(${ASIO_ROOT}/include)
# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
@ -204,6 +202,52 @@ include_directories(${ASIO_ROOT}/include)
# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c )
# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
file(GLOB BREAKPAD_SOURCES_COMMON
native-lib.cpp
${BREAKPAD_ROOT}/client/linux/crash_generation/crash_generation_client.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/thread_info.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/ucontext_reader.cc
${BREAKPAD_ROOT}/client/linux/handler/exception_handler.cc
${BREAKPAD_ROOT}/client/linux/handler/minidump_descriptor.cc
${BREAKPAD_ROOT}/client/linux/log/log.cc
${BREAKPAD_ROOT}/client/linux/microdump_writer/microdump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_ptrace_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/minidump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/pe_file.cc
${BREAKPAD_ROOT}/client/minidump_file_writer.cc
${BREAKPAD_ROOT}/common/convert_UTF.cc
${BREAKPAD_ROOT}/common/md5.cc
${BREAKPAD_ROOT}/common/string_conversion.cc
${BREAKPAD_ROOT}/common/linux/elfutils.cc
${BREAKPAD_ROOT}/common/linux/file_id.cc
${BREAKPAD_ROOT}/common/linux/guid_creator.cc
${BREAKPAD_ROOT}/common/linux/linux_libc_support.cc
${BREAKPAD_ROOT}/common/linux/memory_mapped_file.cc
${BREAKPAD_ROOT}/common/linux/safe_readlink.cc
)
file(GLOB BREAKPAD_ASM_SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S)
set_property(SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S PROPERTY LANGUAGE C)
# set_source_files_properties(${BREAKPAD_ASM_SOURCE} PROPERTIES LANGUAGE C)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
breakpad
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${BREAKPAD_SOURCES_COMMON}
${BREAKPAD_ASM_SOURCE}
)
INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR})
SET(PUBLIC_HEADERS
@ -339,7 +383,6 @@ add_library( # Sets the name of the library.
GPIOControl.cpp
MicroPhoto.cpp
PhoneDevice.cpp
PtzController.cpp
# PhoneDevice2.cpp
Camera.cpp
Camera2Reader.cpp
@ -353,10 +396,6 @@ add_library( # Sets the name of the library.
ncnn/yolov5ncnn.cpp
netcamera/httpclient.cpp
netcamera/VendorCtrl.cpp
netcamera/YuShiCtrl.cpp
netcamera/HangYuCtrl.cpp
netcamera/HikonCtrl.cpp
${STREAMING_SRCS}
@ -378,11 +417,9 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/SpecData_I1_JS.cpp
${TERM_CORE_ROOT}/SpecData_I1_HN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN_TY.cpp
${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp
${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp
${TERM_CORE_ROOT}/SpecData_I1_NX.cpp
${TERM_CORE_ROOT}/SpecData_I1_SX_ZY.cpp
${TERM_CORE_ROOT}/SpecData_XY.cpp
${TERM_CORE_ROOT}/SpecData_ZJ.cpp
${TERM_CORE_ROOT}/SpecData_NW.cpp
@ -400,18 +437,14 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Client/Terminal_AH.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_TY.cpp
${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp
${TERM_CORE_ROOT}/Client/Terminal_JS.cpp
${TERM_CORE_ROOT}/Client/Terminal_NX.cpp
${TERM_CORE_ROOT}/Client/Terminal_SX_ZY.cpp
${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp
${TERM_CORE_ROOT}/Client/Terminal_NW.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
${TERM_CORE_ROOT}/Client/Database.cpp
# ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
)
@ -436,7 +469,7 @@ target_link_libraries( # Specifies the target library.
${PROJECT_NAME}
jsoncpp
freetype
breakpad
# breakpad
# Links the target library to the log library
# included in the NDK.
avcodec avfilter avformat avutil swresample swscale x264

@ -2509,8 +2509,8 @@ void DngCreator::writeInputStream(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset);
"rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream;
@ -2578,8 +2578,8 @@ void DngCreator::writeInputBuffer(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset);
"rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream;
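The two ALOGV changes above replace the %lld specifier with the PRId64 macro when logging the 64-bit offset. A minimal sketch of that formatting pattern, using plain printf instead of ALOGV (illustrative only, not code from this repository):

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
    int64_t offset = 123456789012345;
    // PRId64 expands to the conversion specifier that matches int64_t on the current ABI,
    // so the format string stays correct on both 32-bit and 64-bit Android targets.
    std::printf("offset=%" PRId64 "\n", offset);
    return 0;
}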

@ -14,6 +14,7 @@
#include <climits>
#include "GPIOControl.h"
#include <LogThread.h>
#ifdef _DEBUG
#include <AndroidHelper.h>
@ -23,52 +24,12 @@
#define IOT_PARAM_READ 0xAF
std::mutex GpioControl::m_locker;
std::mutex GpioControl::m_gpioLocker;
CSemaphore GpioControl::m_semaphore;
std::vector<GpioControl::ITEM> GpioControl::m_items;
std::thread GpioControl::m_thread;
bool GpioControl::m_exitSignal = false;
bool GpioControl::m_cameraPowerStatus = false;
#define ENABLE_GPIO_TRACING
#ifdef ENABLE_GPIO_TRACING
class GpioDebugLogger
{
public:
GpioDebugLogger(int cmd, int value)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_val." + std::to_string(value);
CreateEmptyFile(m_path + ".enter");
}
GpioDebugLogger(int cmd)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_get";
CreateEmptyFile(m_path + ".enter");
}
~GpioDebugLogger()
{
uint64_t ts = (GetMicroTimeStamp() - m_startTime);
if (ts > 1000)
{
CreateEmptyFile(m_path + ".leave." + std::to_string(ts));
}
else
{
std::string path = m_path + ".enter";
std::remove(path.c_str());
}
}
private:
std::string m_path;
uint64_t m_startTime;
};
#endif
size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
{
size_t oldRef = 0;
@ -96,29 +57,23 @@ size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
if (it == m_items.end())
{
oldRef = 0;
ITEM item = {param.cmd, references, now};
ITEM item = {param.cmd, references, 0, 0, now};
m_items.push_back(item);
SetCamerastatus(param.cmd, true);
}
if (oldRef == 0/* || param.cmd != CMD_SET_3V3_PWR_EN*/)
if (oldRef == 0)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(param.cmd, param.value);
#endif
m_gpioLocker.lock();
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
}
m_gpioLocker.unlock();
#ifdef _DEBUG
ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references);
#endif
@ -134,30 +89,20 @@ void GpioControl::setInt(int cmd, int value)
// param.cmd = cmd;
// param.value = value;
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd, value);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if (fd > 0)
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, value, param.result);
int realVal = getInt(param.cmd);
XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result, realVal);
#endif
}
m_gpioLocker.unlock();
}
int GpioControl::getInt(int cmd)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -169,34 +114,29 @@ int GpioControl::getInt(int cmd)
ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
close(fd);
m_gpioLocker.unlock();
return param.value;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setLong(int cmd, long value)
{
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value2 = value;
// LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
}
m_gpioLocker.unlock();
}
long GpioControl::getLong(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -206,37 +146,32 @@ long GpioControl::getLong(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
m_gpioLocker.unlock();
return param.value2;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setString(int cmd, const std::string& value)
{
IOT_PARAM param;
int fd = open(GPIO_NODE_MP, O_RDONLY);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
param.cmd = cmd;
memset(param.str, 0, MAX_STRING_LEN);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
memcpy(param.str, value.c_str(), len);
// LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
}
m_gpioLocker.unlock();
return;
}
std::string GpioControl::getString(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -246,10 +181,8 @@ std::string GpioControl::getString(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
m_gpioLocker.unlock();
return std::string(param.str);
}
m_gpioLocker.unlock();
return "";
}
@ -277,10 +210,6 @@ size_t GpioControl::TurnOn(const std::vector<int>& cmds)
m_locker.lock();
for (it = cmds.cbegin(); it != cmds.cend(); ++it)
{
if (*it == 0)
{
continue;
}
param.cmd = *it;
turnOnImpl(param);
}
@ -299,17 +228,14 @@ size_t GpioControl::TurnOffImmediately(int cmd)
{
if (it->cmd == cmd)
{
if (it->references > 0)
{
it->references = 0;
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
ref = it->references;
it->closeCmds++;
it->closeTime = ts;
break;
}
}
m_locker.unlock();
m_semaphore.release();
#ifdef _DEBUG
ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
@ -325,36 +251,22 @@ size_t GpioControl::TurnOff(int cmd, uint32_t delayedCloseTime/* = 0*/)
}
size_t ref = 0;
std::vector<ITEM>::iterator it;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmd);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd)
{
if (it->references > 0)
ref = it->references;
it->closeCmds++;
if (ts > it->closeTime)
{
it->references--;
if (it->references == 0)
{
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
it->closeTime = ts;
}
break;
}
}
m_locker.unlock();
m_semaphore.release();
#ifdef _DEBUG
ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
@ -370,17 +282,6 @@ size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseT
}
std::vector<ITEM>::iterator it;
std::vector<int>::const_reverse_iterator itCmd;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmds);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock();
// turnOnImpl(param);
for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd)
@ -389,41 +290,25 @@ size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseT
{
if (it->cmd == *itCmd)
{
if (it->references > 0)
{
it->references--;
if (it->references == 0)
it->closeCmds++;
if (ts > it->closeTime)
{
SetCamerastatus(it->cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
it->closeTime = ts;
}
break;
}
}
}
m_locker.unlock();
m_semaphore.release();
return 0;
}
size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{
for (auto itCmd = cmds.cbegin(); itCmd != cmds.end(); ++itCmd)
{
if (itCmd->second > 0)
{
uint32_t delayedCloseTime = itCmd->second;
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(itCmd->first);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
}
time_t ts = time(NULL);
time_t ts2;
std::vector<ITEM>::iterator it;
std::vector<std::pair<int, uint32_t> >::const_iterator itCmd;
m_locker.lock();
@ -433,14 +318,14 @@ size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{
if (it->cmd == itCmd->first)
{
if (it->references > 0)
it->closeCmds++;
if (itCmd->second != 0)
{
it->references--;
if (it->references == 0)
ts2 = itCmd->second + ts;
if (ts2 > it->closeTime)
{
SetCamerastatus(it->cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
it->closeTime = ts2;
}
}
break;
@ -448,6 +333,7 @@ size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
}
}
m_locker.unlock();
m_semaphore.release();
return 0;
}
@ -457,7 +343,7 @@ bool GpioControl::SetCamerastatus(int cmd, bool status)
if(cmd == CMD_SET_PIC1_POWER)
m_cameraPowerStatus = status;
#endif
#ifdef USING_PTZ
#ifdef USING_PLZ
if(cmd == CMD_SET_PTZ_PWR_ENABLE)
{
m_cameraPowerStatus = status;
@ -477,14 +363,14 @@ bool GpioControl::GetSelftestStatus(time_t wait_time)
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
#ifdef USING_PLZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0 && it->openTime!=0 && (now - it->openTime >= wait_time))
if (it->cmd == cmd && it->openTime!=0 && (now - it->openTime >= wait_time))
{
return true;//self-test complete
}
@ -499,20 +385,131 @@ time_t GpioControl::GetSelfTestRemain(time_t wait_time)
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
#ifdef USING_PLZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0)
if (it->cmd == cmd && it->openTime!=0 && (now - it->openTime <= wait_time))
{
time_t remaintime = (now - it->openTime);
remaintime = (wait_time > remaintime) ? (wait_time - remaintime) : 0;
time_t remaintime =wait_time - (now - it->openTime);
return remaintime;//time remaining until self-test completes
}
}
return 0;
}
void GpioControl::PowerControlThreadProc()
{
time_t ts = 0;
std::vector<ITEM>::iterator it;
std::vector<int> items;
time_t minDelayTime = 0;
time_t delayTime = 0;
int fd = -1;
int res = -1;
m_cameraPowerStatus = 0;
while(1)
{
// Check if there is close cmd
ts = time(NULL);
minDelayTime = std::numeric_limits<time_t>::max();
m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->references == 0 && it->closeCmds == 0 && it->closeTime == 0)
{
#ifdef _DEBUG
ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds);
#endif
continue;
}
if (it->closeCmds > 0)
{
if (it->references <= it->closeCmds)
{
it->references = 0;
}
else
{
it->references -= it->closeCmds;
if(it->references < 0)
it->references = 0;
}
it->closeCmds = 0;
}
if (it->references == 0)
{
// Should turn off the power
if ((it->closeTime == 0) || (it->closeTime <= ts))
{
// close it directly
setInt(it->cmd, 0);
it->closeTime = 0;
it->openTime = 0;
#ifdef _DEBUG
ALOGI("PWR THREAD DO TurnOff cmd=%d", it->cmd);
#endif
SetCamerastatus(it->cmd, false);
}
else
{
// Check Time
delayTime = ts - it->closeTime;
if (delayTime < minDelayTime)
{
minDelayTime = delayTime;
}
}
}
#ifdef _DEBUG
ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds);
#endif
}
m_locker.unlock();
if (minDelayTime < std::numeric_limits<time_t>::max())
{
m_semaphore.try_acquire_for(std::chrono::seconds(1));
}
else
{
m_semaphore.acquire();
}
if (m_exitSignal)
{
break;
}
}
}
bool GpioControl::Startup()
{
// if (m_thread.)
m_exitSignal = false;
m_thread = std::thread(PowerControlThreadProc);
#ifdef _DEBUG
pthread_t nativeHandle = m_thread.native_handle();
pthread_setname_np(nativeHandle, "gpioclose");
#endif
return true;
}
void GpioControl::Stop()
{
// Notify
m_exitSignal = true;
m_semaphore.release();
m_thread.detach();
}
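The PowerControlThreadProc/Startup/Stop additions above centralize delayed power-off in one worker thread: callers record close requests under m_locker and release m_semaphore, and the worker drains closeCmds, switches the GPIO off once references reaches zero, and otherwise blocks on the semaphore. A self-contained sketch of that producer/worker shape using standard C++20 primitives in place of the project's CSemaphore and ITEM types (simplified, assumption-level code, not the repository's implementation):

#include <atomic>
#include <mutex>
#include <semaphore>
#include <vector>

struct Item { int cmd; size_t references; size_t closeCmds; };  // stand-in for GpioControl::ITEM

std::mutex g_lock;
std::vector<Item> g_items;
std::counting_semaphore<> g_wake{0};
std::atomic<bool> g_exit{false};

// Producer side (mirrors TurnOff): queue a close request, then wake the worker.
void requestClose(int cmd) {
    {
        std::lock_guard<std::mutex> guard(g_lock);
        for (auto& it : g_items)
            if (it.cmd == cmd) { ++it.closeCmds; break; }
    }
    g_wake.release();
}

// Worker (mirrors PowerControlThreadProc): apply queued closes, then sleep until woken.
void powerWorker() {
    for (;;) {
        {
            std::lock_guard<std::mutex> guard(g_lock);
            for (auto& it : g_items) {
                if (it.closeCmds == 0) continue;
                it.references = (it.references > it.closeCmds) ? it.references - it.closeCmds : 0;
                it.closeCmds = 0;
                // when references reaches zero the real code calls setInt(cmd, 0) and clears openTime
            }
        }
        g_wake.acquire();   // the real loop alternates acquire()/try_acquire_for() based on pending close delays
        if (g_exit.load()) break;
    }
}

void stopWorker() { g_exit.store(true); g_wake.release(); }  // mirrors GpioControl::Stop()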

@ -13,11 +13,10 @@
#include <utility>
#include <SemaphoreEx.h>
#include <LogThread.h>
#ifndef USING_N938
#ifndef USING_PTZ // MicroPhoto
#ifndef USING_PLZ // MicroPhoto
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
@ -44,7 +43,6 @@
#define CMD_SET_PWM_BEE_STATE 126 // Removed
#define CMD_SET_ALM_MODE 128 // Removed
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_12V_EN_STATE 133
#if 1
@ -52,18 +50,7 @@
#define CMD_SET_3V3_PWR_EN 132
#endif
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_NEW_OTG_STATE 507
#else // defined(USING_PTZ)
#else // defined(USING_PLZ)
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
@ -73,18 +60,19 @@
#define CMD_SET_12V_EN_STATE 0 // TO BE ADDED
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
// #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
// #define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 117
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 112
#define CMD_SET_SPI_MODE 0 // TO BE ADDED
#define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED
#define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED
#define CMD_SET_485_ENABLE 131
#define CMD_SET_3V3_PWR_EN 132
// #define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_485_ENABLE 512
#define CMD_SET_3V3_PWR_EN 516
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506
@ -111,31 +99,20 @@
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#endif // USING_PTZ
#endif // USING_PLZ
#else // defined(USING_N938)
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN1 302
#define CMD_SET_3V3_PWR_EN 132
#define CMD_SET_3V3_PWR_EN 360
#define CMD_SET_UART0_EN 361
#define CMD_SET_485_EN0 301
#define CMD_SET_NETWORK_POWER_EN 362
#define CMD_SET_485_EN3 304
#define CMD_SET_485_EN2 303
#define CMD_SET_SPI_POWER 129
// #define CMD_SET_5V_EN 363
#define CMD_SET_5V_EN 363
#define CMD_SDCARD_DETECT_EN 364
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_OTHER_POWER 493
@ -156,8 +133,6 @@
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_SET_INIT_STATUS 0 // 401
#endif // USING_N938
@ -180,16 +155,19 @@ public:
{
int cmd;
size_t references;
size_t closeCmds;
time_t closeTime;
time_t openTime;
};
private:
static std::mutex m_locker;
static CSemaphore m_semaphore;
static std::vector<ITEM> m_items;
static bool m_exitSignal;
static std::thread m_thread;
static bool m_cameraPowerStatus;
static std::mutex m_gpioLocker;
protected:
static size_t turnOnImpl(const IOT_PARAM& param);
static size_t turnOffImpl(const IOT_PARAM& param);
@ -208,6 +186,11 @@ public:
static bool GetSelftestStatus(time_t wait_time);
static time_t GetSelfTestRemain(time_t wait_time);
static void PowerControlThreadProc();
static bool Startup();
static void Stop();
public:
static void setInt(int cmd, int value);
static int getInt(int cmd);
@ -232,7 +215,7 @@ public:
static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime)
{
#ifdef USING_PTZ
#ifdef USING_PLZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
@ -241,7 +224,7 @@ public:
static void setCam3V3Enable(bool enabled)
{
#ifdef USING_PTZ
#ifdef USING_PLZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
@ -251,7 +234,7 @@ public:
static void setBeeOn(bool z)
{
#ifndef USING_N938
#ifndef USING_PTZ
#ifndef USING_PLZ
z ? TurnOn(CMD_SET_PWM_BEE_STATE) : TurnOff(CMD_SET_PWM_BEE_STATE);
#endif
#endif
@ -259,7 +242,7 @@ public:
static void setJidianqiState(bool z) {
#ifndef USING_N938
#ifndef USING_PTZ
#ifndef USING_PLZ
z ? TurnOn(CMD_SET_ALM_MODE) : TurnOff(CMD_SET_ALM_MODE);
#endif
#endif
@ -276,7 +259,7 @@ public:
static void setRS485Enable(bool z, uint32_t delayedCloseTime)
{
#ifndef USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE, delayedCloseTime);
#else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime);
@ -294,7 +277,7 @@ public:
static void setRS485Enable(bool z)
{
#ifndef USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE);
#else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE);
@ -314,15 +297,10 @@ public:
setInt(CMD_SET_SYSTEM_RESET, 1);
}
static void reboot2()
{
setInt(CMD_SET_SYSTEM_RESET2, 1);
}
static void setLightAdc(int i)
{
#ifndef USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i);
#else
setInt(CMD_SET_LIGHT_ADC, i);
@ -333,7 +311,7 @@ public:
static int getLightAdc()
{
#ifndef USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE);
#else
return getInt(CMD_GET_LIGHT_ADC);
@ -364,7 +342,11 @@ public:
#endif
static int getChargingBusVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
#else
return -1;
#endif
}
#if 0
@ -462,12 +444,6 @@ public:
m_cmds.resize(1, cmd1);
TurnOn();
}
PowerControl(const std::vector<int>& cmds) : m_delayCloseTime(0)
{
m_cmds = cmds;
TurnOn();
}
PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(1, cmd1);
@ -526,52 +502,14 @@ public:
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(8, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
m_cmds[7] = cmd8;
TurnOn();
}
virtual ~PowerControl()
{
GpioControl::TurnOff(m_cmds, m_delayCloseTime);
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s, DelayCloseTime=%u", status.c_str(), m_delayCloseTime);
#endif
}
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string GetStatus()
{
std::string result;
for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it)
{
if (*it == 0)
{
continue;
}
result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " ";
}
return result;
}
#endif // #if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
protected:
void TurnOn()
{
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
// std::string status = GetStatus();
// XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str());
#endif
GpioControl::TurnOn(m_cmds);
}
@ -587,11 +525,11 @@ public:
#ifdef USING_N938
PowerControl(0, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#else // USING_PTZ
#else // USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#endif // USING_PTZ
#endif // USING_PLZ
#endif // USING_N938
{
}
@ -602,14 +540,14 @@ class NetCameraPowerCtrl : public PowerControl
public:
NetCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
PowerControl(CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
#else // USING_PLZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{
}
@ -620,13 +558,13 @@ class PlzCameraPowerCtrl : public PowerControl
public:
PlzCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_PLZ
#endif // USING_N938
{
}
@ -639,13 +577,11 @@ public:
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
// PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, closeDelayTime)
#else // USING_PTZ
// Micro Photo
PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime)
#endif // USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{
}
@ -658,11 +594,11 @@ public:
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
#else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_PLZ
#endif // USING_N938
{
}
@ -675,11 +611,11 @@ public:
#ifdef USING_N938
PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
#ifdef USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
#else // USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_PLZ
#endif // USING_N938
{
}
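The PowerControl classes above (and the NetCamera/Plz/Ethernet variants that follow) are RAII wrappers: the constructor turns a list of GPIO commands on, the destructor turns them off again with an optional delayed-close time, and the per-board #ifdef blocks only decide which command list each subclass passes up. A stripped-down sketch of that shape, with stub functions standing in for GpioControl::TurnOn/TurnOff (an illustrative assumption, not the project's class):

#include <cstdint>
#include <utility>
#include <vector>

// Stubs standing in for GpioControl::TurnOn / GpioControl::TurnOff in the real code.
void turnOn(const std::vector<int>& cmds) { /* GpioControl::TurnOn(cmds) */ }
void turnOff(const std::vector<int>& cmds, uint32_t delaySeconds) { /* GpioControl::TurnOff(cmds, delaySeconds) */ }

class ScopedPower {
public:
    explicit ScopedPower(std::vector<int> cmds, uint32_t delaySeconds = 0)
        : m_cmds(std::move(cmds)), m_delay(delaySeconds)
    {
        turnOn(m_cmds);            // rails come up as soon as the guard exists
    }
    ~ScopedPower()
    {
        turnOff(m_cmds, m_delay);  // and are released (possibly after a delay) when it goes out of scope
    }
    ScopedPower(const ScopedPower&) = delete;
    ScopedPower& operator=(const ScopedPower&) = delete;
private:
    std::vector<int> m_cmds;
    uint32_t m_delay;
};

// Typical use: hold the guard in a shared_ptr for as long as a camera or sensor needs power,
// e.g. auto pwr = std::make_shared<ScopedPower>(std::vector<int>{CMD_SET_12V_EN_STATE}, 5);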

@ -1,7 +1,6 @@
#include <jni.h>
#include <string>
#include <thread>
#include <chrono>
#include <Factory.h>
#include <Client/Terminal.h>
#include "PhoneDevice.h"
@ -20,18 +19,8 @@
#endif
#ifdef USING_BREAK_PAD
#include <client/linux/handler/exception_handler.h>
#include <client/linux/handler/minidump_descriptor.h>
#endif
#ifdef USING_MQTT
#include <mosquitto.h>
#endif
#ifdef USING_FFMPEG
extern "C" {
#include <libavformat/avformat.h>
}
#include "client/linux/handler/exception_handler.h"
#include "client/linux/handler/minidump_descriptor.h"
#endif
#include <android/native_window.h>
@ -41,7 +30,6 @@ extern "C" {
#include "GPIOControl.h"
#ifdef USING_BREAK_PAD
static google_breakpad::ExceptionHandler* g_breakpad_handler = nullptr;
bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
void* context,
bool succeeded) {
@ -173,27 +161,11 @@ void Runner::RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned
pTerminal->RequestCapture(channel, preset, type, scheduleTime);
}
#include <signal.h>
#include <android/log.h>
#if 0
void sighandler(int sig) {
__android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "Caught signal %d", sig);
exit(1);
}
#endif
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
JNIEnv* env = NULL;
jint result = -1;
// Register in JNI_OnLoad or another initialization function
#if 0
signal(SIGSEGV, sighandler);
#endif
#if defined(JNI_VERSION_1_6)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK)
{
@ -219,15 +191,8 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
}
#ifdef USING_BREAK_PAD
google_breakpad::MinidumpDescriptor descriptor("/sdcard/com.xypower.mpapp/logs/");
g_breakpad_handler = new google_breakpad::ExceptionHandler(
descriptor,
nullptr, // Filter callback
DumpCallback, // Minidump callback
nullptr, // Context
true, // Install handlers
-1 // Server FD (not used)
);
google_breakpad::MinidumpDescriptor descriptor("/sdcard/Android/data/com.xypower.mpapp/files/logs/");
google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
#endif
#if 0
@ -262,40 +227,9 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
curl_global_init(CURL_GLOBAL_ALL);
#ifdef USING_MQTT
mosquitto_lib_init();
#endif
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_init();
#endif
return result;
}
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
{
#ifdef USING_MQTT
mosquitto_lib_cleanup();
#endif
curl_global_cleanup();
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_deinit();
#endif
#ifdef USING_BREAKPAD
// Clean up breakpad handler
if (g_breakpad_handler) {
delete g_breakpad_handler;
g_breakpad_handler = nullptr;
}
#endif
}
bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread)
{
didAttachThread = false;
@ -331,10 +265,12 @@ Java_com_xypower_mpapp_MainActivity_takePhoto(
unsigned char id = (unsigned char)channel - 1;
Camera2Reader *camera = new Camera2Reader(id);
std::string pathStr = jstring2string(env, path);
std::string fileNameStr = jstring2string(env, fileName);
const char *pathStr = env->GetStringUTFChars(path, 0);
const char *fileNameStr = env->GetStringUTFChars(fileName, 0);
camera->Open(pathStr.c_str(), fileNameStr.c_str());
camera->Open(pathStr, fileNameStr);
env->ReleaseStringUTFChars(fileName, fileNameStr);
env->ReleaseStringUTFChars(path, pathStr);
camera->start();
@ -365,12 +301,13 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
jstring modelName = env->NewStringUTF(model);
env->SetObjectField(pThis, fieldId, modelName);
std::string appPathStr = jstring2string(env, appPath);
std::string ipStr = jstring2string(env, ip);
std::string cmdidStr = jstring2string(env, cmdid);
std::string simcardStr = jstring2string(env, simcard);
std::string tfCardPathStr = jstring2string(env, tfCardPath);
std::string nativeLibraryDirStr = jstring2string(env, nativeLibraryDir);
bool udpOrTcp = (networkProtocol != 0); // 0: tcp
const char *appPathStr = appPath == NULL ? NULL : env->GetStringUTFChars(appPath, 0);
const char *ipStr = ip == NULL ? NULL : env->GetStringUTFChars(ip, 0);
const char *cmdidStr = cmdid == NULL ? NULL : env->GetStringUTFChars(cmdid, 0);
const char *simcardStr = simcard == NULL ? NULL : env->GetStringUTFChars(simcard, 0);
const char *tfCardPathStr = tfCardPath == NULL ? NULL : env->GetStringUTFChars(tfCardPath, 0);
const char *nativeLibraryDirStr = nativeLibraryDir == NULL ? NULL : env->GetStringUTFChars(nativeLibraryDir, 0);
JavaVM* vm = NULL;
jint ret = env->GetJavaVM(&vm);
@ -381,14 +318,14 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
CTerminal* pTerminal = NewTerminal(protocol);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, appPathStr, (uint64_t)netHandle, versionCode, nativeLibraryDirStr);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), (uint64_t)netHandle, versionCode, MakeString(nativeLibraryDirStr));
device->SetListener(pTerminal);
device->UpdateSignalLevel(signalLevel);
device->SetBuildTime(buildTime / 1000);
device->UpdateSimcard(simcardStr);
device->UpdateTfCardPath(tfCardPathStr);
device->UpdateSimcard(MakeString(simcardStr));
device->UpdateTfCardPath(MakeString(tfCardPathStr));
pTerminal->InitServerInfo(appPathStr, cmdidStr, ipStr, port, networkProtocol, encryptData);
pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData);
// pTerminal->SetPacketSize(1 * 1024); // 1K
#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN)
pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", "");
@ -400,6 +337,12 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
#ifdef _DEBUG
ALOGD("Finish Startup");
#endif
if (appPathStr != NULL) env->ReleaseStringUTFChars(appPath, appPathStr);
if (ipStr != NULL) env->ReleaseStringUTFChars(ip, ipStr);
if (cmdidStr != NULL) env->ReleaseStringUTFChars(cmdid, cmdidStr);
if (simcardStr != NULL) env->ReleaseStringUTFChars(simcard, simcardStr);
if (tfCardPathStr != NULL) env->ReleaseStringUTFChars(tfCardPath, tfCardPathStr);
if (nativeLibraryDirStr != NULL) env->ReleaseStringUTFChars(nativeLibraryDir, nativeLibraryDirStr);
if (!res)
{
@ -413,7 +356,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
JNIEnv* env,
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jstring url, jint mediaType) {
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jboolean photoOrVideo) {
if (channel < 0 || channel > 0xFFFF)
{
@ -425,44 +368,24 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
return JNI_FALSE;
}
uint8_t type = (uint8_t)mediaType;
unsigned char type = photoOrVideo ? 0 : 1;
// std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
// th.detach();
if (channel == 0x200)
if (channel < 0x100)
{
// Heartbeat
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
}
else if (channel >= 0x100)
else
{
uint32_t packetType = channel;
packetType &= 0xFF;
pTerminal->RequestSampling(packetType, (uint64_t)scheduleTime, 0);
}
else
{
if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO)
{
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM)
{
// virtual bool StartStream(unsigned char channel, unsigned char preset, const std::string& url, uint32_t* photoId = NULL);
// virtual bool StopStream(unsigned char channel, unsigned char preset, uint32_t photoId);
uint32_t photoId = 0;
std::string urlStr = jstring2string(env, url);
pTerminal->StartStream(channel, preset, urlStr, &photoId);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF)
{
pTerminal->StopStream(channel, preset, 0);
}
}
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
JNIEnv* env,
@ -506,8 +429,11 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
osds[2].text = cfg.osd.rightBottom;
osds[3].text = cfg.osd.leftBottom;
std::string pathStr = jstring2string(env, path);
device->TakePhoto(photoInfo, osds, pathStr);
const char* pathStr = env->GetStringUTFChars(path, 0);
device->TakePhoto(photoInfo, osds, MakeString(pathStr));
env->ReleaseStringUTFChars(path, pathStr);
// device->TurnOffCameraPower(NULL);
// if (photoInfo.usbCamera)
@ -533,7 +459,7 @@ extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
JNIEnv* env,
jobject pThis,
jlong handler, jint signalLevel, jboolean scheduled) {
jlong handler, jint signalLevel) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
@ -547,25 +473,7 @@ Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
device->UpdateSignalLevel(signalLevel);
}
pTerminal->SendHeartbeat(scheduled != JNI_FALSE);
#ifdef OUTPUT_DBG_INFO
#if 0
std::thread t([]()
{
time_t ts = time(NULL);
int ldr = GpioControl::getLightAdc();
char buf[64] = { 0 };
snprintf(buf, sizeof(buf), "%s %d\r\n", FormatLocalDateTime(ts).c_str(), ldr);
appendFile("/sdcard/com.xypower.mpapp/tmp/ldr.txt", (const unsigned char* )buf, strlen(buf));
});
t.detach();
#endif
#endif
pTerminal->SendHeartbeat();
return JNI_TRUE;
}
@ -759,7 +667,9 @@ Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
JNIEnv* env,
jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) {
std::string paramPathStr = jstring2string(env, paramPath);
const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0);
std::string paramPathStr = MakeString(pParamPathStr);
env->ReleaseStringUTFChars(paramPath, pParamPathStr);
const char* pBinPathStr = env->GetStringUTFChars(binPath, 0);
std::string binPathStr = MakeString(pBinPathStr);
@ -967,21 +877,19 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
JNIEnv* env,
jobject pThis, jlong handler, jint channelToClean) {
jobject pThis, jlong handler) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
if (channelToClean != -1)
{
pTerminal->CleanCaptureSchedules((uint32_t)((int)channelToClean));
}
bool res = pTerminal->LoadAppConfigs(true);
bool res = pTerminal->LoadAppConfigs();
return res ? JNI_TRUE : JNI_FALSE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) {
@ -1028,8 +936,9 @@ Java_com_xypower_mpapp_MicroPhotoService_infoLog(
return;
}
std::string str = jstring2string(env, msg);
XYLOG(XYLOG_SEVERITY_INFO, str.c_str());
const char *msgStr = env->GetStringUTFChars(msg, 0);
XYLOG(XYLOG_SEVERITY_INFO, msgStr);
env->ReleaseStringUTFChars(msg, msgStr);
}
extern "C" JNIEXPORT jboolean JNICALL
@ -1495,84 +1404,3 @@ Java_com_xypower_mpapp_MicroPhotoService_releasePowerControl(
return JNI_TRUE;
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getCustomAppId(JNIEnv *env, jobject thiz) {
#ifdef USING_N938
return 2;
#elif defined(USING_PTZ)
return 1;
#else
return 0;
#endif
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendCameraCtrl(
JNIEnv* env, jobject pThis, jlong handle, jint channel, jint preset, jint cmd) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
}
pTerminal->SendCameraCtrl(channel, preset, cmd);
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyTimeUpdated(
JNIEnv* env, jobject pThis, jlong handle) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
}
std::thread t([pTerminal]()
{
pTerminal->OnTimeUpdated();
});
t.detach();
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendBasicInfo(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendBasicInfo()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendBasicInfo();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendWorkStatus(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendWorkStatus()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendWorkStatus();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendFault(JNIEnv *env, jobject thiz, jlong handler, jstring faultCode, jstring faultInfo) {
// TODO: implement sendFault()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
std::string faultInfoStr = jstring2string(env, faultInfo);
pTerminal->SendFaultInfo(faultInfoStr);
return JNI_TRUE;
}

File diff suppressed because it is too large

@ -31,8 +31,6 @@
#include <android/multinetwork.h>
#include "SensorsProtocol.h"
#include "PtzController.h"
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__))
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__))
@ -156,22 +154,11 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) {
#endif
class PowerControl;
class VendorCtrl;
class Streaming;
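// Everything a live push-streaming session needs to keep alive: the stream itself plus the
// camera and Ethernet power controls that must stay powered while it runs.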
struct STREAMING_CONTEXT
{
std::shared_ptr<Streaming> stream;
std::shared_ptr<PowerControl> powerCtrl;
std::shared_ptr<PowerControl> ethernetPowerCtrl;
};
class CPhoneDevice : public IDevice
{
public:
friend PtzController;
struct NETWORK
{
std::string iface;
@ -234,19 +221,16 @@ public:
virtual bool InstallAPP(const std::string& path, unsigned int delayedTime);
virtual bool Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout = 1000);
virtual bool EnableGPS(bool enabled);
virtual int QueryBattaryVoltage(int timesForAvg, int* isCharging);
virtual uint32_t QueryLdr();
virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging);
virtual bool RequestPosition();
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto);
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path);
virtual bool CloseCamera();
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 1);
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 0);
virtual bool UnregisterTimer(timer_uid_t uid);
virtual uint64_t RequestWakelock(uint64_t timeout);
virtual bool ReleaseWakelock(uint64_t wakelock);
virtual std::string GetVersion() const;
virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam);
virtual bool OpenSensors(int sensortype);
@ -280,19 +264,18 @@ public:
net_handle_t GetEthnetHandle() const;
VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime);
protected:
std::string GetFileName() const;
std::string GetVersion() const;
bool SendBroadcastMessage(std::string action, int value);
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
bool TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool StartPushStreaming(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime);
bool TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{
if (m_listener != NULL)
@ -334,7 +317,7 @@ protected:
return false;
}
void QueryFlowInfo(std::map<std::string, std::string>& powerInfo);
void QueryPowerInfo(std::map<std::string, std::string>& powerInfo);
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat mat);
@ -361,10 +344,6 @@ protected:
void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway);
void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath);
void SetStaticIp();
void ShutdownEthernet();
int ExecuteCommand(const std::string& cmd);
static std::string BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst);
protected:
@ -388,7 +367,7 @@ protected:
jmethodID mRequestWakelockMid;
jmethodID mReleaseWakelockMid;
jmethodID mGetFlowInfoMid;
jmethodID mGetSystemInfoMid;
jmethodID mRebootMid;
jmethodID mInstallAppMid;
@ -396,7 +375,6 @@ protected:
jmethodID mRequestPositionMid;
jmethodID mExecHdrplusMid;
jmethodID mSetStaticIpMid;
jmethodID mExecuteCmdMid;
jmethodID mConvertDngToPngMid;
@ -424,8 +402,6 @@ protected:
std::thread m_threadClose;
std::shared_ptr<PowerControl> m_powerCtrlPtr;
uint32_t m_ethernetFailures;
int m_signalLevel;
time_t m_signalLevelUpdateTime;
@ -444,10 +420,6 @@ protected:
std::atomic<bool> m_collecting;
unsigned long long localDelayTime;
std::map<uint8_t, STREAMING_CONTEXT > m_streamings;
PtzController* m_ptzController;
};

@ -1,462 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#include "PtzController.h"
#include "SensorsProtocol.h"
#include "GPIOControl.h"
#include "PhoneDevice.h"
#include "time.h"
#include <memory>
PtzController::PtzController(CPhoneDevice* pPhoneDevice) : m_pPhoneDevice(pPhoneDevice)
{
m_exit = false;
}
void PtzController::Startup()
{
m_thread = std::thread(PtzThreadProc, this);
}
void PtzController::PtzThreadProc(PtzController* pThis)
{
pThis->PtzProc();
}
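// Queues a raw serial/PTZ command and wakes the worker thread.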
void PtzController::AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr)
{
SERIAL_CMD cmd = { 0 };
cmd.channel = channel;
cmd.preset = preset;
cmd.cmdidx = cmdidx;
cmd.bImageSize = bImageSize;
strcpy(cmd.serfile, serfile);
cmd.baud = baud;
cmd.addr = addr;
cmd.ts = time(NULL);
m_locker.lock();
m_cmds.push_back(cmd);
m_locker.unlock();
m_sem.release();
}
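// Queues a power-on command (when needed) followed by the photo command itself; the semaphore
// is released twice so the worker can drain both queue entries.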
void PtzController::AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds)
{
IDevice::SerialsPhotoParam param = { "", 0, 0 };
m_pPhoneDevice->GetPhotoSerialsParamCb(param);
SERIAL_CMD cmdPreset = { 0 };
time_t ts = time(NULL);
#if 1
// if (photoInfo.preset != 0 && photoInfo.preset != 0xFF)
{
cmdPreset.ts = photoInfo.selfTestingTime;
cmdPreset.delayTime = photoInfo.closeDelayTime;
cmdPreset.channel = photoInfo.channel;
cmdPreset.preset = photoInfo.preset;
cmdPreset.cmdidx = PHOTO_OPEN_POWER;
strcpy(cmdPreset.serfile, param.serfile);
cmdPreset.baud = param.baud;
cmdPreset.addr = param.addr;
}
#endif
SERIAL_CMD cmd = { 0 };
cmd.ts = ts;
cmd.delayTime = photoInfo.closeDelayTime;
cmd.channel = photoInfo.channel;
cmd.preset = photoInfo.preset;
cmd.cmdidx = Take_Photo;
cmd.bImageSize = photoInfo.resolution;
strcpy(cmd.serfile, param.serfile);
cmd.baud = param.baud;
cmd.addr = param.addr;
PtzPhotoParams* ppp = new PtzPhotoParams(photoInfo, path, osds);
cmd.photoParams.reset(ppp);
// cmd.delayTime;
// uint8_t bImageSize;
// char serfile[128];
// uint32_t baud;
// int addr;
m_locker.lock();
#if 1
if (cmdPreset.cmdidx != 0)
{
m_cmds.push_back(cmdPreset);
}
#endif
m_cmds.push_back(cmd);
m_locker.unlock();
m_sem.release();
m_sem.release();
}
void PtzController::ExitAndWait()
{
m_exit = true;
m_sem.release();
if (m_thread.joinable())
{
m_thread.join();
}
}
void PtzController::PtzProc()
{
PROC_PTZ_STATE state = PTZS_POWER_OFF;
SERIAL_CMD cmd;
PTZ_STATE ptz_state;
bool hasCmd = false;
int i=0;
int closecmd=0;
std::shared_ptr<PowerControl> powerCtrl;
time_t selfTestingStartTime = 0;
time_t selfTestingWaitTime = 0;
time_t PTZ_preset_start_time = 0;
time_t PTZ_preset_wait_time = 0;
time_t close_delay_time = CAMERA_CLOSE_DELAYTIME;
time_t start_delay_time = 0;
time_t auto_delay_time = 0;
time_t auto_wait_time = WAIT_TIME_AUTO_CLOSE;
time_t photo_move_preset_time = 0;
int iwaitime = 0;
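// Worker loop: block on the semaphore until a command is queued (or exit is requested).
// When nothing is runnable, poll the self-test status and drive the idle / auto power-off timers.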
while(true)
{
m_sem.acquire();
if (m_exit)
{
break;
}
hasCmd = false;
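// Pop the next runnable command; while a self-test is in progress, Take_Photo commands
// stay queued and only other commands may be dequeued.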
m_locker.lock();
for (auto it = m_cmds.begin(); it != m_cmds.end(); ++it)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
// find first non-taking-photo cmd
if (it->cmdidx != Take_Photo)
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
else
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
m_locker.unlock();
if (!hasCmd)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
time_t timeout = time(NULL) - selfTestingStartTime;
if(timeout < 0)
selfTestingStartTime = time(NULL);
if (timeout >= selfTestingWaitTime)
{
XYLOG(XYLOG_SEVERITY_INFO, "超时(%u秒)未收到云台自检结束应答,状态改为空闲!", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
else
{
//if(timeout >= CAMERA_SELF_TEST_TIME)
{
#ifndef NDEBUG
if (timeout == 1 || ((timeout % 10) == 0))
#endif
{
XYLOG(XYLOG_SEVERITY_INFO, "开始查询云台自检状态timeout=%u秒", (uint32_t)timeout);
}
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "收到云台自检结束应答状态改为空闲timeout=%u秒", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
}
}
}
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
m_sem.release();
continue;
}
if(0 == start_delay_time)
{
if(0 == iwaitime)
{
auto_delay_time = time(NULL);
iwaitime += 1;
m_sem.release();
continue;
}
else
{
if(time(NULL) - auto_delay_time < 0)
{
auto_delay_time = time(NULL);
}
if(time(NULL) - auto_delay_time >= auto_wait_time)
{
iwaitime = 0;
XYLOG(XYLOG_SEVERITY_INFO, "摄像机自动上电延时时间超过%u秒准备关闭摄像机", (uint32_t)auto_wait_time);
}
else
{
m_sem.release();
continue;
}
}
}
else
{
if(time(NULL) - start_delay_time < 0)
{/* Guard against another thread adjusting the system time while we wait, which could postpone the camera power-off indefinitely */
start_delay_time = time(NULL);
}
if(time(NULL) - start_delay_time >= close_delay_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机空闲时间超过%u秒准备关闭摄像机", (uint32_t)close_delay_time);
}
else
{
m_sem.release();
continue;
}
}
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发,摄像机本来就处于关机状态!");
// Do Nothing
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发通知云台准备关机state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
powerCtrl.reset();
closecmd = 0;
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "自动触发关闭云台电源state=%d", state);
}
start_delay_time = 0;
continue;
}
switch (cmd.cmdidx)
{
case Take_Photo:
{
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
//powerCtrl = std::make_shared<PlzCameraPowerCtrl>(cmd.photoParams->mPhotoInfo.closeDelayTime);
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = cmd.photoParams->mPhotoInfo.selfTestingTime;
state = PTZS_PHOTO_SELF_TESTING;
XYLOG(XYLOG_SEVERITY_INFO, "1、收到拍照指令摄像机从关机状态改为自检状态");
m_locker.lock();
m_cmds.insert(m_cmds.begin(), cmd);
m_locker.unlock();
m_sem.release();
continue;
}
}
if(cmd.photoParams->mPhotoInfo.scheduleTime == 0)
{
if(1 == closecmd)
{
XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令但同时后续收到关机指令等待拍完照片再关机。state=%d", state);
}
else
{
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令state=%d", state);
}
}
else
XYLOG(XYLOG_SEVERITY_INFO, "2、收到自动拍照指令state=%d", state);
state = PTZS_TAKING_PHOTO;
if (cmd.preset != 0 && cmd.preset != 0xFF)
{
CameraPhotoCmd(0, cmd.channel, MOVE_PRESETNO, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
#if 0
if(START_ONCE_SELF == cmd.preset)
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "拍照调用200号预置点指令摄像机启动一次性自检从拍照状态改为自检状态取消拍照动作设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
break;
}
#endif
PTZ_preset_start_time = time(NULL);
if(START_ONCE_SELF == cmd.preset)
PTZ_preset_wait_time = CAMERA_SELF_TEST_TIME;
else
PTZ_preset_wait_time = MOVE_PRESET_WAIT_TIME;
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前开始调用预置点%ustate=%d", (uint32_t)cmd.preset, state);
for(;;)
{
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u收到移动结束应答移动时长=%d秒 state=%d", (uint32_t)cmd.preset, (uint32_t)(time(NULL)-PTZ_preset_start_time), state);
break;
}
}
if(time(NULL) - PTZ_preset_start_time < 0)
{/* Guard against another thread adjusting the system time, which could make us wait on the preset move for a very long time */
PTZ_preset_start_time = time(NULL);
}
if(time(NULL) - PTZ_preset_start_time >= PTZ_preset_wait_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u摄像机在%u秒内未收到调用预置点结束应答state=%d", (uint32_t)cmd.preset, (uint32_t)PTZ_preset_wait_time, state);
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(10));
photo_move_preset_time = time(NULL);
}
}
if(cmd.photoParams->mPhotoInfo.mediaType == 1)
m_pPhoneDevice->TakeVideoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
else if ((cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF))
{
m_pPhoneDevice->StartPushStreaming(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
}
else
m_pPhoneDevice->TakePhotoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
state = PTZS_IDLE;
}
break;
case PHOTO_OPEN_POWER:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_PHOTO_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机摄像机处于state=%d", state);
}
break;
case OPEN_TOTAL:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令摄像机处于state=%d", state);
}
closecmd = 0;
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "收到手动打开摄像机指令刷新关机计时初始值state=%d", state);
break;
case CLOSE_TOTAL:
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令,摄像机本来就处于关机状态!");
// Do Nothing
}
else if(PTZS_PHOTO_SELF_TESTING == state)
{
closecmd = 1;
XYLOG(XYLOG_SEVERITY_INFO, "在拍照自检过程中收到关机指令取消延时关机转到自动关机处理state=%d", state);
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令通知云台准备关机state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
closecmd = 0;
powerCtrl.reset();
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "关闭云台电源state=%d", state);
}
start_delay_time = 0;
break;
default:
{
if (state == PTZS_POWER_OFF)
{
XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令,摄像机处于关机状态,无法执行!");
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
break;
}
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令刷新关机计时初始值state=%d", state);
if(cmd.ts <= photo_move_preset_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "丢弃拍照调预置点期间收到的控制云台指令,指令时间" FMT_TIME_T ",拍照时间" FMT_TIME_T "state=%d", cmd.ts, photo_move_preset_time, state);
}
else
{
if((MOVE_PRESETNO == cmd.cmdidx) && (START_ONCE_SELF == cmd.preset))
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到调用200号预置点指令摄像机启动一次性自检从当前状态改为自检状态设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
}
}
break;
}
}
}

@ -1,100 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#ifndef MICROPHOTO_PTZCONTROLLER_H
#define MICROPHOTO_PTZCONTROLLER_H
#include <Buffer.h>
#include <thread>
#include <vector>
#include <memory>
#include <string>
#include <mutex>
#include <SemaphoreEx.h>
#include <Client/Device.h>
enum PROC_PTZ_STATE
{
PTZS_POWER_OFF = 0,
PTZS_IDLE = 1,
PTZS_SELF_TESTING = 2,
PTZS_MOVING = 3,
PTZS_TAKING_PHOTO = 4,
PTZS_PHOTO_SELF_TESTING = 5,
};
#define CAMERA_SELF_TEST_TIME 150 /* Camera self-test time (excluding PTZ self-test)*/
#define MOVE_PRESET_WAIT_TIME 20 /* Waiting for the maximum time for the PTZ to move to the preset position*/
#define CAMERA_CLOSE_DELAYTIME 360 /* Auto Power-Off Timer Setting After Manual Power-On (for Camera)*/
#define PHOTO_OPEN_POWER 16000
#define WAIT_TIME_AUTO_CLOSE 2 /* In order to automatically capture multiple preset point images at the same time and prevent the camera from self checking every time it takes a picture.*/
class PtzPhotoParams
{
public:
PtzPhotoParams(const IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds) :
mPhotoInfo(photoInfo), mPath(path), mOsds(osds)
{
}
~PtzPhotoParams()
{
}
IDevice::PHOTO_INFO mPhotoInfo;
std::string mPath;
std::vector<IDevice::OSD_INFO> mOsds;
};
struct SERIAL_CMD
{
uint8_t channel;
uint8_t preset;
time_t ts;
int cmdidx;
uint32_t delayTime;
uint8_t bImageSize;
char serfile[128];
uint32_t baud;
int addr;
std::shared_ptr<PtzPhotoParams> photoParams;
};
class CPhoneDevice;
class PtzController
{
public:
PtzController(CPhoneDevice* pPhoneDevice);
void Startup();
void AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr);
void AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds);
void ExitAndWait();
protected:
static void PtzThreadProc(PtzController* pThis);
void PtzProc();
protected:
std::mutex m_locker;
std::vector<SERIAL_CMD> m_cmds;
CSemaphore m_sem;
bool m_exit;
std::thread m_thread;
CPhoneDevice* m_pPhoneDevice;
};
#endif //MICROPHOTO_PTZCONTROLLER_H
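A minimal usage sketch of the controller declared above (not code from this repository; the serial device path, baud rate and address below are illustrative placeholders, and photoInfo/path/osds are assumed to come from the scheduling layer):

// Sketch only, assuming a valid CPhoneDevice* named pPhoneDevice.
PtzController controller(pPhoneDevice);
controller.Startup();                                  // spawns the worker thread

// Queue a manual command (arguments are illustrative).
controller.AddCommand(/*channel*/ 1, OPEN_TOTAL, /*bImageSize*/ 0, /*preset*/ 0,
                      "/dev/ttyS1", 9600, /*addr*/ 1);

// Queue a capture; photoInfo, path and osds are provided by the caller.
controller.AddPhotoCommand(photoInfo, path, osds);

controller.ExitAndWait();                              // stop the worker before teardown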

File diff suppressed because it is too large

@ -27,7 +27,6 @@
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define MAX_FIELDS_NUM 20 /* Maximum number of data fields in a single BD_NMEA0183 sentence */
#define MAX_SERIAL_DEV_NUM 25 /* Maximum number of serial-port sensors that can be connected */
#define MAX_SERIAL_PORT_NUM 5
#define MAX_DEV_VALUE_NUM 12 /* Maximum number of sampled values per device */
@ -40,7 +39,6 @@
#define PELCO_D_PROTOCOL 6 /* Pelco-D camera protocol index */
#define SERIALCAMERA_PROTOCOL 8 /* Serial camera protocol index */
#define MUTIWEATHER_PROTOCOL 9 /* All-in-one weather sensor */
#define NMEA0183_PROTOCOL 10 /* Standalone BeiDou NMEA0183 standard protocol */
#define RESERVE2_PROTOCOL 17 /* Reserved protocol index 2 */
#define RESERVE4_PROTOCOL 19 /* Reserved protocol index 4 */
#define RESERVE5_PROTOCOL 20 /* Reserved protocol index 5 */
@ -108,10 +106,10 @@
#define D_OPEN_MODULE_POWER 0x0009000C /* Power on the camera module (1 = effective) */
/* Macro definitions for commands sent down to the camera */
#define TAKE_PHOTO 20000 /* Take a photo */
#define SET_BAUD 10000 /* Set the speed-dome baud rate */
#define STOP_CMD 10005 /* Cancel or stop the current command */
#define AUTO_SCAN 10006 /* Auto-scan control (1/0 = enable/disable) */
#define Take_Photo 0 /* Take a photo */
#define Stop_Baud 10000 /* Set the speed-dome baud rate */
#define Stop_Cmd 10005 /* Cancel or stop the current command */
#define Auto_Scan 10006 /* Auto-scan control (1/0 = enable/disable) */
#define IRIS_CLOSE 10007 /* Close (narrow) the iris (1 = effective) */
#define IRIS_OPEN 10008 /* Open (widen) the iris (1 = effective) */
#define FOCUS_NEAR 10009 /* Focus near (1 = effective) */
@ -126,14 +124,9 @@
#define SAVE_PRESETNO 10018 // Set a preset position
#define OPEN_TOTAL 10019 /* Turn on the main power (1 = effective) */
#define OPEN_MODULE_POWER 10020 /* Power on the camera module (1 = effective) */
#define NOTIFY_PTZ_CLOSE 10021 // Notify the PTZ that it is about to be powered off
#define QUERY_PTZ_STATE 10022 // Query the PTZ status
#define CLOSE_TOTAL 10040 /* Turn off the main power */
#define SPEED_DOME_CAMERA 0 /* Speed-dome camera */
#define SERIAL_CAMERA 2 /* Serial camera */
#define START_ONCE_SELF 200 /* Preset 200, used to trigger a one-time self-test */
#define COLLECT_DATA 0 /* For debugging only */
#define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 )
@ -221,48 +214,6 @@ typedef struct
uint8_t Phase; /* Phase on which the sensor is installed (for tension and tilt sensors); 11 means A1, ... */
} SERIAL_PARAM;
// PTZ status data
typedef struct
{
uint8_t ptz_process; /* Current PTZ phase (1: self-testing; 2: moving to a preset; 3: normal) */
uint8_t ptz_status; /* Current PTZ status (0: stopped; 1: moving; 2: module not powered; other: other error) */
int presetno; /* Preset number the PTZ is currently at */
float x_coordinate; /* Horizontal (pan) coordinate of the PTZ position */
float y_coordinate; /* Vertical (tilt) coordinate of the PTZ position */
} PTZ_STATE;
/*
$--RMC sentence (Recommended Minimum Navigation Information):
1  sentence ID: $--RMC
2  UTCtime   hhmmss.ss  UTC time
3  status    V = data invalid, A = data valid
4  lat       ddmm.mmmmm  latitude (first 2 characters are degrees, the rest are minutes)
5  uLat      N - north, S - south
6  lon       dddmm.mmmm  longitude (first 3 characters are degrees, the rest are minutes)
7  uLon      E - east, W - west
8  spd       speed over ground
9  cog       course over ground
10 date      ddmmyy  day dd, month mm, year yy
11 mv        magnetic variation
12 mvE       E - east, W - west
13 mode      positioning mode indicator [1]
14 navStatus navigation status, V (NMEA 4.1 and later)
15 CS        checksum, covering the characters between '$' and '*'
*/
// BeiDou satellite data
typedef struct
{
struct tm UTC_time; /* UTC time */
int ms_time; /* Milliseconds */
double lat; /* Latitude; in the raw value the first 2 characters are degrees and the rest are minutes, converted here to degrees */
char uLat; /* Latitude hemisphere: 'N' - north, 'S' - south */
double lon; /* Longitude; in the raw value the first 3 characters are degrees and the rest are minutes, converted here to degrees */
char uLon; /* Longitude hemisphere: 'E' - east, 'W' - west */
char status; /* 'A' = data valid; any other character means the data is invalid */
} BD_GNSS_DATA;
typedef struct
{
int m_iRevStatus; /* */
@ -292,19 +243,8 @@ typedef struct
使*/
PHOTO_DEF image; /* Temporary storage for image data */
int64_t FirstCmdTimeCnt; /* Start time of reading data from the serial port */
PTZ_STATE ptz_state;
int sendptzstatecmd; // Controls how many times the status query command is sent
BD_GNSS_DATA bd_data;
} SIO_PARAM_SERIAL_DEF;
typedef const struct
{
//char *account; // Command description
char *cmd_name; // Command name
int (*recv_process)(SIO_PARAM_SERIAL_DEF *); /* URC data handler */
}BD_NMEA0183_PROC_FUNC;
// Centralized definitions of all parameters for serial-port devices
typedef struct
{
@ -392,11 +332,11 @@ void Gm_OpenSerialPort(int devidx);
// Close serial-port communication
void Gm_CloseSerialPort();
void DBG_LOG(int commid, char flag, const char* format, ...);
int SaveLogTofile(int commid, const char *szbuf);
void DebugLog(int commid, char *szbuf, char flag);
int SaveLogTofile(int commid, char *szbuf);
// Sends data over the serial port; returns the number of bytes actually sent
int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid);
void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log);
void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir,const char *log);
// Start serial-port communication
void GM_StartSerialComm();
// Start taking photos over the serial port
@ -532,26 +472,6 @@ int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr);
void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx);
int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud);
int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len);
unsigned char BDXorCheck(unsigned char *msg, int len);
void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
char** BD_NMEA0183_SplitString(char *str, int *total_fields);
int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial);
#endif // __SENSOR_PROTOCOL_H__

File diff suppressed because it is too large

@ -1,724 +0,0 @@
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein is
* confidential and proprietary to MediaTek Inc. and/or its licensors. Without
* the prior written permission of MediaTek inc. and/or its licensors, any
* reproduction, modification, use or disclosure of MediaTek Software, and
* information contained herein, in whole or in part, shall be strictly
* prohibited.
*
* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
* ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
* WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
* RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
* INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
* TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
* RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
* OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
* SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
* RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
* ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
* RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
* MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
* CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek
* Software") have been modified by MediaTek Inc. All revisions are subject to
* any receiver's applicable license agreements with MediaTek Inc.
*/
#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_section {
MTK_HAL_REQUEST = 0xC000, // MTK HAL internal metadata starts at 0xC0000000
MTK_P1NODE,
MTK_P2NODE,
MTK_3A_TUNINING,
MTK_3A_EXIF,
MTK_MF_EXIF,
MTK_EIS,
MTK_STEREO,
MTK_FRAMESYNC,
MTK_VHDR,
MTK_PIPELINE,
MTK_NR,
MTK_PLUGIN,
MTK_DUALZOOM,
MTK_FEATUREPIPE,
MTK_POSTPROC,
MTK_FEATURE,
MTK_FSC,
} mtk_platform_metadata_section_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_section_start {
MTK_HAL_REQUEST_START = MTK_HAL_REQUEST << 16,
MTK_P1NODE_START = MTK_P1NODE << 16,
MTK_P2NODE_START = MTK_P2NODE << 16,
MTK_3A_TUNINING_START = MTK_3A_TUNINING << 16,
MTK_3A_EXIF_START = MTK_3A_EXIF << 16,
MTK_EIS_START = MTK_EIS << 16,
MTK_STEREO_START = MTK_STEREO << 16,
MTK_FRAMESYNC_START = MTK_FRAMESYNC << 16,
MTK_VHDR_START = MTK_VHDR << 16,
MTK_PIPELINE_START = MTK_PIPELINE << 16,
MTK_NR_START = MTK_NR << 16,
MTK_PLUGIN_START = MTK_PLUGIN << 16,
MTK_DUALZOOM_START = MTK_DUALZOOM << 16,
MTK_FEATUREPIPE_START = MTK_FEATUREPIPE << 16,
MTK_POSTPROC_START = MTK_POSTPROC << 16,
MTK_FEATURE_START = MTK_FEATURE << 16,
MTK_FSC_START = MTK_FSC << 16,
} mtk_platform_metadata_section_start_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_tag {
MTK_HAL_REQUEST_REQUIRE_EXIF = MTK_HAL_REQUEST_START, //MUINT8
MTK_HAL_REQUEST_DUMP_EXIF, //MUINT8
MTK_HAL_REQUEST_REPEAT, //MUINT8
MTK_HAL_REQUEST_DUMMY, //MUINT8
MTK_HAL_REQUEST_SENSOR_SIZE, //MSize
MTK_HAL_REQUEST_SENSOR_ID, //MINT32
MTK_HAL_REQUEST_DEVICE_ID, //MINT32
MTK_HAL_REQUEST_HIGH_QUALITY_CAP, //MUINT8
MTK_HAL_REQUEST_ISO_SPEED, //MINT32
MTK_HAL_REQUEST_BRIGHTNESS_MODE, //MINT32
MTK_HAL_REQUEST_CONTRAST_MODE, //MINT32
MTK_HAL_REQUEST_HUE_MODE, //MINT32
MTK_HAL_REQUEST_SATURATION_MODE, //MINT32
MTK_HAL_REQUEST_EDGE_MODE, //MINT32
MTK_HAL_REQUEST_PASS1_DISABLE, //MINT32
MTK_HAL_REQUEST_ERROR_FRAME, // used for error handling //MUINT8
MTK_HAL_REQUEST_PRECAPTURE_START, // 4cell //MUINT8
MTK_HAL_REQUEST_AF_TRIGGER_START, // 4cell //MUINT8
MTK_HAL_REQUEST_IMG_IMGO_FORMAT, //MINT32
MTK_HAL_REQUEST_IMG_RRZO_FORMAT, //MINT32
MTK_HAL_REQUEST_INDEX, //MINT32
MTK_HAL_REQUEST_COUNT, //MINT32
MTK_HAL_REQUEST_SMVR_FPS, //MUINT8 // 0: NOT batch request
MTK_HAL_REQUEST_REMOSAIC_ENABLE, //MUINT8 // 0: preview mode 1: capture mode
MTK_HAL_REQUEST_INDEX_BSS, //MINT32
MTK_HAL_REQUEST_ZSD_CAPTURE_INTENT, //MUINT8
MTK_HAL_REQUEST_REAL_CAPTURE_SIZE, //MSize
MTK_HAL_REQUEST_VIDEO_SIZE, //MSize
MTK_HAL_REQUEST_RAW_IMAGE_INFO, //MINT32 // index[0]: raw fmt, index[1]: raw stride, index[2]: raw size(width), index[3]: raw size(height)
MTK_HAL_REQUEST_ISP_PIPELINE_MODE, //MINT32
MTK_P1NODE_SCALAR_CROP_REGION = MTK_P1NODE_START, //MRect
MTK_P1NODE_BIN_CROP_REGION, //MRect
MTK_P1NODE_DMA_CROP_REGION, //MRect
MTK_P1NODE_BIN_SIZE, //MSize
MTK_P1NODE_RESIZER_SIZE, //MSize
MTK_P1NODE_RESIZER_SET_SIZE, //MSize
MTK_P1NODE_CTRL_RESIZE_FLUSH, //MBOOL
MTK_P1NODE_CTRL_READOUT_FLUSH, //MBOOL
MTK_P1NODE_CTRL_RECONFIG_SENSOR_SETTING, //MBOOL
MTK_P1NODE_PROCESSOR_MAGICNUM, //MINT32
MTK_P1NODE_MIN_FRM_DURATION, //MINT64
MTK_P1NODE_RAW_TYPE, //MINT32
MTK_P1NODE_SENSOR_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER1_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER2_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER1_SIZE, //MSize
MTK_P1NODE_SENSOR_MODE, //MINT32
MTK_P1NODE_SENSOR_VHDR_MODE, //MINT32
MTK_P1NODE_METADATA_TAG_INDEX, //MINT32
MTK_P1NODE_RSS_SIZE, //MSize
MTK_P1NODE_SENSOR_STATUS, //MINT32
MTK_P1NODE_SENSOR_RAW_ORDER, //MINT32
MTK_P1NODE_TWIN_SWITCH, //MINT32
MTK_P1NODE_TWIN_STATUS, //MINT32
MTK_P1NODE_RESIZE_QUALITY_SWITCH, //MINT32
MTK_P1NODE_RESIZE_QUALITY_STATUS, //MINT32
MTK_P1NODE_RESIZE_QUALITY_LEVEL, //MINT32
MTK_P1NODE_RESIZE_QUALITY_SWITCHING, //MBOOL
MTK_P1NODE_RESUME_SHUTTER_TIME_US, //MINT32
MTK_P1NODE_FRAME_START_TIMESTAMP, //MINT64
MTK_P1NODE_FRAME_START_TIMESTAMP_BOOT, //MINT64
MTK_P1NODE_REQUEST_PROCESSED_WITHOUT_WB, //MBOOL
MTK_P1NODE_ISNEED_GMV, //MBOOL
MTK_P2NODE_HIGH_SPEED_VDO_FPS = MTK_P2NODE_START, //MINT32
MTK_P2NODE_HIGH_SPEED_VDO_SIZE, //MSize
MTK_P2NODE_CTRL_CALTM_ENABLE, //MBOOL
MTK_P2NODE_FD_CROP_REGION, //MRect
MTK_P2NODE_CROP_REGION, //MRect // for removing black edge
MTK_P2NODE_DSDN_ENABLE, //MBOOL // for DSDN on/off controled by Policy
MTK_P2NODE_SENSOR_CROP_REGION, //MRect
MTK_3A_AE_HIGH_ISO_BINNING, //MBOOL // for 3HDR high iso binning mode
MTK_SENSOR_SCALER_CROP_REGION, //MRect
MTK_PROCESSOR_CAMINFO = MTK_3A_TUNINING_START, //IMemory
MTK_ISP_ATMS_MAPPING_INFO, //IMemory
MTK_3A_ISP_PROFILE, //MUINT8
MTK_3A_ISP_P1_PROFILE, //MUINT8
MTK_CAMINFO_LCSOUT_INFO, //IMemory
MTK_3A_ISP_BYPASS_LCE, //MBOOL
MTK_3A_ISP_DISABLE_NR, //MBOOL
MTK_3A_ISP_NR3D_SW_PARAMS, //MINT32[14] //GMVX, GMVY, confX, confY, MAX_GMV, frameReset, GMV_Status,ISO_cutoff
MTK_3A_ISP_NR3D_HW_PARAMS, //IMemory
MTK_3A_ISP_LCE_GAIN, //MINT32, bits[0:15]: LCE gain, bits[16:31]: LCE gain confidence ratio (0-100)
MTK_3A_ISP_FUS_NUM, //MINT32
MTK_3A_AE_CAP_PARAM, //IMemory
MTK_3A_AE_CAP_SINGLE_FRAME_HDR, //MUINT8
MTK_3A_AE_BV_TRIGGER, //MBOOL
MTK_3A_AF_LENS_POSITION, //MINT32
MTK_3A_FLICKER_RESULT, //MINT32
MTK_3A_DUMMY_BEFORE_REQUEST_FRAME, //MBOOL // Dummy frame before capture, only for capture intent, preview don't use
MTK_3A_DUMMY_AFTER_REQUEST_FRAME, //MBOOL // Dummy frame after capture, only for capture intent, preview don't use
MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MAX, //MINT32
MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MIN, //MINT32
MTK_3A_MANUAL_AWB_COLORTEMPERATURE, //MINT32
MTK_3A_HDR_MODE, //MUINT8
MTK_3A_AE_HDR_MIXED_ISO, //MUINT32
MTK_3A_AE_ZSL_STABLE, //MINT32 ( MBOOL )
MTK_3A_PGN_ENABLE, //MUINT8
MTK_3A_SKIP_HIGH_QUALITY_CAPTURE, //MUINT8
MTK_3A_AI_SHUTTER, //MBOOL
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, //MINT32
MTK_3A_FEATURE_AE_TARGET_MODE, //MINT32
MTK_3A_OPEN_ID, //MINT32
MTK_LSC_TBL_DATA, //IMemory
MTK_LSC_TSF_DATA, //IMemory
MTK_LSC_TSF_DUMP_NO, //IMemory
MTK_ISP_P2_ORIGINAL_SIZE, //MSize
MTK_ISP_P2_CROP_REGION, //MRect
MTK_ISP_P2_RESIZER_SIZE, //MSize
MTK_ISP_P2_IN_IMG_FMT, //MINT32, 0 or not exist: RAW->YUV, 1: YUV->YUV
MTK_ISP_P2_TUNING_UPDATE_MODE, //MUINT8, [0 or not exist]: as default; [1]: keep existed parameters but some parts will be updated; [2]: keep all existed parameters (force mode) [3] LPCNR Pass1 [4] LPCNR Pass2
MTK_ISP_P2_IN_IMG_RES_REVISED, //MINT32, describes P2 input image revised resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May not exist.
MTK_ISP_APP_TARGET_SIZE, //MINT32, describes APP Target resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May not exist.
MTK_MSF_SCALE_INDEX, //MINT32, which scale stage index, would only exist with scaling flow
MTK_MSF_FRAME_NUM, //MINT32, After BSS which frame number is this stage using
MTK_TOTAL_MULTI_FRAME_NUM, //MINT32, MSYUV function uses this input to know the frame number
MTK_TOTAL_MULTI_FRAME_NUM_CAPTURED, //MINT32, MSF function used
MTK_SW_DSDN_VERSION, //MINT32, distinguish different dsdn version
MTK_ISP_COLOR_SPACE, //MINT32
MTK_ISP_DRC_CURVE, //IMemory
MTK_ISP_DRC_CURVE_SIZE, //MINT32
MTK_ISP_FEO_DATA, //IMemory
MTK_ISP_FEO_ENABLE, //MINT32
MTK_ISP_FEO_INFO, //IMemory
MTK_ISP_HLR_RATIO, //MINT32, which is a HDR ratio applied in HLR
MTK_ISP_STAGE, //MINT32
MTK_FOCUS_AREA_POSITION, //MINT32
MTK_FOCUS_AREA_SIZE, //MSize
MTK_FOCUS_AREA_RESULT, //MUINT8
MTK_FOCUS_PAUSE, //MUINT8
MTK_FOCUS_MZ_ON, //MUINT8
MTK_3A_AF_FOCUS_VALUE, //MINT64
MTK_3A_PRV_CROP_REGION, //MRect
MTK_3A_ISP_MDP_TARGET_SIZE, //MSize
MTK_3A_REPEAT_RESULT, //MUINT8
MTK_3A_SKIP_PRECAPTURE, //MBOOL //if CUST_ENABLE_FLASH_DURING_TOUCH is true, MW can skip precapture
MTK_3A_SKIP_BAD_FRAME, //MBOOL
MTK_3A_FLARE_IN_MANUAL_CTRL_ENABLE, //MBOOL
MTK_3A_DYNAMIC_SUBSAMPLE_COUNT, //MINT32 30fps = 1, 60fps = 2, ... , 120fps = 4
MTK_3A_AE_LV_VALUE, //MINT32
MTK_APP_CONTROL, //MINT32
MTK_3A_CUST_PARAMS, //IMemory
MTK_3A_SETTING_CUST_PARAMS, //IMemory
MTK_3A_PERFRAME_INFO, //IMemory
MTK_SENSOR_MODE_INFO_ACTIVE_ARRAY_CROP_REGION, //MRect
MTK_3A_AE_BV, //MINT32
MTK_3A_AE_CWV, //MINT32
MTK_ISP_P2_PROCESSED_RAW, //MINT32
MTK_3A_EXIF_METADATA = MTK_3A_EXIF_START, //IMetadata
MTK_EIS_REGION = MTK_EIS_START, //MINT32
MTK_EIS_INFO, //MINT64
MTK_EIS_VIDEO_SIZE, //MRect
MTK_EIS_NEED_OVERRIDE_TIMESTAMP, //MBOOL
MTK_EIS_LMV_DATA, //IMemory
MTK_STEREO_JPS_MAIN1_CROP = MTK_STEREO_START, //MRect
MTK_STEREO_JPS_MAIN2_CROP, //MRect
MTK_STEREO_SYNC2A_MODE, //MINT32
MTK_STEREO_SYNCAF_MODE, //MINT32
MTK_STEREO_HW_FRM_SYNC_MODE, //MINT32
MTK_STEREO_NOTIFY, //MINT32
MTK_STEREO_SYNC2A_MASTER_SLAVE, //MINT32[2]
MTK_STEREO_SYNC2A_STATUS, //IMemory
MTK_JPG_ENCODE_TYPE, //MINT8
MTK_CONVERGENCE_DEPTH_OFFSET, //MFLOAT
MTK_N3D_WARPING_MATRIX_SIZE, //MUINT32
MTK_P1NODE_MAIN2_HAL_META, //IMetadata
MTK_P2NODE_BOKEH_ISP_PROFILE, //MUINT8
MTK_STEREO_FEATURE_DENOISE_MODE, //MINT32
MTK_STEREO_FEATURE_SENSOR_PROFILE, //MINT32
MTK_P1NODE_MAIN2_APP_META, //IMetadata
MTK_STEREO_FEATURE_OPEN_ID, //MINT32
MTK_STEREO_FRAME_PER_CAPTURE, //MINT32
MTK_STEREO_ENABLE_MFB, //MINT32
MTK_STEREO_BSS_RESULT, //MINT32
MTK_STEREO_FEATURE_FOV_CROP_REGION, //MINT32[6] // p.x, p.y, p.w, p.h, srcW, srcH
MTK_STEREO_DCMF_FEATURE_MODE, //MINT32 // mtk_platform_metadata_enum_dcmf_feature_mode
MTK_STEREO_HDR_EV, //MINT32
MTK_STEREO_DELAY_FRAME_COUNT, //MINT32
MTK_STEREO_DCMF_DEPTHMAP_SIZE, //MSize
MTK_STEREO_WITH_CAMSV, //MBOOL
MTK_FRAMESYNC_ID = MTK_FRAMESYNC_START, //MINT32
MTK_FRAMESYNC_TOLERANCE, //MINT64
MTK_FRAMESYNC_FAILHANDLE, //MINT32
MTK_FRAMESYNC_RESULT, //MINT64
MTK_FRAMESYNC_TYPE, //MINT32
MTK_FRAMESYNC_MODE, //MUINT8
MTK_VHDR_LCEI_DATA = MTK_VHDR_START, //Memory
MTK_VHDR_IMGO_3A_ISP_PROFILE, //MUINT8
MTK_HDR_FEATURE_HDR_HAL_MODE,
MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM,
MTK_VHDR_MULTIFRAME_TIMESTAMP, //MINT64
MTK_VHDR_MULTIFRAME_EXPOSURE_TIME, //MINT64
MTK_PIPELINE_UNIQUE_KEY = MTK_PIPELINE_START, //MINT32
MTK_PIPELINE_FRAME_NUMBER, //MINT32
MTK_PIPELINE_REQUEST_NUMBER, //MINT32
MTK_PIPELINE_EV_VALUE, //MINT32
MTK_PIPELINE_DUMP_UNIQUE_KEY, //MINT32
MTK_PIPELINE_DUMP_FRAME_NUMBER, //MINT32
MTK_PIPELINE_DUMP_REQUEST_NUMBER, //MINT32
MTK_PIPELINE_VIDEO_RECORD, //MINT32
MTK_NR_MODE = MTK_NR_START, //MINT32
MTK_NR_MNR_THRESHOLD_ISO, //MINT32
MTK_NR_SWNR_THRESHOLD_ISO, //MINT32
MTK_REAL_LV, //MINT32
MTK_ANALOG_GAIN, //MUINT32
MTK_AWB_RGAIN, //MINT32
MTK_AWB_GGAIN, //MINT32
MTK_AWB_BGAIN, //MINT32
MTK_PLUGIN_MODE = MTK_PLUGIN_START, //MINT64
MTK_PLUGIN_COMBINATION_KEY, //MINT64
MTK_PLUGIN_P2_COMBINATION, //MINT64
MTK_PLUGIN_PROCESSED_FRAME_COUNT, //MINT32
MTK_PLUGIN_CUSTOM_HINT, //MINT32
MTK_PLUGIN_DETACT_JOB_SYNC_TOKEN, //MINT64, may not exist.
MTK_PLUGIN_UNIQUEKEY,
MTK_DUALZOOM_DROP_REQ = MTK_DUALZOOM_START, //MINT32
MTK_DUALZOOM_FORCE_ENABLE_P2, //MINT32
MTK_DUALZOOM_DO_FRAME_SYNC, //MINT32
MTK_DUALZOOM_ZOOM_FACTOR, //MINT32
MTK_DUALZOOM_DO_FOV, //MINT32
MTK_DUALZOOM_FOV_RECT_INFO, //MINT32
MTK_DUALZOOM_FOV_CALB_INFO, //MINT32
MTK_DUALZOOM_FOV_MARGIN_PIXEL, //MSize
MTK_DUALCAM_AF_STATE, //MUINT8
MTK_DUALCAM_LENS_STATE, //MUINT8
MTK_DUALCAM_TIMESTAMP, //MINT64
MTK_DUALZOOM_3DNR_MODE, //MINT32
MTK_DUALZOOM_ZOOMRATIO, //MINT32
MTK_DUALZOOM_CENTER_SHIFT, //MINT32
MTK_DUALZOOM_FOV_RATIO, //MFLOAT
MTK_DUALZOOM_REAL_MASTER, //MINT32
MTK_DUALZOOM_FD_TARGET_MASTER, //MINT32
MTK_DUALZOOM_FD_REAL_MASTER, //MINT32 // maybe not set
MTK_LMV_SEND_SWITCH_OUT, //MINT32
MTK_LMV_SWITCH_OUT_RESULT, //MINT32
MTK_LMV_VALIDITY, //MINT32
MTK_VSDOF_P1_MAIN1_ISO, //MINT32
MTK_DUALZOOM_IS_STANDBY, //MBOOL
MTK_DUALZOOM_CAP_CROP, //MRect
MTK_DUALZOOM_MASTER_UPDATE_MODE, //MBOOL
MTK_DUALZOOM_STREAMING_NR, //MINT32
MTK_FEATUREPIPE_APP_MODE = MTK_FEATUREPIPE_START, //MINT32
MTK_POSTPROC_TYPE = MTK_POSTPROC_START, //MINT32
MTK_FEATURE_STREAMING = MTK_FEATURE_START, //MINT64
MTK_FEATURE_CAPTURE, //MINT64
MTK_FEATURE_CAPTURE_PHYSICAL, //MINT64
MTK_FEATURE_FREE_MEMORY_MBYTE, //MINT32
MTK_FEATURE_MFNR_NVRAM_QUERY_INDEX, //MINT32
MTK_FEATURE_MFNR_NVRAM_DECISION_ISO, //MINT32
MTK_FEATURE_MFNR_TUNING_INDEX_HINT, //MINT64
MTK_FEATURE_MFNR_FINAL_EXP, //MINT32
MTK_FEATURE_MFNR_OPEN_ID, //MINT32
MTK_FEATURE_AINR_MDLA_MODE, //MINT32
MTK_ISP_AINR_MDLA_MODE, //MINT32
MTK_ISP_LTM_BIT_MODE, //MINT32
MTK_FEATURE_BSS_SELECTED_FRAME_COUNT, //MINT32
MTK_FEATURE_BSS_FORCE_DROP_NUM, //MINT32
MTK_FEATURE_BSS_FIXED_LSC_TBL_DATA, //MUINT8
MTK_FEATURE_BSS_PROCESS, //MINT32
MTK_FEATURE_BSS_ISGOLDEN, //MBOOL
MTK_FEATURE_BSS_REORDER, //MBOOL
MTK_FEATURE_BSS_MANUAL_ORDER, //MUINT8
MTK_FEATURE_BSS_RRZO_DATA, //MUINT8
MTK_FEATURE_BSS_DOWNSAMPLE, //MBOOL
MTK_FEATURE_PACK_RRZO, //MUINT8
MTK_FEATURE_FACE_RECTANGLES, //MRect array
MTK_FEATURE_FACE_POSE_ORIENTATIONS, //MINT32[n*3] array, each struct includes: xAxis, yAxis, zAxis
MTK_FEATURE_CAP_YUV_PROCESSING, //MUINT8
MTK_FEATURE_CAP_PIPE_DCE_CONTROL, //MUINT8
MTK_FEATURE_MULTIFRAMENODE_BYPASSED, //MUINT8
MTK_FEATURE_FACE_APPLIED_GAMMA, //MINT32
MTK_FEATURE_CAP_PQ_USERID, //MINT64
MTK_FEATURE_FLIP_IN_P2A, //MINT32
MTK_FSC_CROP_DATA = MTK_FSC_START, //IMemory
MTK_FSC_WARP_DATA, //IMemory
MTK_STAGGER_ME_META, //IMetadata
MTK_STAGGER_SE_META, //IMetadata
MTK_STAGGER_BLOB_IMGO_ORDER //MUINT8
} mtk_platform_metadata_tag_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_3a_exif_metadata_tag {
MTK_3A_EXIF_FNUMBER, //MINT32
MTK_3A_EXIF_FOCAL_LENGTH, //MINT32
MTK_3A_EXIF_FOCAL_LENGTH_35MM, //MINT32
MTK_3A_EXIF_SCENE_MODE, //MINT32
MTK_3A_EXIF_AWB_MODE, //MINT32
MTK_3A_EXIF_LIGHT_SOURCE, //MINT32
MTK_3A_EXIF_EXP_PROGRAM, //MINT32
MTK_3A_EXIF_SCENE_CAP_TYPE, //MINT32
MTK_3A_EXIF_FLASH_LIGHT_TIME_US, //MINT32
MTK_3A_EXIF_AE_METER_MODE, //MINT32
MTK_3A_EXIF_AE_EXP_BIAS, //MINT32
MTK_3A_EXIF_CAP_EXPOSURE_TIME, //MINT32
MTK_3A_EXIF_AE_ISO_SPEED, //MINT32
MTK_3A_EXIF_REAL_ISO_VALUE, //MINT32
MTK_3A_EXIF_AE_BRIGHTNESS_VALUE, //MINT32
MTK_3A_EXIF_FLASH_FIRING_STATUS, //MINT32
MTK_3A_EXIF_FLASH_RETURN_DETECTION, //MINT32
MTK_3A_EXIF_FLASH_MODE, //MINT32
MTK_3A_EXIF_FLASH_FUNCTION, //MINT32
MTK_3A_EXIF_FLASH_REDEYE, //MINT32
MTK_3A_EXIF_DEBUGINFO_BEGIN, // debug info begin
// key: MINT32
MTK_3A_EXIF_DBGINFO_AAA_KEY = MTK_3A_EXIF_DEBUGINFO_BEGIN, //MINT32
MTK_3A_EXIF_DBGINFO_AAA_DATA,
MTK_3A_EXIF_DBGINFO_SDINFO_KEY,
MTK_3A_EXIF_DBGINFO_SDINFO_DATA,
MTK_3A_EXIF_DBGINFO_ISP_KEY,
MTK_3A_EXIF_DBGINFO_ISP_DATA,
//
MTK_CMN_EXIF_DBGINFO_KEY,
MTK_CMN_EXIF_DBGINFO_DATA,
//
MTK_MF_EXIF_DBGINFO_MF_KEY,
MTK_MF_EXIF_DBGINFO_MF_DATA,
//
MTK_N3D_EXIF_DBGINFO_KEY,
MTK_N3D_EXIF_DBGINFO_DATA,
//
MTK_POSTNR_EXIF_DBGINFO_NR_KEY,
MTK_POSTNR_EXIF_DBGINFO_NR_DATA,
//
MTK_RESVB_EXIF_DBGINFO_KEY,
MTK_RESVB_EXIF_DBGINFO_DATA,
//
MTK_RESVC_EXIF_DBGINFO_KEY,
MTK_RESVC_EXIF_DBGINFO_DATA,
// data: Memory
MTK_3A_EXIF_DEBUGINFO_END, // debug info end
} mtk_platform_3a_exif_metadata_tag_t;
// MTK_3A_FEATURE_AE_EXPOSURE_LEVEL
typedef enum mtk_camera_metadata_enum_ae_exposure_level {
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NONE = 0,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_SHORT,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NORMAL,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG,
} mtk_camera_metadata_enum_ae_exposure_level_t;
// MTK_3A_FEATURE_AE_TARGET_MODE
typedef enum mtk_camera_metadata_enum_ae_target_mode {
MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL = 0,
MTK_3A_FEATURE_AE_TARGET_MODE_IVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_ZVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX,
MTK_3A_FEATURE_AE_TARGET_MODE_SE_FIX,
MTK_3A_FEATURE_AE_TARGET_MODE_4CELL_MVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR_RTO1X,
MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_2EXP,
MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP,
} mtk_camera_metadata_enum_ae_target_mode_t;
//MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM
typedef enum mtk_camera_metadata_enum_stagger_valid_exposure_num {
MTK_STAGGER_VALID_EXPOSURE_NON = 0,
MTK_STAGGER_VALID_EXPOSURE_1 = 1,
MTK_STAGGER_VALID_EXPOSURE_2 = 2,
MTK_STAGGER_VALID_EXPOSURE_3 = 3
} mtk_camera_metadata_enum_stagger_valid_exposure_num_t;
//MTK_3A_ISP_FUS_NUM
typedef enum mtk_camera_metadata_enum_3a_isp_fus_num {
MTK_3A_ISP_FUS_NUM_NON = 0,
MTK_3A_ISP_FUS_NUM_1 = 1,
MTK_3A_ISP_FUS_NUM_2 = 2,
MTK_3A_ISP_FUS_NUM_3 = 3,
} mtk_camera_metadata_enum_3a_isp_fus_num_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_enum_nr_mode {
MTK_NR_MODE_OFF = 0,
MTK_NR_MODE_MNR,
MTK_NR_MODE_SWNR,
MTK_NR_MODE_AUTO
} mtk_platform_metadata_enum_nr_mode_t;
typedef enum mtk_platform_metadata_enum_mfb_mode {
MTK_MFB_MODE_OFF = 0,
MTK_MFB_MODE_MFLL,
MTK_MFB_MODE_AIS,
MTK_MFB_MODE_NUM,
} mtk_platform_metadata_enum_mfb_mode_t;
typedef enum mtk_platform_metadata_enum_custom_hint {
MTK_CUSTOM_HINT_0 = 0,
MTK_CUSTOM_HINT_1,
MTK_CUSTOM_HINT_2,
MTK_CUSTOM_HINT_3,
MTK_CUSTOM_HINT_4,
MTK_CUSTOM_HINT_NUM,
} mtk_platform_metadata_enum_custom_hint_t;
typedef enum mtk_platform_metadata_enum_plugin_mode {
MTK_PLUGIN_MODE_COMBINATION = 1 << 0,
MTK_PLUGIN_MODE_NR = 1 << 1,
MTK_PLUGIN_MODE_HDR = 1 << 2,
MTK_PLUGIN_MODE_MFNR = 1 << 3,
MTK_PLUGIN_MODE_COPY = 1 << 4,
MTK_PLUGIN_MODE_TEST_PRV = 1 << 5,
MTK_PLUGIN_MODE_BMDN = 1 << 6,
MTK_PLUGIN_MODE_MFHR = 1 << 7,
MTK_PLUGIN_MODE_BMDN_3rdParty = 1 << 8,
MTK_PLUGIN_MODE_MFHR_3rdParty = 1 << 9,
MTK_PLUGIN_MODE_FUSION_3rdParty = 1 << 10,
MTK_PLUGIN_MODE_VSDOF_3rdParty = 1 << 11,
MTK_PLUGIN_MODE_COLLECT = 1 << 12,
MTK_PLUGIN_MODE_HDR_3RD_PARTY = 1 << 13,
MTK_PLUGIN_MODE_MFNR_3RD_PARTY = 1 << 14,
MTK_PLUGIN_MODE_BOKEH_3RD_PARTY = 1 << 15,
MTK_PLUGIN_MODE_DCMF_3RD_PARTY = 1 << 16,
} mtk_platform_metadata_enum_plugin_mode_t;
typedef enum mtk_platform_metadata_enum_p2_plugin_combination {
MTK_P2_RAW_PROCESSOR = 1 << 0,
MTK_P2_ISP_PROCESSOR = 1 << 1,
MTK_P2_YUV_PROCESSOR = 1 << 2,
MTK_P2_MDP_PROCESSOR = 1 << 3,
MTK_P2_CAPTURE_REQUEST = 1 << 4,
MTK_P2_PREVIEW_REQUEST = 1 << 5
} mtk_platform_metadata_enum_p2_plugin_combination;
typedef enum mtk_platform_metadata_enum_isp_color_space {
MTK_ISP_COLOR_SPACE_SRGB = 0 ,
MTK_ISP_COLOR_SPACE_DISPLAY_P3 = 1 ,
MTK_ISP_COLOR_SPACE_CUSTOM_1 = 2
} mtk_platform_metadata_enum_isp_color_space;
typedef enum mtk_platform_metadata_enum_dualzoom_drop_req {
MTK_DUALZOOM_DROP_NEVER_DROP = 0,
MTK_DUALZOOM_DROP_NONE = 1,
MTK_DUALZOOM_DROP_DIRECTLY = 2,
MTK_DUALZOOM_DROP_NEED_P1,
MTK_DUALZOOM_DROP_NEED_SYNCMGR,
MTK_DUALZOOM_DROP_NEED_SYNCMGR_NEED_STREAM_F_PIPE,
} mtk_platform_metadata_enum_dualzoom_drop_req_t;
typedef enum mtk_platform_metadata_enum_p1_sensor_status {
MTK_P1_SENSOR_STATUS_NONE = 0,
MTK_P1_SENSOR_STATUS_STREAMING = 1,
MTK_P1_SENSOR_STATUS_SW_STANDBY = 2,
MTK_P1_SENSOR_STATUS_HW_STANDBY = 3,
} mtk_platform_metadata_enum_p1_sensor_status_t;
typedef enum mtk_platform_metadata_enum_p1_twin_switch {
MTK_P1_TWIN_SWITCH_NONE = 0,
MTK_P1_TWIN_SWITCH_ONE_TG = 1,
MTK_P1_TWIN_SWITCH_TWO_TG = 2
} mtk_platform_metadata_enum_p1_twin_switch_t;
typedef enum mtk_platform_metadata_enum_p1_twin_status {
MTK_P1_TWIN_STATUS_NONE = 0,
MTK_P1_TWIN_STATUS_TG_MODE_1 = 1,
MTK_P1_TWIN_STATUS_TG_MODE_2 = 2,
MTK_P1_TWIN_STATUS_TG_MODE_3 = 3,
} mtk_platform_metadata_enum_p1_twin_status_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_switch {
MTK_P1_RESIZE_QUALITY_SWITCH_NONE = 0,
MTK_P1_RESIZE_QUALITY_SWITCH_L_L = 1,
MTK_P1_RESIZE_QUALITY_SWITCH_L_H = 2,
MTK_P1_RESIZE_QUALITY_SWITCH_H_L = 3,
MTK_P1_RESIZE_QUALITY_SWITCH_H_H = 4,
} mtk_platform_metadata_enum_p1_resize_quality_switch_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_status {
MTK_P1_RESIZE_QUALITY_STATUS_NONE = 0,
MTK_P1_RESIZE_QUALITY_STATUS_ACCEPT = 1,
MTK_P1_RESIZE_QUALITY_STATUS_IGNORE = 2,
MTK_P1_RESIZE_QUALITY_STATUS_REJECT = 3,
MTK_P1_RESIZE_QUALITY_STATUS_ILLEGAL = 4,
} mtk_platform_metadata_enum_p1_resize_quality_status_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_level {
MTK_P1_RESIZE_QUALITY_LEVEL_UNKNOWN = 0,
MTK_P1_RESIZE_QUALITY_LEVEL_L = 1,
MTK_P1_RESIZE_QUALITY_LEVEL_H = 2,
} mtk_platform_metadata_enum_p1_resize_quality_level_t;
typedef enum mtk_platform_metadata_enum_lmv_result {
MTK_LMV_RESULT_OK = 0,
MTK_LMV_RESULT_FAILED,
MTK_LMV_RESULT_SWITCHING
} mtk_platform_metadata_enum_lmv_result_t;
typedef enum mtk_platform_metadata_enum_featurepipe_app_mode {
MTK_FEATUREPIPE_PHOTO_PREVIEW = 0,
MTK_FEATUREPIPE_VIDEO_PREVIEW = 1,
MTK_FEATUREPIPE_VIDEO_RECORD = 2,
MTK_FEATUREPIPE_VIDEO_STOP = 3,
} mtk_platform_metadata_enum_featurepipe_app_mode_t;
typedef enum mtk_platform_metadata_enum_dcmf_feature_mode {
MTK_DCMF_FEATURE_BOKEH = 0,
MTK_DCMF_FEATURE_MFNR_BOKEH = 1,
MTK_DCMF_FEATURE_HDR_BOKEH = 2,
} mtk_platform_metadata_enum_dcmf_feature_mode_t;
typedef enum mtk_platform_metadata_enum_smvr_fps {
MTK_SMVR_FPS_30 = 0,
MTK_SMVR_FPS_120 = 1,
MTK_SMVR_FPS_240 = 2,
MTK_SMVR_FPS_480 = 3,
MTK_SMVR_FPS_960 = 4,
} mtk_platform_metadata_enum_smvr_fps_t;
//MTK_FRAMESYNC_FAILHANDLE
typedef enum mtk_platform_metadata_enum_fremesync_failhandle {
MTK_FRAMESYNC_FAILHANDLE_CONTINUE,
MTK_FRAMESYNC_FAILHANDLE_DROP,
} mtk_platform_metadata_enum_fremesync_failhandle_t;
//MTK_FRAMESYNC_RESULT
typedef enum mtk_platform_metadata_enum_fremesync_result {
MTK_FRAMESYNC_RESULT_PASS,
MTK_FRAMESYNC_RESULT_FAIL_CONTINUE,
MTK_FRAMESYNC_RESULT_FAIL_DROP,
} mtk_platform_metadata_enum_fremesync_result_t;
//MTK_FRAMESYNC_MODE
typedef enum mtk_platform_metadata_enum_fremesync_mode {
MTK_FRAMESYNC_MODE_VSYNC_ALIGNMENT,
MTK_FRAMESYNC_MODE_READOUT_CENTER_ALIGNMENT,
} mtk_platform_metadata_enum_fremesync_mode_t;
//MTK_FEATURE_MULTIFRAMENODE_BYPASSED
typedef enum mtk_platform_metadata_enum_multiframenode_bypassed {
MTK_FEATURE_MULTIFRAMENODE_NOT_BYPASSED = 0,
MTK_FEATURE_MULTIFRAMENODE_TO_BE_BYPASSED = 1
} mtk_platform_metadata_enum_mfllnode_bypassed_t;
//MTK_FEATURE_BSS_PROCESS
typedef enum mtk_platform_metadata_enum_bss_processing {
MTK_FEATURE_BSS_PROCESS_ENABLE = 0,
MTK_FEATURE_BSS_PROCESS_DISABLE = 1
} mtk_platform_metadata_enum_bss_processing_t;
//MTK_FEATURE_BSS_MANUAL_ORDER
typedef enum mtk_platform_metadata_enum_bss_manual_order {
MTK_FEATURE_BSS_MANUAL_ORDER_OFF = 0,
MTK_FEATURE_BSS_MANUAL_ORDER_GOLDEN = 1
} mtk_platform_metadata_enum_bss_manual_order_t;
//MTK_FEATURE_CAP_YUV_PROCESSING
typedef enum mtk_platform_metadata_enum_cap_yuv_processing {
MTK_FEATURE_CAP_YUV_PROCESSING_NOT_NEEDED = 0,
MTK_FEATURE_CAP_YUV_PROCESSING_NEEDED = 1
} mtk_platform_metadata_enum_cap_yuv_processing_t;
//MTK_FEATURE_CAP_PIPE_DCE_CONTROL
typedef enum mtk_platform_metadata_enum_cap_pipe_control {
MTK_FEATURE_CAP_PIPE_DCE_ENABLE_BUT_NOT_APPLY = 2,
MTK_FEATURE_CAP_PIPE_DCE_MANUAL_DISABLE = 1,
MTK_FEATURE_CAP_PIPE_DCE_DEFAULT_APPLY = 0
} mtk_platform_metadata_enum_cap_pipe_dce_control_t;
// MTK_FEATURE_AINR_MDLA_MODE, MTK_ISP_AINR_MDLA_MODE
typedef enum mtk_platform_metadata_enum_ainr_mdla_mode {
MTK_FEATURE_AINR_MDLA_MODE_NONE = 0,
MTK_FEATURE_AINR_MDLA_MODE_DRCOUT_16BIT = 1,
MTK_FEATURE_AINR_MDLA_MODE_NNOUT_12BIT = 2,
MTK_FEATURE_AINR_MDLA_MODE_NNOUT_16BIT = 3,
} mtk_platform_metadata_enum_ainr_mdla_mode_t;
//MTK_ISP_P2_PROCESSED_RAW
typedef enum mtk_platform_metadata_enum_p2_processed_raw {
MTK_ISP_P2_PROCESSED_RAW_NOT_NEEDED = 0,
MTK_ISP_P2_PROCESSED_RAW_NEEDED = 1
} mtk_platform_metadata_enum_p2_processed_raw_t;
//MTK_DUALZOOM_STREAMING_NR
typedef enum mtk_platform_metadata_enum_dualzoom_streaming_nr {
MTK_DUALZOOM_STREAMING_NR_AUTO = 0,
MTK_DUALZOOM_STREAMING_NR_OFF = 1
} mtk_platform_metadata_enum_dualzoom_streaming_nr_t;
//MTK_STAGGER_BLOB_IMGO_ORDER
typedef enum mtk_platform_metadata_enum_stagger_blob_imgo_order {
MTK_STAGGER_IMGO_NONE = 0,
MTK_STAGGER_IMGO_NE = 1,
MTK_STAGGER_IMGO_ME = 2,
MTK_STAGGER_IMGO_SE = 3
} mtk_platform_metadata_enum_stagger_blob_imgo_order_t;
//MTK_3A_EXIF_FLASH_FIRING_STATUS
typedef enum mtk_platform_metadata_enum_3a_exif_flash_firing_status_t {
MTK_3A_EXIF_FLASH_FIRING_STATUS_NOT_FIRED = 0,
MTK_3A_EXIF_FLASH_FIRING_STATUS_FIRED = 1,
} mtk_platform_metadata_enum_3a_exif_flash_firing_status_t;
//MTK_3A_EXIF_FLASH_RETURN_DETECTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_return_detection_t {
MTK_3A_EXIF_FLASH_RETURN_DETECTION_NOT_SUPPORT = 0,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_RESERVED = 1,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_NOT_DETECTED = 2,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_DETECTED = 3,
} mtk_platform_metadata_enum_3a_exif_flash_return_detection_t;
//MTK_3A_EXIF_FLASH_MODE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_mode_t {
MTK_3A_EXIF_FLASH_MODE_UNKNOWN = 0,
MTK_3A_EXIF_FLASH_MODE_COMPULSORY_FIRING = 1,
MTK_3A_EXIF_FLASH_MODE_COMPULSORY_SUPPRESSION = 2,
MTK_3A_EXIF_FLASH_MODE_AUTO = 3,
} mtk_platform_metadata_enum_3a_exif_flash_mode_t;
//MTK_3A_EXIF_FLASH_FUNCTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_function_t {
MTK_3A_EXIF_FLASH_FUNCTION_SUPPORT = 0,
MTK_3A_EXIF_FLASH_FUNCTION_NOT_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_function_t;
//MTK_3A_EXIF_FLASH_REDEYE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_redeye_t {
MTK_3A_EXIF_FLASH_REDEYE_NOT_SUPPORT = 0,
MTK_3A_EXIF_FLASH_REDEYE_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_redeye_t;
//MTK_FEATURE_ABF
typedef enum mtk_platform_metadata_enum_abf_mode {
MTK_ABF_MODE_OFF = 0,
MTK_ABF_MODE_ON,
} mtk_platform_metadata_enum_abf_mode_t;
#endif
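For reference, a small sketch (not part of this vendor header) of how the section values above expand into the 32-bit tag bases used by these enums; MTK_P1NODE is assumed to be 0xC001 because it immediately follows MTK_HAL_REQUEST = 0xC000:

// Sketch: a section value occupies the high 16 bits of every tag in its range.
constexpr uint32_t kP1NodeBase = static_cast<uint32_t>(MTK_P1NODE) << 16;   // 0xC0010000
// MTK_P1NODE_SCALAR_CROP_REGION is therefore kP1NodeBase + 0, the first tag of the P1 node section.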

File diff suppressed because it is too large

@ -23,7 +23,6 @@
#include <opencv2/core/core.hpp>
#include "Camera2Helper.h"
#include <mutex>
#include <map>
#include <set>
/**
@ -40,9 +39,6 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
#define WAIT_AF_LOCKED 4
#define PREVIEW_REQUEST_IDX 0
#define CAPTURE_REQUEST_IDX 1
#define DEFAULT_WARMUP_TIME 250 // 250ms
class CameraManager
{
@ -85,11 +81,10 @@ public:
unsigned int orientation:3;
unsigned int zoom : 1;
unsigned int wait3ALocked : 3;
unsigned int burstRawCapture : 3;
unsigned int burstRawCapture : 2;
unsigned int customHdr : 1;
unsigned int hdrStep : 3;
unsigned int minFps : 4;
unsigned int reserved : 7;
unsigned int reserved : 12;
int64_t exposureTime;
unsigned int sensitivity;
int compensation;
@ -165,7 +160,7 @@ public:
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow);
CaptureRequest* CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1);
CaptureRequest* CreateRequest(bool isPreviewRequest);
void DestroyRequest(CaptureRequest* request);
void DestroySession();
@ -188,7 +183,6 @@ public:
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
void FireBurstCapture();
void FireOneCapture(uint64_t ts);
uint32_t GetLdr() const
{
@ -201,17 +195,10 @@ public:
}
bool IsCameraAvailable(const std::string& cameraId);
int64_t GetTimestamp(const ACameraMetadata* result);
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity);
void Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
void SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
bool SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request);
protected:
std::mutex m_locker;
std::set<std::string> m_availableCameras;
@ -249,11 +236,9 @@ protected:
bool mCaptureTriggered;
bool mFocusTriggered;
bool mCaptureDispatched;
uint32_t mStableFrameCount;
CAPTURE_RESULT mResult;
uint64_t m_startTime;
unsigned long long m_startTime;
protected:
@ -274,24 +259,32 @@ protected:
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
AImageReader* mImageReader2;
ANativeWindow* mImageWindow2;
ACameraOutputTarget* mOutputTarget2;
ACaptureSessionOutput* mSessionOutput2;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
// cv::Mat mOneFrame;
std::vector<std::pair<int64_t, cv::Mat> > mOneFrame;
cv::Mat mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;
// ACameraOutputTarget* image_reader_target;
// ACaptureRequest* capture_request;
// ACaptureSessionOutput* capture_session_output;
};

@ -6,13 +6,11 @@
#include <chrono>
#include <thread>
#include <android/log.h>
#include <errno.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
@ -22,97 +20,16 @@ extern "C" {
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#include <libavutil/log.h>
#include <android/log.h>
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
// Map FFmpeg log levels to Android log levels
int android_log_level;
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
android_log_level = ANDROID_LOG_FATAL;
break;
case AV_LOG_ERROR:
android_log_level = ANDROID_LOG_ERROR;
break;
case AV_LOG_WARNING:
android_log_level = ANDROID_LOG_WARN;
break;
case AV_LOG_INFO:
android_log_level = ANDROID_LOG_INFO;
break;
case AV_LOG_VERBOSE:
android_log_level = ANDROID_LOG_VERBOSE;
break;
case AV_LOG_DEBUG:
case AV_LOG_TRACE:
android_log_level = ANDROID_LOG_DEBUG;
break;
default:
android_log_level = ANDROID_LOG_INFO;
break;
}
// Format the log message
char log_message[1024];
vsnprintf(log_message, sizeof(log_message), fmt, vl);
// Send the log message to logcat
__android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) {
// Copy streams and fix time_base
for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
AVStream *in_stream = input_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(output_ctx, NULL);
if (!out_stream) {
return AVERROR_UNKNOWN;
}
// Copy codec parameters
int ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
if (ret < 0) {
return ret;
}
// Fix time base
out_stream->time_base = in_stream->time_base;
// Clear any existing flags
out_stream->codecpar->codec_tag = 0;
}
return 0;
}
int write_mp4_header(AVFormatContext *output_ctx) {
AVDictionary *opts = NULL;
// MP4 specific options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write header
int ret = avformat_write_header(output_ctx, &opts);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Header write failed: %s (code: %d)\n", errbuf, ret);
}
av_dict_free(&opts);
return ret;
}
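// Illustrative sketch (not part of the original sources): how setup_output_streams() and
// write_mp4_header() above are meant to be combined into a complete remux loop. Assumes a
// 1:1 stream mapping and trims most error handling.
static int remux_to_mp4_sketch(const char* inputUrl, const char* outputPath)
{
    AVFormatContext* in_ctx = nullptr;
    AVFormatContext* out_ctx = nullptr;
    AVPacket packet;
    int ret = -1;
    if (avformat_open_input(&in_ctx, inputUrl, nullptr, nullptr) != 0)
        return -1;
    if (avformat_find_stream_info(in_ctx, nullptr) < 0)
        goto fail;
    avformat_alloc_output_context2(&out_ctx, nullptr, "mp4", outputPath);
    if (!out_ctx)
        goto fail;
    if (setup_output_streams(in_ctx, out_ctx) < 0)
        goto fail;
    if (!(out_ctx->oformat->flags & AVFMT_NOFILE) &&
        avio_open(&out_ctx->pb, outputPath, AVIO_FLAG_WRITE) < 0)
        goto fail;
    if (write_mp4_header(out_ctx) < 0)
        goto fail;
    ret = 0;
    while (av_read_frame(in_ctx, &packet) >= 0) {
        AVStream* in_stream = in_ctx->streams[packet.stream_index];
        AVStream* out_stream = out_ctx->streams[packet.stream_index];
        // Rescale timestamps from the input time base to the one chosen by the muxer
        av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
        packet.pos = -1;
        ret = av_interleaved_write_frame(out_ctx, &packet);
        av_packet_unref(&packet);
        if (ret < 0)
            break;
    }
    av_write_trailer(out_ctx);
fail:
    avformat_close_input(&in_ctx);
    if (out_ctx && !(out_ctx->oformat->flags & AVFMT_NOFILE) && out_ctx->pb)
        avio_closep(&out_ctx->pb);
    avformat_free_context(out_ctx);
    return ret;
}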
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
av_register_all();
avformat_network_init();
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
@ -209,50 +126,24 @@ void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duratio
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle)
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_WARNING);
#endif
std::string url = rtspUrl;
AVDictionary* options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "5000000", 0);
if (!userName.empty())
{
av_dict_set(&options, "username", userName.c_str(), 0); // Replace with actual username
av_dict_set(&options, "password", password.c_str(), 0); // Replace with actual password
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", userName.c_str(), password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
av_register_all();
avformat_network_init();
// Open input RTSP stream
int res = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &options);
av_dict_free(&options);
if (res != 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open input: %s (error code: %d)\n", errbuf, res);
// fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
// fprintf(stderr, "Could not find stream information\n");
fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
@ -267,49 +158,21 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip audio streams
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
continue;
}
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
// Copy video stream as-is
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *out_stream = avformat_new_stream(outputFormatContext, codec);
if (!out_stream) {
return;
}
avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
out_stream->codecpar->codec_tag = 0;
out_stream->time_base = (AVRational){1, 90000};
out_stream->avg_frame_rate = inStream->avg_frame_rate;
}
else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
// Setup AAC audio stream
const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_encoder) {
fprintf(stderr, "AAC encoder not found\n");
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder);
if (!out_stream) {
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
// Set AAC parameters
out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
out_stream->codecpar->codec_id = AV_CODEC_ID_AAC;
out_stream->codecpar->sample_rate = in_codecpar->sample_rate;
out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP;
out_stream->codecpar->channels = in_codecpar->channels;
out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels);
out_stream->codecpar->bit_rate = 128000;
out_stream->codecpar->frame_size = 1024; // AAC frame size
out_stream->time_base = (AVRational){1, in_codecpar->sample_rate};
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
@ -322,58 +185,22 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
}
}
AVDictionary *opts = NULL;
// Set output format options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write output file header
res = avformat_write_header(outputFormatContext, &opts);
av_dict_free(&opts);
if (res < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res);
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
#if 0
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
#endif
uint32_t framesToSkip = 16;
uint32_t framesSkipped = 0;
// Skip initial frames
while (framesSkipped < framesToSkip) {
if (av_read_frame(inputFormatContext, &packet) < 0)
break;
if (packet.stream_index == 0) { // Video stream
framesSkipped++;
}
av_packet_unref(&packet);
}
auto startTime = av_gettime();
// int64_t durationUs = (int64_t)duration * 1000000;
int64_t durationUs = (int64_t)(duration + 32) * 1000; // duration is in milliseconds; av_gettime() counts microseconds
// Read packets from input and write them to output
while (1) {
if ((av_gettime() - startTime) >= durationUs) {
// printf("Duration limit reached (%d seconds)\n", ctx->duration_secs);
break;
}
#if 0
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
@ -386,35 +213,11 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
fprintf(stderr, "Error muxing packet\n");
break;
}
#endif
if (av_read_frame(inputFormatContext, &packet) < 0) break;
// Skip audio packets
if (inputFormatContext->streams[packet.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
av_packet_unref(&packet);
continue;
}
// Adjust packet timebase
AVStream *in_stream = inputFormatContext->streams[packet.stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
packet.pos = -1;
res = av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (res < 0)
{
break;
}
}
// stop_thread.join();
stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);

@ -6,11 +6,10 @@
#define MICROPHOTO_RTSPRECORDER_H
#include <string>
#include <android/multinetwork.h>
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration);
class RTSPRecorder {

@ -1,547 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#include "Streaming.h"
#include <iostream>
#include <string>
#include <thread>
#include <atomic>
#include <android/api-level.h>
#include <android/log.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
#if 0
StreamForwarder::~StreamForwarder() {
stop();
if (inputCtx) {
avformat_close_input(&inputCtx);
}
if (outputCtx) {
if (outputCtx->pb) {
avio_closep(&outputCtx->pb);
}
avformat_free_context(outputCtx);
}
}
bool StreamForwarder::initialize(const std::string& inputUrl, const std::string& outputUrl) {
if (!openInput(inputUrl)) {
return false;
}
if (!openOutput(outputUrl)) {
return false;
}
return true;
}
bool StreamForwarder::openInput(const std::string& inputUrl) {
inputCtx = avformat_alloc_context();
if (!inputCtx) {
return false;
}
if (avformat_open_input(&inputCtx, inputUrl.c_str(), nullptr, nullptr) < 0) {
return false;
}
if (avformat_find_stream_info(inputCtx, nullptr) < 0) {
return false;
}
return true;
}
bool StreamForwarder::openOutput(const std::string& outputUrl) {
int ret = avformat_alloc_output_context2(&outputCtx, nullptr, "flv", outputUrl.c_str());
if (ret < 0) {
return false;
}
// Copy streams from input to output
for (unsigned int i = 0; i < inputCtx->nb_streams; i++) {
AVStream* inStream = inputCtx->streams[i];
AVStream* outStream = avformat_new_stream(outputCtx, inStream->codec->codec);
if (!outStream) {
return false;
}
ret = avcodec_copy_context(outStream->codec, inStream->codec);
if (ret < 0) {
return false;
}
}
// Open output file
if (!(outputCtx->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&outputCtx->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
return false;
}
}
// Write header
ret = avformat_write_header(outputCtx, nullptr);
if (ret < 0) {
return false;
}
return true;
}
void StreamForwarder::setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback) {
frameCallback = callback;
}
void StreamForwarder::start() {
isRunning = true;
forwardPackets();
}
void StreamForwarder::stop() {
isRunning = false;
}
void StreamForwarder::forwardPackets() {
AVPacket packet;
AVFrame* frame = av_frame_alloc();
while (isRunning) {
if (av_read_frame(inputCtx, &packet) < 0) {
break;
}
// Process video frames if callback is set
if (frameCallback && packet.stream_index == 0) { // Assuming video is stream 0
AVCodecContext* codecCtx = inputCtx->streams[packet.stream_index]->codec;
int ret = avcodec_send_packet(codecCtx, &packet);
if (ret < 0) {
continue;
}
while (ret >= 0) {
ret = avcodec_receive_frame(codecCtx, frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
} else if (ret < 0) {
goto end;
}
processFrame(frame);
}
}
// Forward packet
av_packet_rescale_ts(&packet,
inputCtx->streams[packet.stream_index]->time_base,
outputCtx->streams[packet.stream_index]->time_base);
int ret = av_interleaved_write_frame(outputCtx, &packet);
if (ret < 0) {
break;
}
av_packet_unref(&packet);
}
end:
av_frame_free(&frame);
av_write_trailer(outputCtx);
}
void StreamForwarder::processFrame(AVFrame* frame) {
if (frameCallback) {
frameCallback(frame->data[0], frame->linesize[0],
frame->width, frame->height);
}
}
#endif
RtspForwarder::RtspForwarder(const std::string& input, const std::string& output)
: inputUrl(input), outputUrl(output), isRunning(false)
{
}
bool RtspForwarder::isStreaming() const
{
return isRunning;
}
bool RtspForwarder::start()
{
run();
return true;
}
bool RtspForwarder::stop()
{
isRunning = false;
return true;
}
int RtspForwarder::run()
{
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_DEBUG);
#endif
isRunning = true;
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
int ret;
int videoStreamIndex = -1;
int64_t startTime = AV_NOPTS_VALUE;
AVBSFContext* bsf_ctx = nullptr;
std::string url = inputUrl;
if (!m_userName.empty())
{
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Input options
AVDictionary* inputOptions = nullptr;
av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout
// av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer
std::cout << "Opening input: " << url << std::endl;
// Open input
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions);
av_dict_free(&inputOptions);
if (ret < 0) {
std::cerr << "Could not open input: " << av_err2str(ret) << std::endl;
return ret;
}
// Get stream info
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) {
// std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
return ret;
}
// Find video stream
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1) {
// std::cerr << "No video stream found" << std::endl;
avformat_close_input(&inputFormatContext);
return -1;
}
// Create stream mapping
std::vector<int> streamMapping(inputFormatContext->nb_streams, -1);
int outputStreamIdx = 0;
// Allocate output context
ret = avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str());
if ((ret < 0) || !outputFormatContext) {
std::cerr << "Could not create output context" << std::endl;
avformat_close_input(&inputFormatContext);
return false;
}
// FIXED VERSION - remove the redundant stream creation
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip non-video streams if needed
if (in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO) {
streamMapping[i] = -1;
continue;
}
// Create only ONE stream per input stream
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *outStream = avformat_new_stream(outputFormatContext, codec);
if (!outStream) {
return false;
}
ret = avcodec_parameters_copy(outStream->codecpar, in_codecpar);
outStream->codecpar->codec_tag = 0;
outStream->time_base = (AVRational){1, 90000};
outStream->avg_frame_rate = inStream->avg_frame_rate;
// Map input stream to output stream
streamMapping[i] = outputStreamIdx++;
}
const AVBitStreamFilter* filter = av_bsf_get_by_name("h264_mp4toannexb");
if (filter)
{
for (unsigned i = 0; i < outputFormatContext->nb_streams; i++) {
AVStream* stream = outputFormatContext->streams[i];
if (stream->codecpar->codec_id == AV_CODEC_ID_H264) {
ret = av_bsf_alloc(filter, &bsf_ctx);
if (ret < 0) {
std::cerr << "Failed to allocate bitstream filter context: " << av_err2str(ret) << std::endl;
return false;
}
// Copy parameters from input to bsf
ret = avcodec_parameters_copy(bsf_ctx->par_in, stream->codecpar);
if (ret < 0) {
std::cerr << "Failed to copy parameters to bsf: " << av_err2str(ret) << std::endl;
return false;
}
// Initialize the bsf context
ret = av_bsf_init(bsf_ctx);
if (ret < 0) {
std::cerr << "Failed to initialize bitstream filter: " << av_err2str(ret) << std::endl;
return false;
}
// Update output parameters
ret = avcodec_parameters_copy(stream->codecpar, bsf_ctx->par_out);
if (ret < 0) {
std::cerr << "Failed to copy parameters from bsf: " << av_err2str(ret) << std::endl;
return false;
}
break; // Only apply to the first H.264 stream
}
}
}
AVDictionary* outputOptions = nullptr;
av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&outputOptions, "rtsp_flags", "filter_src", 0);
av_dict_set(&outputOptions, "timeout", "5000000", 0);
av_dict_set(&outputOptions, "allowed_media_types", "video", 0);
av_dict_set(&outputOptions, "buffer_size", "1024000", 0); // 1MB buffer
av_dict_set(&outputOptions, "fflags", "nobuffer", 0); // Reduce latency
av_dict_set(&outputOptions, "muxdelay", "0.1", 0); // Reduce delay
av_dict_set(&outputOptions, "max_delay", "500000", 0);
av_dict_set(&outputOptions, "preset", "ultrafast", 0);
av_dict_set(&outputOptions, "tune", "zerolatency", 0);
av_dict_set(&outputOptions, "rtsp_flags", "prefer_tcp", 0);
// Open output
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
// Output options
// ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
ret = avio_open2(&outputFormatContext->pb, outputFormatContext->url, AVIO_FLAG_WRITE, NULL, &outputOptions);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Could not open output URL: " << errbuf << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
av_dict_free(&outputOptions);
return ret;
}
}
// Write header
ret = avformat_write_header(outputFormatContext, &outputOptions);
av_dict_free(&outputOptions);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing header: " << errbuf << std::endl;
avformat_close_input(&inputFormatContext);
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}
// Main loop - read and write packets
AVPacket packet;
AVMediaType mediaType;
while (isRunning) {
ret = av_read_frame(inputFormatContext, &packet);
if (ret < 0) {
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
std::cerr << "End of stream or timeout, reconnecting in "
<< reconnectDelayMs << "ms" << std::endl;
std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs));
avformat_close_input(&inputFormatContext);
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, nullptr); // reopen with the credential-embedded URL; inputOptions was already freed
if (ret < 0) continue;
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) continue;
continue;
}
break;
}
// Later when writing packets:
int original_stream_index = packet.stream_index;
if (streamMapping[original_stream_index] >= 0) {
packet.stream_index = streamMapping[original_stream_index];
// Write packet...
} else {
// Skip this packet
av_packet_unref(&packet);
continue;
}
// Skip audio and data packets
mediaType = inputFormatContext->streams[original_stream_index]->codecpar->codec_type;
if (mediaType == AVMEDIA_TYPE_AUDIO || mediaType == AVMEDIA_TYPE_DATA)
{
av_packet_unref(&packet);
continue;
}
#if 0
// Fix timestamps if enabled
if (fixTimestamps) {
// Handle timestamp issues similar to FFmpeg warning
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
if (packet.pts == AV_NOPTS_VALUE) {
// Generate PTS if missing
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
inStream->time_base);
packet.dts = packet.pts;
}
// Rescale timestamps to output timebase
packet.pts = av_rescale_q_rnd(packet.pts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration,
inStream->time_base,
outStream->time_base);
}
// Write packet to output
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl;
break;
}
#endif
AVStream *in_stream = inputFormatContext->streams[original_stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
// CRITICAL: Fix timestamp issues
if (packet.dts != AV_NOPTS_VALUE && packet.pts != AV_NOPTS_VALUE && packet.dts > packet.pts) {
packet.dts = packet.pts;
}
// Handle missing timestamps
if (packet.pts == AV_NOPTS_VALUE) {
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
out_stream->time_base);
packet.dts = packet.pts;
}
packet.pos = -1;
// Apply bitstream filter if it's H.264
if (bsf_ctx && out_stream->codecpar->codec_id == AV_CODEC_ID_H264) {
ret = av_bsf_send_packet(bsf_ctx, &packet);
if (ret < 0) {
std::cerr << "Error sending packet to bitstream filter: " << av_err2str(ret) << std::endl;
break;
}
while (ret >= 0) {
ret = av_bsf_receive_packet(bsf_ctx, &packet);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
// Need more input or end of file
break;
} else if (ret < 0) {
std::cerr << "Error receiving packet from bitstream filter: " << av_err2str(ret) << std::endl;
break;
}
// Write the filtered packet
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing frame: " << errbuf << std::endl;
break;
}
}
} else {
// Write the packet without filtering
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing frame: " << errbuf << std::endl;
break;
}
}
}
cleanup:
// Free the bitstream filter context
if (bsf_ctx) {
av_bsf_free(&bsf_ctx);
}
// Write trailer
av_write_trailer(outputFormatContext);
// Cleanup
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}

@ -1,90 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#ifndef MICROPHOTO_STREAMING_H
#define MICROPHOTO_STREAMING_H
#include <string>
#include <memory>
#include <functional>
#include <iostream>
#include <thread>
#include <atomic>
#include <android/multinetwork.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
}
class Streaming
{
public:
virtual ~Streaming() {}
virtual bool start() { return false; }
virtual bool stop() { return false; }
virtual bool isStreaming() const { return false; }
void setAuth(const std::string& userName, const std::string& password)
{
m_userName = userName;
m_password = password;
}
protected:
std::string m_userName;
std::string m_password;
};
#if 0
class StreamForwarder : public Streaming
{
private:
AVFormatContext* inputCtx = nullptr;
AVFormatContext* outputCtx = nullptr;
bool isRunning = false;
public:
StreamForwarder() = default;
virtual ~StreamForwarder();
bool initialize(const std::string& inputUrl, const std::string& outputUrl);
virtual void start();
virtual void stop();
private:
bool openInput(const std::string& inputUrl);
bool openOutput(const std::string& outputUrl);
void forwardPackets();
void setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback);
};
#endif
class RtspForwarder : public Streaming {
private:
std::string inputUrl;
std::string outputUrl;
std::atomic<bool> isRunning;
// Options
int reconnectDelayMs = 5000;
bool fixTimestamps = true;
public:
RtspForwarder(const std::string& input, const std::string& output);
virtual bool start();
virtual bool stop();
virtual bool isStreaming() const;
int run();
};
#endif //MICROPHOTO_STREAMING_H
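// Usage sketch (assumed calling code, not part of the project): start() simply calls run(),
// which blocks until stop() clears isRunning, so a non-blocking caller drives the forwarder
// from its own thread. URLs and credentials below are placeholders.
//
//     #include <thread>
//     RtspForwarder forwarder("rtsp://192.168.1.64/live/main", "rtsp://127.0.0.1:8554/out");
//     forwarder.setAuth("admin", "password");        // run() splices these into the input URL
//     std::thread worker([&forwarder]() { forwarder.run(); });
//     // ... later ...
//     forwarder.stop();                              // run() returns after the current packet
//     worker.join();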

@ -1,330 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HangYuCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
HangYuCtrl::~HangYuCtrl()
{
}
bool HangYuCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
// Stream index ranges from 1 to 4; 1 is the main stream
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return false;
}
std::string xmlString(resData.begin(), resData.end());
size_t widthStart = xmlString.find("<ResolutionWidth>");
size_t widthEnd = xmlString.find("</ResolutionWidth>");
if (widthStart != std::string::npos && widthEnd != std::string::npos) {
widthStart += std::string("<ResolutionWidth>").length();
xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
}
size_t heightStart = xmlString.find("<ResolutionHeigth>");
size_t heightEnd = xmlString.find("</ResolutionHeigth>");
if (heightStart != std::string::npos && heightEnd != std::string::npos) {
heightStart += std::string("<ResolutionHeigth>").length();
xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HangYuCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
// Top-left OSD
bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos);
size_t posi = osdstring.find("$$DATETIME$$");
if (posi != std::string::npos) {
size_t endPos = posi + 12;
while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
endPos++;
}
osdstring.erase(posi, endPos - posi);
}
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></OSD></MultiLineOSD>";
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
return res == 0; // DoPutRequest returns 0 on success
}
void HangYuCtrl::EnableOsd(bool enable, uint8_t channel)
{
// HangYu devices can only display the time plus one OSD text block
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return;
}
std::string xmlString(resData.begin(), resData.end());
std::string enableStartTag = "<Enable>";
std::string enableEndTag = "</Enable>";
size_t pos = 0;
while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) {
size_t startPos = pos + enableStartTag.length();
size_t endPos = xmlString.find(enableEndTag, startPos);
if (endPos == std::string::npos) {
break;
}
std::string newValue = enable ? "true" : "false";
xmlString.replace(startPos, endPos - startPos, newValue);
pos = endPos + enableEndTag.length();
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
// return;
}
}
std::string HangYuCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = 0;
for (int idx = 0; idx < 10; idx++)
{
res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res == 0 && !resData.empty())
{
break;
}
}
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
const char* start = strstr((const char*)&resData[0], "<RTSPURI>");
if (start == NULL)
{
return "";
}
start += 9;
const char* end = strstr(start, "</RTSPURI>");
if (end == NULL)
{
return "";
}
return std::string(start, end);
}
bool HangYuCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
// <?xml version="1.0" encoding="utf-8"?>
//<Time>
//<DateTimeFormat>
//<!--req,string,YYYYMMDDWhhmmss,YYYYMMDDhhmmss,MMDDYYYYWhhmmss,MMD
// DYYYYhhmmss,DDMMYYYYWhhmmss,DDMMYYYYhhmmss-->
//</DateTimeFormat>
//<TimeFormat><!--req,xs:string,12hour,24hour--></TimeFormat>
//<SystemTime><!--req,xs:datetime,” 20040503T173008+08”--></SystemTime>
//<SyncNTPFlag><!--req,xs:string,"Sync,NoSync"--></SyncNTPFlag>
//</Time>
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HangYuCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
bool res = false;
std::vector<uint8_t> data;
// /Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", m_ip.c_str(), (uint32_t)streamID);
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
if (0 == nRet)
{
bool qualityDowngraded = false;
std::string originalConfig;
if (img.size() < 1000)
{
qualityDowngraded = DowngradeQuality(originalConfig);
XYLOG(XYLOG_SEVERITY_INFO,"Reduce Img Quality");
}
nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
if (!originalConfig.empty())
{
UpdateQuality(originalConfig);
}
std::vector<uint8_t> header = {0xFF, 0xD8, 0xFF, 0xE0}; // JPEG
std::vector<uint8_t>::iterator it = std::search(img.begin(), img.end(), header.begin(), header.end());
if (it != img.end() && it != img.begin())
{
img.erase(img.begin(), it);
#ifndef NDEBUG
int aa = 0;
#endif
}
}
return nRet == 0;
}
bool HangYuCtrl::DowngradeQuality(std::string& originalConfig)
{
bool res = false;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet)
{
std::string str = ByteArrayToString(&data[0], data.size());
originalConfig = str;
if (replaceAll(str, "<Quality>middle</Quality>", "<Quality>low</Quality>") == 0)
{
res = (replaceAll(str, "<Quality>high</Quality>", "<Quality>middle</Quality>") != 0);
}
else
{
res = true;
}
if (!res)
{
return res;
}
data.clear();
if (res)
{
nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
return 0 == nRet;
}
}
return false;
}
bool HangYuCtrl::UpdateQuality(const std::string& originalConfig)
{
std::vector<uint8_t> data;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
int nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, originalConfig.c_str(), data);
return 0 == nRet;
}
bool HangYuCtrl::UpgradeQuality()
{
bool res = false;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet)
{
std::string str = ByteArrayToString(&data[0], data.size());
if (replaceAll(str, "<Quality>low</Quality>", "<Quality>middle</Quality>") == 0)
{
res = (replaceAll(str, "<Quality>middle</Quality>", "<Quality>high</Quality>") != 0);
}
else
{
res = true;
}
if (!res)
{
return res;
}
data.clear();
if (res)
{
nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
return 0 == nRet;
}
}
return false;
}
bool HangYuCtrl::QueryQuality(std::string& qualityContents)
{
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet && !data.empty())
{
qualityContents = ByteArrayToString(&data[0], data.size());
}
return (0 == nRet);
}
bool HangYuCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HANGYUCTRL_H__
#define __MICROPHOTO_HANGYUCTRL_H__
#include "VendorCtrl.h"
class HangYuCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~HangYuCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool HasAuthOnStreaming() const { return true; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
bool QueryQuality(std::string& qualityContents);
bool DowngradeQuality(std::string& originalConfig);
bool UpdateQuality(const std::string& originalConfig);
bool UpgradeQuality();
};
#endif //__MICROPHOTO_HANGYUCTRL_H__

@ -1,204 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HikonCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
HikonCtrl::~HikonCtrl()
{
}
bool HikonCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
// Stream index ranges from 1 to 4; 1 is the main stream
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return false;
}
std::string xmlString(resData.begin(), resData.end());
size_t widthStart = xmlString.find("<ResolutionWidth>");
size_t widthEnd = xmlString.find("</ResolutionWidth>");
if (widthStart != std::string::npos && widthEnd != std::string::npos) {
widthStart += std::string("<ResolutionWidth>").length();
xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
}
size_t heightStart = xmlString.find("<ResolutionHeigth>");
size_t heightEnd = xmlString.find("</ResolutionHeigth>");
if (heightStart != std::string::npos && heightEnd != std::string::npos) {
heightStart += std::string("<ResolutionHeigth>").length();
xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HikonCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
// Top-left OSD
bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos);
size_t posi = osdstring.find("$$DATETIME$$");
if (posi != std::string::npos) {
size_t endPos = posi + 12;
while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
endPos++;
}
osdstring.erase(posi, endPos - posi);
}
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></OSD></MultiLineOSD>";
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
return res == 0; // DoPutRequest returns 0 on success
}
void HikonCtrl::EnableOsd(bool enable, uint8_t channel)
{
// HangYu: only the time and one OSD text block can be displayed
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return;
}
std::string xmlString(resData.begin(), resData.end());
std::string enableStartTag = "<Enable>";
std::string enableEndTag = "</Enable>";
size_t pos = 0;
while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) {
size_t startPos = pos + enableStartTag.length();
size_t endPos = xmlString.find(enableEndTag, startPos);
if (endPos == std::string::npos) {
break;
}
std::string newValue = enable ? "true" : "false";
xmlString.replace(startPos, endPos - startPos, newValue);
pos = endPos + enableEndTag.length();
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
// return;
}
}
std::string HikonCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = 0;
for (int idx = 0; idx < 10; idx++)
{
res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res == 0 && !resData.empty())
{
break;
}
}
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
const char* start = strstr((const char*)&resData[0], "<RTSPURI>");
if (start == NULL)
{
return "";
}
start += 9;
const char* end = strstr(start, "</RTSPURI>");
if (end == NULL)
{
return "";
}
return std::string(start, end);
}
bool HikonCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
// <?xml version="1.0" encoding="utf-8"?>
//<Time>
//<DateTimeFormat>
//<!--req,string,YYYYMMDDWhhmmss,YYYYMMDDhhmmss,MMDDYYYYWhhmmss,MMD
// DYYYYhhmmss,DDMMYYYYWhhmmss,DDMMYYYYhhmmss-->
//</DateTimeFormat>
//<TimeFormat><!--req,xs:string,12hour,24hour--></TimeFormat>
//<SystemTime><!--req,xs:datetime,” 20040503T173008+08”--></SystemTime>
//<SyncNTPFlag><!--req,xs:string,"Sync,NoSync"--></SyncNTPFlag>
//</Time>
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HikonCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/ISAPI/Streaming/channels/1/picture?", m_ip.c_str());
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
return nRet == 0;
}
bool HikonCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HIKONCTRL_H__
#define __MICROPHOTO_HIKONCTRL_H__
#include "VendorCtrl.h"
class HikonCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~HikonCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool HasAuthOnStreaming() const { return true; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
bool QueryQuality(std::string& qualityContents);
bool DowngradeQuality(std::string& originalConfig);
bool UpdateQuality(const std::string& originalConfig);
bool UpgradeQuality();
};
#endif //__MICROPHOTO_HIKONCTRL_H__

@ -2,12 +2,10 @@
// Created by Matthew on 2025/3/4.
//
#include "VendorCtrl.h"
#include <curl/curl.h>
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) :
m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle)
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel) :
m_ip(ip), m_userName(userName), m_password(password), m_channel(channel)
{
}
std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
{
@ -20,8 +18,3 @@ std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
#endif
return Json::writeString(builder, data);
}
bool VendorCtrl::IsTimeout() const
{
return m_lastErrorCode == CURLE_OPERATION_TIMEDOUT;
}

@ -7,31 +7,17 @@
#include <string>
#include <json/json.h>
#include <android/multinetwork.h>
#define LEFT_TOP 0
#define RIGHT_TOP 1
#define LEFT_BOTTOM 2
#define RIGHT_BOTTOM 3
class VendorCtrl {
public:
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true);
virtual ~VendorCtrl() {}
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel);
virtual ~VendorCtrl() = 0;
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos) = 0;
virtual void EnableOsd(bool enable, uint8_t channel) = 0;
virtual bool SetOsd() = 0;
virtual void EnableOsd(bool enable) = 0;
virtual std::string GetStreamingUrl(uint8_t channel) = 0;
virtual bool UpdateTime(time_t ts) = 0;
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img) = 0;
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path) = 0;
virtual bool HasAuthOnStreaming() const { return false; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) = 0;
void UpdateNetHandle(net_handle_t netHandle) { m_netHandle = netHandle; }
int GetLastError() const { return m_lastErrorCode; }
bool IsTimeout() const;
virtual bool TakePhoto(std::vector<uint8_t>& img) = 0;
protected:
@ -42,8 +28,6 @@ protected:
std::string m_userName;
std::string m_password;
uint8_t m_channel;
net_handle_t m_netHandle;
int m_lastErrorCode;
};

@ -4,234 +4,44 @@
#include "YuShiCtrl.h"
#include "httpclient.h"
#include "netcamera.h"
#include <json/json.h>
YuShiCtrl::~YuShiCtrl()
{
}
bool YuShiCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
return false;
}
bool YuShiCtrl::SetOsd(uint8_t channel, std::string osd, uint8_t pos)
bool YuShiCtrl::SetOsd()
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
string jsonstring;
switch (pos) {
case LEFT_TOP:
{
OSDJson(0, 1, osd, 0, 0, true, jsonstring);
break;
}
case RIGHT_TOP:
{
OSDJson(1, 1, osd, 9900, 0, false, jsonstring);
break;
}
case LEFT_BOTTOM:
{
OSDJson(2, 1, osd, 0, 9900, false, jsonstring);
break;
}
case RIGHT_BOTTOM:
{
OSDJson(3, 1, osd, 9900, 9900, false, jsonstring);
break;
}
}
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, jsonstring.c_str(), resData);
return res == 0; // DoPutRequest returns 0 on success
}
void YuShiCtrl::EnableOsd(bool enable, uint8_t channel)
void YuShiCtrl::EnableOsd(bool enable)
{
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res =DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
std::string jsonString(resData.begin(), resData.end());
Json::CharReaderBuilder reader;
Json::Value root;
std::string errors;
std::istringstream s(jsonString);
if (!Json::parseFromStream(reader, s, &root, &errors)) {
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to parse JSON:%s", errors.c_str());
return;
}
Json::Value& data = root["Response"]["Data"];
if (data.isNull()) {
XYLOG(XYLOG_SEVERITY_ERROR,"Data not found in JSON");
return;
}
Json::Value& contentList = data["ContentList"];
for (auto& content : contentList) {
content["Enabled"] = enable ? 1 : 0;
}
Json::StreamWriterBuilder writer;
std::string putJsonString = Json::writeString(writer, data);
DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, putJsonString.c_str(), resData);
return false;
}
std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
std::string GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
Json::CharReaderBuilder builder;
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
Json::Value json;
const char* doc = (const char*)&(resData[0]);
if (reader->parse(doc, doc + resData.size() - 1, &json, NULL))
{
if (json.isMember("Response"))
{
Json::Value& jsonRes = json["Response"];
if (jsonRes.isMember("Data"))
{
Json::Value& jsonData = jsonRes["Data"];
if (jsonData.isMember("URL"))
{
return std::string(jsonData["URL"].asCString());
}
}
}
}
return "";
}
bool YuShiCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
/LAPI/V1.0/System/Time
#if 0
Json::Value jsonData(Json::objectValue);
jsonData["TimeZone"] = "GMT+08:00";
jsonData["DeviceTime"] = (int64_t)ts;
jsonData["DeviceTime"] = ts;
jsonData["DateFormat"] = 0; // YYYY-MM-DD
jsonData["HourFormat"] = 1; // 24H
#endif
std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}";
std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool YuShiCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
bool YuShiCtrl::TakePhoto(std::vector<uint8_t>& img)
{
// Yu Shi
char url[128] = { 0 };
int streamSid = 0; // should put into config
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", m_ip.c_str(), (uint32_t)streamID, streamSid);
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
return nRet == 0;
}
bool YuShiCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) {
return false;
}
void YuShiCtrl::OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString)
{
Json::Value root;
root["Num"] = 1;
Json::Value contentList(Json::arrayValue);
Json::Value content;
content["ID"] = id;
content["Enabled"] = enabled;
int row = 1;
for (char ch : osdString) {
if (ch == '\n') {
row++;
}
}
content["Num"] = row;
Json::Value contentInfo(Json::arrayValue);
size_t start = 0;
size_t end = osdString.find('\n');
if(timeOn)
{
// If the time should also be shown at this position
Json::Value info;
info["ContentType"] = 2;
info["Value"] = "";
contentInfo.append(info);
}
for (int i = 0; i < row; i++)
{
std::string line;
if (end == std::string::npos) {
line = osdString.substr(start);
} else {
line = osdString.substr(start, end - start);
start = end + 1;
end = osdString.find('\n', start);
}
Json::Value info;
info["ContentType"] = 1;
info["Value"] = line;
contentInfo.append(info);
}
content["ContentInfo"] = contentInfo;
Json::Value area;
Json::Value topLeft;
topLeft["X"] = x; //9900
topLeft["Y"] = y;
area["TopLeft"] = topLeft;
content["Area"] = area;
contentList.append(content);
root["ContentList"] = contentList;
Json::StreamWriterBuilder writer;
jsonString = Json::writeString(writer, root);
}

@ -13,16 +13,13 @@ public:
using VendorCtrl::VendorCtrl;
virtual ~YuShiCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t streamID);
virtual bool SetOsd();
virtual void EnableOsd(bool enable);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
virtual bool TakePhoto(std::vector<uint8_t>& img);
private:
void OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString);
};

@ -10,7 +10,6 @@ static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid)
std::vector<uint8_t>* data = (std::vector<uint8_t>*)lpVoid;
if( NULL == data || NULL == buffer )
{
XYLOG(XYLOG_SEVERITY_ERROR,"OnWriteData callback -1");
return -1;
}
uint8_t* begin = (uint8_t *)buffer;
@ -28,12 +27,11 @@ static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype pur
{
int errcode = errno;
printf("android_setsocknetwork errno=%d", errcode);
XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
}
return res == 0 ? CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR;
}
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/)
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data)
{
CURLcode nRet;
std::string auth;
@ -63,10 +61,8 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
if (netHandle != NETWORK_UNSPECIFIED)
{
#if 0
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
}
curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
@ -87,10 +83,6 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
nRet = curl_easy_perform(curl);
if (curlResVal != NULL)
{
*curlResVal = nRet;
}
long responseCode = 0;
if (CURLE_OK == nRet)
@ -125,7 +117,7 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
return ((0 == nRet) && (responseCode == 200)) ? 0 : 1;
}
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/)
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data)
{
std::string auth;
@ -154,10 +146,8 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char
if (netHandle != NETWORK_UNSPECIFIED)
{
#if 0
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
}
if(contents != NULL)
@ -174,10 +164,6 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
CURLcode nRet = curl_easy_perform(curl);
if (curlResVal != NULL)
{
*curlResVal = nRet;
}
if (CURLE_OK != nRet)
{
printf("GET err=%d", nRet);
@ -251,11 +237,10 @@ int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned
Json::StreamWriterBuilder writer;
std::string sendbuf = Json::writeString(writer, outdata);
std::vector<uint8_t> respContent;
char respContent[1024];
DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent);
// respContent.push_back(0);
// XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), (const char*)&respContent[0]);
XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), respContent);
return 0;
}

@ -18,7 +18,7 @@
bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr);
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal = NULL);
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal = NULL);
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data);
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data);
#endif // __HTTP_CLIENT__
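// Usage sketch with placeholder address and credentials: both helpers return 0 only when the
// transfer succeeds with HTTP 200, the response body is appended to the byte vector, and the
// optional last argument of the extended overload receives the raw CURLcode for timeout checks.
//
//     std::vector<uint8_t> img;
//     int curlRes = 0;
//     int ret = DoGetRequest("http://192.168.1.64/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg",
//                            HTTP_AUTH_TYPE_BASIC, "admin", "password",
//                            NETWORK_UNSPECIFIED, img, &curlRes);
//     if (ret == 0 && !img.empty()) {
//         // img now holds the JPEG bytes returned by the camera
//     }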

@ -112,6 +112,37 @@ int set_port_attr (int fd, int baudrate, int databit, const char *stopbit, char
return (tcsetattr (fd, TCSANOW, &opt));
}
static void setInt(int cmd, int value)
{
int fd = open("/dev/mtkgpioctrl", O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
if( fd > 0 )
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
close(fd);
}
return;
}
static void setRS485Enable(bool z) {
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
}
static void set485WriteMode() {
setInt(CMD_SET_485_STATE, 1);
}
static void set485ReadMode() {
setInt(CMD_SET_485_STATE, 0);
}
static void set12VEnable(bool z) {
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
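// Sketch of a half-duplex RS-485 exchange built on the helpers above (illustrative only: the
// file descriptor, request contents and settle delay are assumptions, not project code; relies
// on <unistd.h>/<termios.h> already used by this serial-port source).
static int rs485_transact(int fd, const unsigned char* req, size_t reqLen,
                          unsigned char* resp, size_t respLen)
{
    setRS485Enable(true);
    set485WriteMode();                  // drive the transceiver before sending
    ssize_t written = write(fd, req, reqLen);
    tcdrain(fd);                        // wait until the UART has shifted out the last byte
    set485ReadMode();                   // release the bus so the slave can answer
    usleep(20 * 1000);                  // settle time before the reply (assumed)
    ssize_t got = read(fd, resp, respLen);
    return (written == (ssize_t)reqLen && got > 0) ? (int)got : -1;
}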
/*********************************************************************************
* *
**********************************************************************************/

@ -8,6 +8,10 @@
#include <string>
#include "GPIOControl.h"
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args)
// Serial port parameters
@ -30,6 +34,14 @@ typedef struct
unsigned char m_au8RecvBuf[128];/* */
} SIO_PARAM_SERIAL_DEF;
typedef struct
{
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
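// Read-side counterpart to the setInt() helper in the serial-port source above (sketch only;
// that IOT_PARAM_READ fills in param.value is an assumption inferred from the struct fields,
// not confirmed against the driver):
//
//     static int getInt(int cmd)
//     {
//         IOT_PARAM param = { 0 };
//         param.cmd = cmd;
//         int fd = open("/dev/mtkgpioctrl", O_RDONLY);
//         if (fd > 0)
//         {
//             ioctl(fd, IOT_PARAM_READ, &param);
//             close(fd);
//         }
//         return param.value;
//     }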
void PortDataProcess( void );
int serial_port_comm();

@ -0,0 +1,222 @@
package com.xypower.mpapp;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
import android.os.IBinder;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
public class FloatingWindow extends Service {
private Context mContext;
private WindowManager mWindowManager;
private View mView;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
mContext = this;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
allAboutLayout(intent);
moveView();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onDestroy() {
try {
if (mView != null) {
mWindowManager.removeView(mView);
}
} catch (Exception ex) {
// ex.printStackTrace();
Log.e("FW", "Exception " + ex.getMessage());
}
super.onDestroy();
}
WindowManager.LayoutParams mWindowsParams;
private void moveView() {
/*
DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
int width = (int) (metrics.widthPixels * 1f);
int height = (int) (metrics.heightPixels * 1f);
mWindowsParams = new WindowManager.LayoutParams(
width,//WindowManager.LayoutParams.WRAP_CONTENT,
height,//WindowManager.LayoutParams.WRAP_CONTENT,
//WindowManager.LayoutParams.TYPE_SYSTEM_ALERT,
(Build.VERSION.SDK_INT <= 25) ? WindowManager.LayoutParams.TYPE_PHONE : WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY
,
//WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL,
WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL
| WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN // Not displaying keyboard on bg activity's EditText
| WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
| WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
| WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
| WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON,
//WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, //Not work with EditText on keyboard
PixelFormat.TRANSLUCENT);
mWindowsParams.gravity = Gravity.TOP | Gravity.LEFT;
//params.x = 0;
mWindowsParams.y = 100;
mWindowManager.addView(mView, mWindowsParams);
mView.setOnTouchListener(new View.OnTouchListener() {
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
long startTime = System.currentTimeMillis();
@Override
public boolean onTouch(View v, MotionEvent event) {
if (System.currentTimeMillis() - startTime <= 300) {
return false;
}
if (isViewInBounds(mView, (int) (event.getRawX()), (int) (event.getRawY()))) {
editTextReceiveFocus();
} else {
editTextDontReceiveFocus();
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
initialX = mWindowsParams.x;
initialY = mWindowsParams.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
break;
case MotionEvent.ACTION_UP:
break;
case MotionEvent.ACTION_MOVE:
mWindowsParams.x = initialX + (int) (event.getRawX() - initialTouchX);
mWindowsParams.y = initialY + (int) (event.getRawY() - initialTouchY);
mWindowManager.updateViewLayout(mView, mWindowsParams);
break;
}
return false;
}
});
*/
}
private boolean isViewInBounds(View view, int x, int y) {
Rect outRect = new Rect();
int[] location = new int[2];
view.getDrawingRect(outRect);
view.getLocationOnScreen(location);
outRect.offset(location[0], location[1]);
return outRect.contains(x, y);
}
private void editTextReceiveFocus() {
if (!wasInFocus) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = true;
}
}
private void editTextDontReceiveFocus() {
if (wasInFocus) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = false;
hideKeyboard(mContext, edt1);
}
}
private boolean wasInFocus = true;
private EditText edt1;
private void allAboutLayout(Intent intent) {
LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
mView = layoutInflater.inflate(R.layout.ovelay_window, null);
edt1 = (EditText) mView.findViewById(R.id.edt1);
final TextView tvValue = (TextView) mView.findViewById(R.id.tvValue);
Button btnClose = (Button) mView.findViewById(R.id.btnClose);
edt1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
// mWindowsParams.softInputMode = WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = true;
showSoftKeyboard(v);
}
});
edt1.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
tvValue.setText(edt1.getText());
}
@Override
public void afterTextChanged(Editable editable) {
}
});
btnClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
stopSelf();
}
});
}
private void hideKeyboard(Context context, View view) {
if (view != null) {
InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}
public void showSoftKeyboard(View view) {
if (view.requestFocus()) {
InputMethodManager imm = (InputMethodManager)
getSystemService(Context.INPUT_METHOD_SERVICE);
imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT);
}
}
}
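
A minimal sketch of how this overlay service could be launched from elsewhere in the app; the launcher class name is hypothetical, and the Settings.canDrawOverlays() guard reflects the fact that TYPE_APPLICATION_OVERLAY windows need the SYSTEM_ALERT_WINDOW permission already declared in the manifest.

// Hypothetical launcher for the FloatingWindow service above.
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.provider.Settings;

public class OverlayLauncher {
    public static void show(Context context) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !Settings.canDrawOverlays(context)) {
            // Overlay permission not granted yet; send the user to the grant screen.
            Intent grant = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION);
            grant.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(grant);
            return;
        }
        context.startService(new Intent(context, FloatingWindow.class));
    }
}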

@ -1,19 +0,0 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
public class HeartBeatResponseReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if ("com.systemui.ACTION_HEARTBEAT_RESPONSE".equals(action)) {
long timestamp = intent.getLongExtra("timestamp", 0);
Log.d("MpApp","系统广播监听 timestamp:"+timestamp);
MicroPhotoService.infoLog("收到heartbeat广播 timestamp:" + timestamp);
}
}
}

@ -1,7 +1,6 @@
package com.xypower.mpapp;
import android.Manifest;
import android.app.Activity;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
@ -10,11 +9,9 @@ import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.Messenger;
import android.os.PowerManager;
import android.os.StrictMode;
import androidx.appcompat.app.ActionBar;
@ -24,7 +21,6 @@ import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.os.SystemClock;
import android.provider.Settings;
import android.telephony.SubscriptionManager;
import android.text.TextUtils;
import android.util.Log;
@ -35,7 +31,6 @@ import android.widget.Toast;
import com.dev.devapi.api.SysApi;
import com.xypower.common.CameraUtils;
import com.xypower.common.FilesUtils;
import com.xypower.common.MicroPhotoContext;
import com.xypower.mpapp.databinding.ActivityMainBinding;
import com.xypower.mpapp.utils.LocationUtil;
@ -55,20 +50,10 @@ public class MainActivity extends AppCompatActivity {
private Messenger mMessenger = null;
private long mConfigModificationTime = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
setContentView(binding.getRoot());
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
try {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
int activeSubId = SubscriptionManager.getActiveDataSubscriptionId();
if (activeSubId == -1) {
@ -76,23 +61,19 @@ public class MainActivity extends AppCompatActivity {
}
}
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
setContentView(binding.getRoot());
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
ActionBar actionBar = getSupportActionBar();
Date date = new Date(BuildConfig.BUILD_TIMESTAMP);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
String caption = "MP";
switch (MicroPhotoService.getCustomAppId()) {
case 1:
caption = "RP";
break;
case 2:
caption = "N938";
break;
default:
break;
}
caption += " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date);
sdf = new SimpleDateFormat("MM-dd HH:mm:ss");
String caption = actionBar.getTitle().toString() + " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date);
sdf = new SimpleDateFormat("MM-dd HH:mm");
caption += " / " + sdf.format(new Date());
actionBar.setTitle(caption);
@ -114,7 +95,7 @@ public class MainActivity extends AppCompatActivity {
if (!MicroPhotoContext.hasMpAppConfig(appContext)) {
String mstPath = MicroPhotoContext.buildMpResAppDir(appContext);
String mstPath = MicroPhotoContext.buildMasterAppDir(appContext);
File mstPathFile = new File(mstPath);
File mpdataFile = new File(mstPathFile, "mpdata");
@ -122,7 +103,7 @@ public class MainActivity extends AppCompatActivity {
File dataFile = new File(appPathFile, "data");
if (dataFile.exists()) {
try {
FilesUtils.delete(dataFile);
dataFile.delete();
} catch (Exception ex) {
ex.printStackTrace();
}
@ -134,18 +115,6 @@ public class MainActivity extends AppCompatActivity {
ex.printStackTrace();
}
}
else {
Intent resIntent = getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPRES);
if (resIntent != null) {
resIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
resIntent.putExtra("initres", 1);
String sn = MicroPhotoService.getSerialNumber();
if (!TextUtils.isEmpty(sn)) {
resIntent.putExtra("sn", sn);
}
startActivity(resIntent);
}
}
}
Intent intent = getIntent();
@ -161,7 +130,36 @@ public class MainActivity extends AppCompatActivity {
Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay);
MicroPhotoContext.AppConfig appConfig = loadConfigInfo();
final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(appContext);
if (TextUtils.isEmpty(appConfig.cmdid)) {
appConfig.cmdid = MicroPhotoService.getSerialNumber();
binding.cmdid.setText(appConfig.cmdid);
} else {
binding.cmdid.setText(appConfig.cmdid);
}
binding.server.setText(appConfig.server);
binding.port.setText(appConfig.port != 0 ? Integer.toString(appConfig.port) : "");
String protocolStr = appConfig.protocol + "-";
for (int idx = 0; idx < binding.protocol.getCount(); idx++) {
String item = binding.protocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.protocol.setSelection(idx);
break;
}
}
if (appConfig.networkProtocol < binding.networkProtocol.getCount()) {
binding.networkProtocol.setSelection(appConfig.networkProtocol);
}
if (appConfig.encryption < binding.encryptions.getCount()) {
binding.encryptions.setSelection(appConfig.encryption);
}
binding.heartbeat.setText((appConfig.heartbeat > 0) ? Integer.toString(appConfig.heartbeat) : "");
binding.packetSize.setText((appConfig.packetSize > 0) ? Integer.toString(appConfig.packetSize) : "");
if (appConfig.network < binding.network.getCount()) {
binding.network.setSelection(appConfig.network);
}
binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning);
binding.btnStopServ.setEnabled(MicroPhotoService.isRunning);
@ -176,13 +174,12 @@ public class MainActivity extends AppCompatActivity {
}
if (MicroPhotoContext.hasMpAppConfig(appContext)) {
final Runnable runnable = new Runnable() {
Runnable runnable = new Runnable() {
@Override
public void run() {
if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) {
if (binding.btnStartServ.isEnabled()) {
Log.i(TAG, "Perform AutoStart");
binding.btnStartServ.performClick();
}
}
@ -192,14 +189,10 @@ public class MainActivity extends AppCompatActivity {
long timeout = 500;
if (SystemClock.elapsedRealtime() < 180000) {
// In 3 minutes
timeout = 10000; // in 10 seconds
timeout = 30000; // in 30 seconds
}
Handler handler = new Handler();
handler.postDelayed(runnable, timeout);
Log.i(TAG, "Set AutoStart after " + Long.toString(timeout) + "ms");
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
@ -208,63 +201,6 @@ public class MainActivity extends AppCompatActivity {
super.onDestroy();
}
@Override
protected void onResume() {
super.onResume();
try {
File file = MicroPhotoContext.getMpAppConfigFile(getApplicationContext());
if (file.lastModified() > mConfigModificationTime) {
loadConfigInfo();
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
protected MicroPhotoContext.AppConfig loadConfigInfo() {
final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(getApplicationContext());
mConfigModificationTime = appConfig.modificationTime;
if (TextUtils.isEmpty(appConfig.cmdid)) {
appConfig.cmdid = MicroPhotoService.getSerialNumber();
binding.cmdid.setText(appConfig.cmdid);
} else {
binding.cmdid.setText(appConfig.cmdid);
}
binding.server.setText(appConfig.server);
binding.port.setText(appConfig.port != 0 ? Integer.toString(appConfig.port) : "");
String protocolStr = appConfig.protocol + "-";
for (int idx = 0; idx < binding.protocol.getCount(); idx++) {
String item = binding.protocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.protocol.setSelection(idx);
break;
}
}
protocolStr = appConfig.networkProtocol + "-";
for (int idx = 0; idx < binding.networkProtocol.getCount(); idx++) {
String item = binding.networkProtocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.networkProtocol.setSelection(idx);
break;
}
}
if (appConfig.encryption < binding.encryptions.getCount()) {
binding.encryptions.setSelection(appConfig.encryption);
}
binding.heartbeat.setText((appConfig.heartbeat > 0) ? Integer.toString(appConfig.heartbeat) : "");
binding.packetSize.setText((appConfig.packetSize > 0) ? Integer.toString(appConfig.packetSize) : "");
if (appConfig.network < binding.network.getCount()) {
binding.network.setSelection(appConfig.network);
}
return appConfig;
}
protected void initListener() {
this.binding.btnStartServ.setOnClickListener(new View.OnClickListener() {
@ -296,7 +232,6 @@ public class MainActivity extends AppCompatActivity {
startMicroPhotoService(appContext, curAppConfig, mMessenger);
Log.i(TAG, "Service auto-started");
binding.btnStartServ.setEnabled(false);
binding.btnStopServ.setEnabled(true);
}
@ -435,35 +370,28 @@ public class MainActivity extends AppCompatActivity {
@Override
public void onClick(View v) {
restartSelfWithStartActivity();
// restartSelfWithAlarmManager();
}
private void restartSelfWithStartActivity() {
final Context context = getApplicationContext();
Context context = MainActivity.this;
Intent intent = new Intent(context, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK| Intent.FLAG_ACTIVITY_CLEAR_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
intent.putExtra("reason", "Manual Restart From MainActivity");
// finish();
context.startActivity(intent);
final Handler handler = new Handler();
finish();
handler.postDelayed(new Runnable() {
@Override
public void run() {
System.exit(0);
}
}, 0);
}
private void restartSelfWithAlarmManager() {
Intent intent = new Intent(MainActivity.this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK| Intent.FLAG_ACTIVITY_CLEAR_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
@ -698,6 +626,4 @@ public class MainActivity extends AppCompatActivity {
}
}
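
The removed restartSelfWithAlarmManager() path is only partially visible in this diff; for reference, a minimal sketch of the usual AlarmManager-based restart it implies (mirroring the commented-out block in MicroPhotoContext further down). The class and method names here are illustrative, not the app's actual code.

// Illustrative only: schedule a relaunch ~1s out via AlarmManager, then exit
// the process so the pending alarm brings MainActivity back up.
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;

public class AlarmRestart {
    public static void restart(Context context) {
        Intent intent = new Intent(context, MainActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.putExtra("noDelay", 1);
        PendingIntent pi = PendingIntent.getActivity(context, 0, intent,
                PendingIntent.FLAG_UPDATE_CURRENT | PendingIntent.FLAG_IMMUTABLE);
        AlarmManager am = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
        am.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, pi); // fire in ~1s
        System.exit(0); // process dies; the alarm relaunches MainActivity
    }
}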

File diff suppressed because it is too large

@ -0,0 +1,76 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Build;
import android.util.Log;
import android.widget.Toast;
public class ScreenActionReceiver extends BroadcastReceiver {
private String TAG = "ScreenActionReceiver";
@Override
public void onReceive(Context context, Intent intent) {
//LOG
StringBuilder sb = new StringBuilder();
sb.append("Action: " + intent.getAction() + "\n");
// sb.append("URI: " + intent.toUri(Intent.URI_INTENT_SCHEME).toString() + "\n");
String log = sb.toString();
Log.d(TAG, log);
Toast.makeText(context, log, Toast.LENGTH_SHORT).show();
String action = intent.getAction();
try {
if (Intent.ACTION_SCREEN_ON.equals(action)) {
Log.d(TAG, "screen is on...");
Toast.makeText(context, "screen ON", Toast.LENGTH_SHORT);
//Run the locker
context.startService(new Intent(context, FloatingWindow.class));
} else if (Intent.ACTION_SCREEN_OFF.equals(action)) {
Log.d(TAG, "screen is off...");
Toast.makeText(context, "screen OFF", Toast.LENGTH_SHORT);
} else if (Intent.ACTION_USER_PRESENT.equals(action)) {
Log.d(TAG, "screen is unlock...");
Toast.makeText(context, "screen UNLOCK", Toast.LENGTH_SHORT);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.startForegroundService(new Intent(context, FloatingWindow.class));
} else {
context.startService(new Intent(context, FloatingWindow.class));
}
} else if (Intent.ACTION_BOOT_COMPLETED.equals(action)) {
Log.d(TAG, "boot completed...");
Toast.makeText(context, "BOOTED..", Toast.LENGTH_SHORT);
//Run the locker
/* Intent i = new Intent(context, FloatingWindow.class);
context.startService(i);
*/
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// context.startForegroundService(new Intent(context, FloatingWindow.class));
} else {
// context.startService(new Intent(context, FloatingWindow.class));
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public IntentFilter getFilter(){
final IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_SCREEN_OFF);
filter.addAction(Intent.ACTION_SCREEN_ON);
return filter;
}
}
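
ACTION_SCREEN_ON/OFF are only delivered to dynamically registered receivers, so the getFilter() helper above is presumably meant to be passed to registerReceiver(); a minimal sketch of that registration (the call site, e.g. a Service's onCreate(), is an assumption):

// Hypothetical registration site inside a Service or Activity.
ScreenActionReceiver screenReceiver = new ScreenActionReceiver();
registerReceiver(screenReceiver, screenReceiver.getFilter());
// ... and later, to avoid a leaked receiver:
// unregisterReceiver(screenReceiver);

Note that getFilter() only adds SCREEN_ON and SCREEN_OFF; the ACTION_USER_PRESENT and ACTION_BOOT_COMPLETED branches in onReceive() would need extra filter actions or manifest registration to fire.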

@ -769,10 +769,7 @@ public class Camera2VideoActivity extends AppCompatActivity {
@Override
public void run() {
Log.i("OSD", "Record Stop " + Long.toString(mDuration));
if (mGPUCameraRecorder != null) {
mGPUCameraRecorder.stop();
}
int aa = 0;
}
@ -812,7 +809,6 @@ public class Camera2VideoActivity extends AppCompatActivity {
.cameraId(Integer.toString(mCameraId))
.mute(true)
.duration(mDuration * 1000)
.rotation(mOrientation)
.build();
Log.i("OSD", "mGPUCameraRecorder created");

@ -3,6 +3,7 @@ package com.xypower.mpapp.video;
import android.Manifest;
import android.app.Activity;
import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
@ -17,6 +18,7 @@ import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.BitmapDrawable;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
@ -34,6 +36,8 @@ import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import androidx.legacy.app.FragmentCompat;
import androidx.legacy.app.FragmentCompat;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import android.os.Environment;
@ -50,6 +54,7 @@ import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.xypower.mpapp.MicroPhotoService;
import com.xypower.mpapp.R;
@ -71,7 +76,7 @@ import java.util.concurrent.TimeUnit;
* Use the {@link VideoFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener {
public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener, FragmentCompat.OnRequestPermissionsResultCallback {
public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH";
public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED";

@ -205,13 +205,13 @@
app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
<Button
android:id="@+id/btnSendHb"
android:id="@+id/btnLogs"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
android:text="日志"
app:layout_constraintStart_toEndOf="@+id/btnChannels"
app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
@ -260,17 +260,6 @@
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto3"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" />
<Button
android:id="@+id/btnSendWs"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_ws"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" />
<Button
android:id="@+id/takeVideoBtn"
android:layout_width="wrap_content"
@ -317,25 +306,14 @@
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button
android:id="@+id/btnSendBi"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_bi"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/takeVideoBtn4"
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button
android:id="@+id/btnLogs"
android:id="@+id/btnSendHb"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin_small"
android:layout_marginTop="@dimen/activity_vertical_spacing_small"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
android:text="日志"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/takeVideoBtn" />
@ -347,8 +325,8 @@
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnLogs"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintStart_toEndOf="@+id/btnSendHb"
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnReboot"
@ -359,7 +337,7 @@
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnRestartApp"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnCameraInfo"
@ -370,18 +348,7 @@
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnReboot"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
<Button
android:id="@+id/btnSendFault"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_fault"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnDowseCamera"

@ -1,8 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="networkProtocols">
<item>0-TCP</item>
<item>1-UDP</item>
<item>10-MQTT</item>
<item>TCP</item>
<item>UDP</item>
</string-array>
</resources>

@ -6,10 +6,8 @@
<item>65282-江苏</item>
<item>65283-湖南</item>
<item>65284-浙江</item>
<item>65285-河南统一</item>
<item>65285-河南</item>
<item>65286-郑州</item>
<item>65290-河南全景</item>
<item>65298-宁夏</item>
<item>65310-山西智洋</item>
</string-array>
</resources>

@ -7,9 +7,6 @@
<string name="main_packet_size_default">默认2K</string>
<string name="main_server">支持域名自动转IP</string>
<string name="main_send_hb">心跳</string>
<string name="main_send_ws">工作状态</string>
<string name="main_send_bi">基本信息</string>
<string name="main_send_fault">故障</string>
<string name="main_restart_app">重启APP</string>
<string name="main_reboot">重启设备</string>
<string name="main_camera_info">摄像头</string>

@ -30,10 +30,9 @@ android {
dependencies {
implementation 'androidx.core:core:1.6.0'
// implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'
// implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'

@ -6,6 +6,7 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
@ -44,16 +45,6 @@ public class CameraUtils {
Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
builder.append(orientation == null ? "" : orientation.toString());
int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean hasRaw = false;
for (int capability : capabilities) {
if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
hasRaw = true;
break;
}
}
builder.append(" raw=" + (hasRaw ? "1" : "0"));
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);

@ -33,23 +33,19 @@ public class FileDownloader {
URL url = new URL(urlString);
connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty("Accept-Encoding", "gzip");
connection.setConnectTimeout(10000);
connection.setReadTimeout(30000);
connection.setConnectTimeout(5000);
connection.setReadTimeout(120000);
connection.setDoInput(true);
connection.connect();
final File temp = new File(filePath);
if (temp.exists()) {
long fileSize = temp.length();
connection.setRequestProperty("Range", "bytes=" + Long.toString(fileSize) + "-");
}
// if (temp.exists())
// temp.delete();
// temp.createNewFile();
if (temp.exists())
temp.delete();
temp.createNewFile();
temp.setReadable(true, false);
temp.setWritable(true, false);
downloadFile = temp;
Log.d("download", "url " + urlString + "\n save to " + temp);
os = new FileOutputStream(temp, true);
os = new FileOutputStream(temp);
String encoding = connection.getContentEncoding();
is = connection.getInputStream();
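
The lines removed in this hunk implemented HTTP resume: when a partial file already exists, a Range header asks the server to continue from the file's current length and the FileOutputStream is opened in append mode. A standalone sketch of that pattern, with placeholder URL/path and no claim to match the rest of FileDownloader:

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ResumeDownload {
    public static void download(String urlString, String filePath) throws Exception {
        File temp = new File(filePath);
        HttpURLConnection conn = (HttpURLConnection) new URL(urlString).openConnection();
        if (temp.exists() && temp.length() > 0) {
            // Ask the server for the remaining bytes only.
            conn.setRequestProperty("Range", "bytes=" + temp.length() + "-");
        }
        conn.connect();
        try (InputStream is = conn.getInputStream();
             // Append so the partial content already on disk is kept.
             FileOutputStream os = new FileOutputStream(temp, true)) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = is.read(buf)) != -1) {
                os.write(buf, 0, n);
            }
        } finally {
            conn.disconnect();
        }
    }
}

A production version would also check for HTTP 206 (Partial Content) before appending, since a server that ignores Range returns the whole file with 200.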

@ -6,7 +6,6 @@ import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -93,22 +92,8 @@ public class FileUploader {
}
request.writeBytes(this.CRLF);
FileInputStream fis = null;
try {
fis = new FileInputStream(uploadFile);
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
int length = -1;
while ((length = fis.read(buffer)) != -1) {
request.write(buffer, 0, length);
}
} catch (Exception ex) {
ex.printStackTrace();
} finally {
FilesUtils.closeFriendly(fis);
}
// byte[] bytes = Files.readAllBytes(uploadFile.toPath());
// request.write(bytes);
byte[] bytes = Files.readAllBytes(uploadFile.toPath());
request.write(bytes);
}
/**

@ -1,14 +1,10 @@
package com.xypower.common;
import android.content.Context;
import android.content.res.AssetManager;
import android.text.TextUtils;
import android.util.Log;
import org.w3c.dom.Text;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
@ -17,7 +13,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.nio.channels.Channels;
import java.nio.channels.SeekableByteChannel;
@ -256,217 +251,4 @@ public class FilesUtils {
public static byte[] readAllBytes(String file) {
return readAllBytes(new File(file));
}
public static boolean delete(File file) {
if (!file.exists()) {
return false;
} else {
if (file.isFile())
return deleteSingleFile(file);
else
return deleteDirectory(file.getAbsolutePath());
}
}
private static boolean deleteSingleFile(File file) {
// If the file exists and is a regular file, delete it directly
if (file.exists() && file.isFile()) {
if (file.delete()) {
// Log.e("--Method--", "Copy_Delete.deleteSingleFile: 删除单个文件" + filePath$Name + "成功!");
return true;
} else {
return false;
}
} else {
return false;
}
}
/**
* @param filePath the directory path to delete
* @return true on success, false on failure
*/
private static boolean deleteDirectory(String filePath) {
// If the path does not end with a file separator, append one
if (!filePath.endsWith(File.separator))
filePath = filePath + File.separator;
File dirFile = new File(filePath);
// If the path does not exist or is not a directory, bail out
if ((!dirFile.exists()) || (!dirFile.isDirectory())) {
return false;
}
boolean flag = true;
// Delete everything inside the directory, including subdirectories
File[] files = dirFile.listFiles();
for (File file : files) {
// Delete child file
if (file.isFile()) {
flag = deleteSingleFile(file);
if (!flag)
break;
}
// Delete child directory
else if (file.isDirectory()) {
flag = deleteDirectory(file
.getAbsolutePath());
if (!flag)
break;
}
}
if (!flag) {
return false;
}
// Finally delete the directory itself
if (dirFile.delete()) {
// Log.e("--Method--", "Copy_Delete.deleteDirectory: 删除目录" + filePath + "成功!");
return true;
} else {
return false;
}
}
public static void copyAssetsDir(Context context, String directory, String destPath) {
try {
AssetManager assetManager = context.getAssets();
String[] fileList = assetManager.list(directory);
if (fileList != null && fileList.length > 0) {
File file = new File(destPath);
if (!file.exists()) {
file.mkdirs();
}
if (!directory.endsWith(File.separator)) {
directory += File.separator;
}
if (!destPath.endsWith(File.separator)) {
destPath += File.separator;
}
for (String fileName : fileList) {
copyAssetsDir(context, directory + fileName, destPath + fileName);
}
} else {
// Not a directory; try to copy it as a single file
copyAssetsFile(context, directory, destPath);
}
} catch (Exception e) {
e.printStackTrace();
}
// else { // if it is a single file
// InputStream inputStream=context.getAssets().open(filePath);
// File file=new File(context.getFilesDir().getAbsolutePath()+ File.separator+filePath);
// Log.i("copyAssets2Phone","file:"+file);
// if(!file.exists() || file.length()==0) {
// FileOutputStream fos=new FileOutputStream(file);
// int len=-1;
// byte[] buffer=new byte[1024];
// while ((len=inputStream.read(buffer))!=-1){
// fos.write(buffer,0,len);
// }
// fos.flush();
// inputStream.close();
// fos.close();
// showToast(context,"模型文件复制完毕");
// } else {
// showToast(context,"模型文件已存在,无需复制");
// }
// }
}
public static void copyAssetsFile(Context context, String fileName, String destPath) {
InputStream inputStream = null;
FileOutputStream fos = null;
try {
inputStream = context.getAssets().open(fileName);
// getFilesDir() returns this app's private files directory: /data/data/<package>/files
File file = new File(destPath);
if (file.exists()) {
file.delete();
}
File parentDir = file.getParentFile();
if (parentDir != null && !parentDir.exists()) {
parentDir.mkdirs();
}
if (parentDir != null && !parentDir.canWrite()) {
Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
return;
}
fos = new FileOutputStream(file);
int len = -1;
byte[] buffer = new byte[1024];
while ((len = inputStream.read(buffer)) != -1) {
try {
fos.write(buffer, 0, len);
} catch (Exception ex) {
ex.printStackTrace();
}
}
fos.flush();
} catch (Exception e) {
e.printStackTrace();
} finally {
FilesUtils.closeFriendly(inputStream);
FilesUtils.closeFriendly(fos);
}
}
/**
* Copies a text file from Assets and normalizes line endings (CRLF -> LF)
*
* @param context
* @param fileName source file name in Assets
* @param destPath destination path on the filesystem
*/
public static void copyAndNormalizeTextAssetsFile(Context context, String fileName, String destPath) {
InputStream inputStream = null;
BufferedReader reader = null;
BufferedWriter writer = null;
try {
inputStream = context.getAssets().open(fileName);
reader = new BufferedReader(new InputStreamReader(inputStream));
// Create the destination file
File file = new File(destPath);
if (file.exists()) {
file.delete();
}
File parentDir = file.getParentFile();
if (parentDir != null && !parentDir.exists()) {
parentDir.mkdirs();
}
if (parentDir != null && !parentDir.canWrite()) {
Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
return;
}
// Write via BufferedWriter, normalizing line endings as we go
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file)));
String line;
// Read line by line; BufferedWriter supplies the line terminator
while ((line = reader.readLine()) != null) {
writer.write(line);
writer.newLine(); // platform default line separator (LF on Android)
}
writer.flush();
Log.d("FilesUtils", "File normalized and copied successfully: " + destPath);
} catch (Exception e) {
Log.e("FilesUtils", "Error normalizing file: " + e.getMessage(), e);
} finally {
closeFriendly(reader);
closeFriendly(writer);
closeFriendly(inputStream);
}
}
}
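
A minimal usage sketch of the helpers added above; the asset folder name and destination paths are examples only. copyAssetsDir() mirrors an assets directory recursively, while copyAndNormalizeTextAssetsFile() rewrites a text asset with LF line endings so it is usable on-device.

// Hypothetical call site; "models" and "start.sh" are placeholder asset names.
String modelDir = context.getFilesDir().getAbsolutePath() + File.separator + "models";
FilesUtils.copyAssetsDir(context, "models", modelDir);

// Copy a text asset while letting BufferedWriter convert CRLF to LF.
String script = context.getFilesDir().getAbsolutePath() + File.separator + "start.sh";
FilesUtils.copyAndNormalizeTextAssetsFile(context, "start.sh", script);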

@ -0,0 +1,248 @@
package com.xypower.common;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Handler;
import androidx.annotation.RequiresApi;
import android.util.Log;
import androidx.annotation.RequiresApi;
import com.android.dx.stock.ProxyBuilder;
import java.io.File;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
public class HotspotManager {
@RequiresApi(api = Build.VERSION_CODES.O)
public static class OreoWifiManager {
private static final String TAG = OreoWifiManager.class.getSimpleName();
private Context mContext;
private WifiManager mWifiManager;
private ConnectivityManager mConnectivityManager;
public OreoWifiManager(Context c) {
mContext = c;
mWifiManager = (WifiManager) mContext.getSystemService(Context.WIFI_SERVICE);
mConnectivityManager = (ConnectivityManager) mContext.getSystemService(ConnectivityManager.class);
}
/**
* This sets the Wifi SSID and password
* Call this before {@code startTethering} if the app is a system/privileged app
* Requires: android.permission.TETHER_PRIVILEGED which is only granted to system apps
*/
public void configureHotspot(String name, String password) {
WifiConfiguration apConfig = new WifiConfiguration();
apConfig.SSID = name;
apConfig.preSharedKey = password;
apConfig.allowedKeyManagement.set(WifiConfiguration.KeyMgmt.WPA_PSK);
try {
Method setConfigMethod = mWifiManager.getClass().getMethod("setWifiApConfiguration", WifiConfiguration.class);
boolean status = (boolean) setConfigMethod.invoke(mWifiManager, apConfig);
Log.d(TAG, "setWifiApConfiguration - success? " + status);
} catch (Exception e) {
Log.e(TAG, "Error in configureHotspot");
e.printStackTrace();
}
}
/**
* Checks whether tethering is on.
* This is determined by the getTetheredIfaces() method,
* which returns an empty array if no devices are tethered.
*
* @return true if a tethered device is found, false if not found
*/
/*public boolean isTetherActive() {
try {
Method method = mConnectivityManager.getClass().getDeclaredMethod("getTetheredIfaces");
if (method == null) {
Log.e(TAG, "getTetheredIfaces is null");
} else {
String res[] = (String[]) method.invoke(mConnectivityManager, null);
Log.d(TAG, "getTetheredIfaces invoked");
Log.d(TAG, Arrays.toString(res));
if (res.length > 0) {
return true;
}
}
} catch (Exception e) {
Log.e(TAG, "Error in getTetheredIfaces");
e.printStackTrace();
}
return false;
}
*/
/**
* This enables tethering using the ssid/password defined in Settings App>Hotspot & tethering
* Does not require app to have system/privileged access
* Credit: Vishal Sharma - https://stackoverflow.com/a/52219887
*/
public boolean startTethering(final OnStartTetheringCallback callback) {
// On Pie if we try to start tethering while it is already on, it will
// be disabled. This is needed when startTethering() is called programmatically.
/*if (isTetherActive()) {
Log.d(TAG, "Tether already active, returning");
return false;
}*/
File outputDir = mContext.getCodeCacheDir();
Object proxy;
try {
proxy = ProxyBuilder.forClass(OnStartTetheringCallbackClass())
.dexCache(outputDir).handler(new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
switch (method.getName()) {
case "onTetheringStarted":
callback.onTetheringStarted();
break;
case "onTetheringFailed":
callback.onTetheringFailed();
break;
default:
ProxyBuilder.callSuper(proxy, method, args);
}
return null;
}
}).build();
} catch (Exception e) {
Log.e(TAG, "Error in enableTethering ProxyBuilder");
e.printStackTrace();
return false;
}
Method method = null;
try {
method = mConnectivityManager.getClass().getDeclaredMethod("startTethering", int.class, boolean.class, OnStartTetheringCallbackClass(), Handler.class);
if (method == null) {
Log.e(TAG, "startTetheringMethod is null");
} else {
method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE, false, proxy, null);
Log.d(TAG, "startTethering invoked");
}
return true;
} catch (Exception e) {
Log.e(TAG, "Error in enableTethering");
e.printStackTrace();
}
return false;
}
public void stopTethering() {
try {
Method method = mConnectivityManager.getClass().getDeclaredMethod("stopTethering", int.class);
if (method == null) {
Log.e(TAG, "stopTetheringMethod is null");
} else {
method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE);
Log.d(TAG, "stopTethering invoked");
}
} catch (Exception e) {
Log.e(TAG, "stopTethering error: " + e.toString());
e.printStackTrace();
}
}
private Class OnStartTetheringCallbackClass() {
try {
return Class.forName("android.net.ConnectivityManager$OnStartTetheringCallback");
} catch (ClassNotFoundException e) {
Log.e(TAG, "OnStartTetheringCallbackClass error: " + e.toString());
e.printStackTrace();
}
return null;
}
}
public static abstract class OnStartTetheringCallback {
/**
* Called when tethering has been successfully started.
*/
public abstract void onTetheringStarted();
/**
* Called when starting tethering failed.
*/
public abstract void onTetheringFailed();
}
@RequiresApi(api = Build.VERSION_CODES.O)
private static void setHotspotOnPhone(Context mContext, boolean isEnable) {
OreoWifiManager mTestOreoWifiManager = null;
if (mTestOreoWifiManager ==null) {
mTestOreoWifiManager = new OreoWifiManager(mContext);
}
if (isEnable){
OnStartTetheringCallback callback = new OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
}
@Override
public void onTetheringFailed() {
}
};
mTestOreoWifiManager.startTethering(callback);
}else{
mTestOreoWifiManager.stopTethering();
}
}
/*
public static void setWiFiApEnable(Context context, boolean isEnable) {
ConnectivityManager mConnectivityManager= (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (isEnable) {
mConnectivityManager.startTethering(ConnectivityManager.TETHERING_WIFI, false, new ConnectivityManager.OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
Log.d(TAG, "onTetheringStarted");
// Don't fire a callback here, instead wait for the next update from wifi.
}
@Override
public void onTetheringFailed() {
Log.d(TAG, "onTetheringFailed");
// TODO: Show error.
}
});
} else {
mConnectivityManager.stopTethering(ConnectivityManager.TETHERING_WIFI);
}
}
*/
public static void enableHotspot(Context context, boolean isEnable) {
// R: Android 11
// O: Android 8
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8 and above
setHotspotOnPhone(context, isEnable);
}/* else if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8
}
*/
}
}
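
A minimal sketch of driving the class above; the SSID and password are placeholders. configureHotspot() needs TETHER_PRIVILEGED (declared in the manifest, only honoured for system builds), while enableHotspot() toggles tethering through the hidden startTethering API on Android 8+.

// Hypothetical call site; "MySite-AP" and the password are examples only.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
    HotspotManager.OreoWifiManager mgr = new HotspotManager.OreoWifiManager(context);
    mgr.configureHotspot("MySite-AP", "12345678"); // system/privileged apps only
}
HotspotManager.enableHotspot(context, true);   // start tethering
// ...
HotspotManager.enableHotspot(context, false);  // stop tethering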

@ -2,7 +2,6 @@ package com.xypower.common;
import android.content.Context;
import android.os.Environment;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
@ -133,7 +132,7 @@ public class JSONUtils {
return false;
}
File configFile = new File(path.trim());
File configFile = new File(Environment.getExternalStorageDirectory(), path);
if (!configFile.exists()) {
if (val == null) {
// Should delete the config field

@ -1,8 +1,5 @@
package com.xypower.common;
import android.content.Context;
import java.io.FileInputStream;
import java.security.MessageDigest;
/* loaded from: ds_base_2.0.9_23030112.aar:classes.jar:com/dowse/base/util/MD5Util.class */
@ -32,25 +29,4 @@ public class MD5Util {
}
return r.toString();
}
public static String getFileMd5(String filePath) {
try (FileInputStream fis = new FileInputStream(filePath)) {
MessageDigest md = MessageDigest.getInstance("MD5");
byte[] buffer = new byte[8192]; // large buffer improves throughput
int len;
while ((len = fis.read(buffer)) != -1) {
md.update(buffer, 0, len);
}
byte[] digest = md.digest();
StringBuilder sb = new StringBuilder();
for (byte b : digest) {
sb.append(String.format("%02x", b & 0xff)); // convert each byte to two hex digits
}
return sb.toString();
} catch (Exception e) {
e.printStackTrace();
return "";
}
}
}
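
The getFileMd5() helper removed in this hunk streamed the file through MessageDigest in 8 KB chunks and returned the lowercase hex digest. A minimal call sketch, kept for reference; the path is a placeholder and the empty-string check mirrors the method's error behaviour shown above.

// Hypothetical path; getFileMd5() returns "" on any error per the code above.
String md5 = MD5Util.getFileMd5("/sdcard/com.xypower.mpapp/data/App.json");
if (!md5.isEmpty()) {
    Log.d("MD5", "App.json md5=" + md5);
}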

@ -30,13 +30,7 @@ public class MicroPhotoContext {
public static final String PACKAGE_NAME_MPAPP = "com.xypower.mpapp";
public static final String PACKAGE_NAME_MPMASTER = "com.xypower.mpmaster";
public static final String PACKAGE_NAME_MPRES = "com.xypower.mpres";
public static final String SERVICE_NAME_MPSERVICE = PACKAGE_NAME_MPAPP + ".MicroPhotoService";
public static final String SERVICE_NAME_MPMASTER = PACKAGE_NAME_MPMASTER + ".MpMasterService";
public static final String ACTION_HEARTBEAT_MP = "com.xypower.mpapp.ACT_HB";
public static final String ACTION_TAKEPHOTO_MP = "com.xypower.mpapp.ACT_TP";
public static final String ACTION_RESTART_MP = "com.xypower.mpapp.ACT_RESTART";
public static final String ACTION_UPDATE_CONFIGS_MP = "com.xypower.mpapp.ACT_UPD_CFG";
@ -50,8 +44,6 @@ public class MicroPhotoContext {
public final static int DEFAULT_HEARTBEAT_FOR_SHARED_NW = 10; // minutes
public final static int DEFAULT_QUICK_HEARTBEAT = 60; // second
public static final long BUILD_TIME_WO_SID_20250418 = 1744905600000L;
public static class AppConfig {
public String cmdid;
public String server;
@ -63,7 +55,6 @@ public class MicroPhotoContext {
public int packetSize;
public int encryption; // 0: none, 1: plaintext, 2: encrypted
public int channels; // number of camera channels
public long modificationTime = 0;
}
public static class MasterConfig {
@ -149,13 +140,13 @@ public class MicroPhotoContext {
return str;
}
public static boolean isAppAlive(Context context, String packageName, String serviceClassName) {
public static boolean isAppAlive(Context context, String packageName) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE);
boolean isRunning = false;
for (ActivityManager.RunningServiceInfo rsi : services) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName()) && TextUtils.equals(serviceClassName, rsi.service.getClassName())) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName())) {
isRunning = true;
break;
}
@ -164,21 +155,6 @@ public class MicroPhotoContext {
return isRunning;
}
public static int getProcessIdOfService(Context context, String packageName, String serviceClassName) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE);
int pid = 0;
for (ActivityManager.RunningServiceInfo rsi : services) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName()) && TextUtils.equals(serviceClassName, rsi.service.getClassName())) {
pid = rsi.pid;
break;
}
}
return pid;
}
public static String buildAppDir(Context contxt) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
@ -250,22 +226,6 @@ public class MicroPhotoContext {
return path;
}
public static String buildMpResAppDir(Context contxt) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
path += PACKAGE_NAME_MPRES + File.separator;
File pathFile = new File(path);
if (!pathFile.exists() && !pathFile.mkdirs()) {
return null;
}
return path;
}
public static boolean hasMpAppConfig(Context context) {
boolean existed = true;
String appPath = MicroPhotoContext.buildMpAppDir(context);
@ -281,26 +241,15 @@ public class MicroPhotoContext {
return getMpAppConfig(context, appPath + "data/App.json");
}
public static File getMpAppConfigFile(Context context) {
String appPath = buildMpAppDir(context);
return new File(appPath + "data/App.json");
}
public static AppConfig getMpAppConfig(Context context, String path) {
AppConfig appConfig = new AppConfig();
File file = new File(path);
try {
if (file.exists()) {
appConfig.modificationTime = file.lastModified();
String content = FilesUtils.readTextFile(path);
JSONObject jsonObject = TextUtils.isEmpty(content) ? new JSONObject() : new JSONObject(content);
appConfig.cmdid = jsonObject.optString(jsonObject.has("CMDID") ? "CMDID" : "cmdid", "");
appConfig.cmdid = jsonObject.optString(jsonObject.has("cmdid") ? "cmdid" : "CMDID", "");
appConfig.server = jsonObject.optString(jsonObject.has("server") ? "server" : "Server", "");
appConfig.port = jsonObject.optInt(jsonObject.has("port") ? "port" : "Port", 0);
appConfig.protocol = jsonObject.optInt(jsonObject.has("protocol") ? "protocol" : "Protocol", DEFAULT_PROTOCOL);
@ -314,7 +263,6 @@ public class MicroPhotoContext {
if (appConfig.protocol == 0) {
appConfig.protocol = DEFAULT_PROTOCOL;
}
}
} catch (JSONException e) {
e.printStackTrace();
}
@ -424,22 +372,22 @@ public class MicroPhotoContext {
}
}
// public static void restartMpApp(Context context, String reason) {
// /*
// Context context = MicroPhotoService.this.getApplicationContext();
// Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
//
// int noDelay = 1;
// intent.putExtra("noDelay", noDelay);
// PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
// AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE);
// mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // restart the app after 1 second
// System.exit(0);
//
// */
//
// restartApp(context, PACKAGE_NAME_MPAPP, reason);
// }
public static void restartMpApp(Context context, String reason) {
/*
Context context = MicroPhotoService.this.getApplicationContext();
Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE);
mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // restart the app after 1 second
System.exit(0);
*/
restartApp(context, PACKAGE_NAME_MPAPP, reason);
}
public static void restartMpApp(Context context, String reason, long delayedTimeMs) {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(PACKAGE_NAME_MPAPP);
@ -469,29 +417,20 @@ public class MicroPhotoContext {
*/
// try {
// Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
// if (intent != null) {
// intent.putExtra("noDelay", 1);
// if (!TextUtils.isEmpty(reason)) {
// intent.putExtra("reason", reason);
// }
// intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
// context.startActivity(intent);
// }
// } catch (Exception e) {
// e.printStackTrace();
// }
SysApi.forceStopApp(context,packageName);
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new RuntimeException(e);
if (TextUtils.equals(packageName, PACKAGE_NAME_MPAPP)) {
Intent intent = new Intent(ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setPackage(PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
} else {
SysApi.forceStopApp(context, packageName);
}
// Then launch the target app
try {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
if (intent != null) {
intent.putExtra("noDelay", 1);

@ -1,28 +1,19 @@
package com.xypower.common;
import android.annotation.SuppressLint;
import android.app.usage.NetworkStats;
import android.app.usage.NetworkStatsManager;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.ConnectivityManager;
import android.net.LinkProperties;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkInfo;
import android.net.Uri;
import android.net.wifi.WifiManager;
import android.os.RemoteException;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.text.format.Formatter;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URI;
import java.util.Enumeration;
import java.util.regex.Pattern;
@ -64,7 +55,7 @@ public class NetworkUtils {
}
public static String getMobileNetworkIp(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
ConnectivityManager connectivityManager = (ConnectivityManager)context.getSystemService(Context.CONNECTIVITY_SERVICE);
@SuppressLint("MissingPermission") NetworkInfo[] networkInfos = connectivityManager.getAllNetworkInfo();
if (networkInfos == null || networkInfos.length == 0) {
@ -98,29 +89,6 @@ public class NetworkUtils {
}
public static String getMobileIPAddress() {
try {
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while (interfaces.hasMoreElements()) {
NetworkInterface networkInterface = interfaces.nextElement();
if (networkInterface.isUp() && !networkInterface.isLoopback()) {
if (networkInterface.getName() != null && !networkInterface.getName().contains("ap")) {
Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
while (addresses.hasMoreElements()) {
InetAddress address = addresses.nextElement();
if (!address.isLoopbackAddress() && address.getAddress().length == 4) { // IPv4
return address.getHostAddress();
}
}
}
}
}
} catch (SocketException e) {
e.printStackTrace();
}
return null;
}
public static int addAPN(Context context, String name, String desc, String numeric, String user, String pwd) {
int id = -1;
String NUMERIC = getSIMInfo(context);
@ -196,40 +164,4 @@ public class NetworkUtils {
*/
public static class Usage {
public long mobleRxBytes; // mobile network, downloaded bytes
public long mobleTxBytes; // mobile network, uploaded bytes
public String uid; // package name
}
/**
* Aggregates mobile rx/tx bytes for a single uid between startTime and endTime.
*
* @param context
* @param startTime start of the query window, in milliseconds
* @param endTime   end of the query window, in milliseconds
* @param uid       application uid to query
*/
public static Usage getApplicationQuerySummary(Context context, long startTime, long endTime, int uid) {
Usage usage = new Usage();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
NetworkStatsManager nsm = (NetworkStatsManager) context.getSystemService(Context.NETWORK_STATS_SERVICE);
assert nsm != null;
try {
NetworkStats mobile = nsm.querySummary(ConnectivityManager.TYPE_MOBILE, null, startTime, endTime);
do {
NetworkStats.Bucket bucket = new NetworkStats.Bucket();
mobile.getNextBucket(bucket);
if(bucket.getUid() == uid) {
usage.mobleRxBytes += bucket.getRxBytes();
usage.mobleTxBytes += bucket.getTxBytes();
}
} while (mobile.hasNextBucket());
} catch (RemoteException e) {
e.printStackTrace();
}
}
return usage;
}
}
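
getApplicationQuerySummary() (removed here) walked NetworkStatsManager buckets and summed mobile rx/tx bytes for one uid. A minimal call sketch; the package name is an example, and real figures require the usage-access grant (PACKAGE_USAGE_STATS), which is an assumption about the device setup.

// Hypothetical call site; without usage access the buckets come back empty.
try {
    int uid = context.getPackageManager()
            .getApplicationInfo("com.xypower.mpapp", 0).uid;
    long end = System.currentTimeMillis();
    long start = end - 24L * 60L * 60L * 1000L; // last 24 hours
    NetworkUtils.Usage usage = NetworkUtils.getApplicationQuerySummary(context, start, end, uid);
    Log.d("Usage", "rx=" + usage.mobleRxBytes + " tx=" + usage.mobleTxBytes);
} catch (PackageManager.NameNotFoundException e) {
    e.printStackTrace();
}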

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"hdrStep":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"recognization":0,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"bsManufacturer":"\u4e0a\u6d77\u6b23\u5f71\u7535\u529b\u79d1\u6280\u80a1\u4efd\u6709\u9650\u516c\u53f8","channels":3,"encryption":0,"equipName":"\u56fe\u50cf\u5728\u7ebf\u76d1\u6d4b","heartbeat":10,"imgQuality":80,"model":"MSRDT-1-WP","network":0,"networkProtocol":0,"outputDbgInfo":0,"packetBase":1,"packetSize":32768,"port":6891,"postDataPaused":0,"productionDate":1717200000,"protocol":65298,"quality":80,"reportFault":0,"server":"61.169.135.146","timeForKeepingLogs":1296000,"timeForKeepingPhotos":1296000,"upgradePacketBase":1,"workStatusTimes":3}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1,100 +0,0 @@
[{"v":6015, "c":1},
{"v":6283, "c":2},
{"v":6442, "c":3},
{"v":6553, "c":4},
{"v":6641, "c":5},
{"v":6708, "c":6},
{"v":6735, "c":7},
{"v":6742, "c":8},
{"v":6746, "c":9},
{"v":6751, "c":10},
{"v":6757, "c":11},
{"v":6765, "c":12},
{"v":6774, "c":13},
{"v":6785, "c":14},
{"v":6797, "c":15},
{"v":6811, "c":16},
{"v":6822, "c":17},
{"v":6833, "c":18},
{"v":6844, "c":19},
{"v":6853, "c":20},
{"v":6863, "c":21},
{"v":6871, "c":22},
{"v":6878, "c":23},
{"v":6883, "c":24},
{"v":6891, "c":25},
{"v":6896, "c":26},
{"v":6897, "c":27},
{"v":6901, "c":28},
{"v":6903, "c":29},
{"v":6904, "c":30},
{"v":6906, "c":31},
{"v":6907, "c":32},
{"v":6908, "c":33},
{"v":6910, "c":34},
{"v":6911, "c":35},
{"v":6911, "c":36},
{"v":6913, "c":37},
{"v":6914, "c":38},
{"v":6914, "c":39},
{"v":6915, "c":40},
{"v":6917, "c":41},
{"v":6918, "c":42},
{"v":6918, "c":43},
{"v":6921, "c":44},
{"v":6922, "c":45},
{"v":6924, "c":46},
{"v":6926, "c":47},
{"v":6927, "c":48},
{"v":6929, "c":49},
{"v":6931, "c":50},
{"v":6934, "c":51},
{"v":6938, "c":52},
{"v":6941, "c":53},
{"v":6946, "c":54},
{"v":6948, "c":55},
{"v":6952, "c":56},
{"v":6954, "c":57},
{"v":6957, "c":58},
{"v":6959, "c":59},
{"v":6961, "c":60},
{"v":6963, "c":61},
{"v":6965, "c":62},
{"v":6967, "c":63},
{"v":6971, "c":64},
{"v":6973, "c":65},
{"v":6976, "c":66},
{"v":6978, "c":67},
{"v":6980, "c":68},
{"v":6982, "c":69},
{"v":6984, "c":70},
{"v":6986, "c":71},
{"v":6988, "c":72},
{"v":6989, "c":73},
{"v":6991, "c":74},
{"v":6992, "c":75},
{"v":6993, "c":76},
{"v":6995, "c":77},
{"v":6997, "c":78},
{"v":6998, "c":79},
{"v":7000, "c":80},
{"v":7003, "c":81},
{"v":7004, "c":82},
{"v":7006, "c":83},
{"v":7008, "c":84},
{"v":7011, "c":85},
{"v":7014, "c":86},
{"v":7018, "c":87},
{"v":7021, "c":88},
{"v":7024, "c":89},
{"v":7029, "c":90},
{"v":7033, "c":91},
{"v":7039, "c":92},
{"v":7044, "c":93},
{"v":7052, "c":94},
{"v":7062, "c":95},
{"v":7073, "c":96},
{"v":7087, "c":97},
{"v":7104, "c":98},
{"v":7122, "c":99},
{"v":7142, "c":100}]

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"hdrStep":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"recognization":0,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +0,0 @@
{"blobName16":"354","blobName32":"366","blobName8":"output","borderColor":16776960,"enabled":0,"items":[{"enabled":1,"iid":0,"name":"\u6316\u6398\u673a","prob":0.5,"subType":5,"type":1},{"enabled":1,"iid":1,"name":"\u540a\u5854","prob":0.5,"subType":2,"type":1},{"enabled":1,"iid":2,"name":"\u540a\u8f66","prob":0.5,"subType":1,"type":1},{"enabled":1,"iid":3,"name":"\u6c34\u6ce5\u6cf5\u8f66","prob":0.5,"subType":4,"type":1},{"enabled":1,"iid":4,"name":"\u5c71\u706b","prob":0.5,"subType":40,"type":4},{"enabled":1,"iid":5,"name":"\u70df\u96fe","prob":0.5,"subType":41,"type":4},{"enabled":1,"iid":6,"name":"\u63a8\u571f\u673a","prob":0.5,"subType":3,"type":1},{"enabled":1,"iid":7,"name":"\u7ffb\u6597\u8f66","prob":0.5,"subType":10,"type":1},{"enabled":1,"iid":8,"name":"\u5bfc\u7ebf\u5f02\u7269","prob":0.5,"subType":1,"type":3},{"enabled":1,"iid":9,"name":"\u9632\u5c18\u7f51","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":10,"name":"\u538b\u8def\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":11,"name":"\u6405\u62cc\u8f66","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":12,"name":"\u6869\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":13,"name":"\u56f4\u6321","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":14,"name":"\u6c34\u9a6c","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":15,"name":"\u5b89\u5168\u5e3d","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":16,"name":"\u4e95\u76d6\u7f3a\u5931","prob":1.0099999904632568,"subType":2,"type":3}],"textColor":16776960,"thickness":4,"version":"2024-12-30"}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -6,7 +6,6 @@ import android.graphics.SurfaceTexture;
import android.hardware.camera2.*;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;

@ -41,7 +41,6 @@ public class GPUCameraRecorder {
private final int degrees;
private final boolean recordNoFilter;
private final long duration;
private final boolean ismirror;
private long startTime;
@ -60,8 +59,7 @@ public class GPUCameraRecorder {
final boolean isLandscapeDevice,
final int degrees,
final boolean recordNoFilter,
final long duration,
final boolean ismirror
final long duration
) {
@ -83,7 +81,6 @@ public class GPUCameraRecorder {
this.degrees = degrees;
this.recordNoFilter = recordNoFilter;
this.duration = duration;
this.ismirror = ismirror;
// create preview Renderer
if (null == glPreviewRenderer) {
@ -122,7 +119,7 @@ public class GPUCameraRecorder {
public void run() {
if (glPreviewRenderer != null) {
glPreviewRenderer.setAngle(degrees);
glPreviewRenderer.onStartPreview(previewWidth, previewHeight, isLandscapeDevice,ismirror);
glPreviewRenderer.onStartPreview(previewWidth, previewHeight, isLandscapeDevice);
}
}
});

@ -4,7 +4,6 @@ import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.opengl.GLSurfaceView;
import android.util.Log;
@ -31,10 +30,6 @@ public class GPUCameraRecorderBuilder {
private int cameraHeight = 720;
private GlFilter glFilter;
private long duration;
private int rotation;
private Integer sensororientation;
private Integer facing;
private boolean ifmirror = false;
public GPUCameraRecorderBuilder(Activity activity, GLSurfaceView glSurfaceView) {
this.activity = activity;
@ -89,11 +84,6 @@ public class GPUCameraRecorderBuilder {
return this;
}
public GPUCameraRecorderBuilder rotation(int d) {
this.rotation = d;
return this;
}
public GPUCameraRecorderBuilder recordNoFilter(boolean recordNoFilter) {
this.recordNoFilter = recordNoFilter;
return this;
@ -107,34 +97,12 @@ public class GPUCameraRecorderBuilder {
CameraManager cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
boolean isLandscapeDevice = resources.getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
CameraCharacteristics cameraCharacteristics = null;
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (Exception ex) {
ex.printStackTrace();
}
if (cameraCharacteristics != null) {
facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
sensororientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
}
int degrees = 0;
if (facing == 1) {
if (rotation == -1) {
degrees = sensororientation;
} else {
degrees = sensororientation + (rotation - 1) * 90;
}
ifmirror = false;
} else {
if (rotation == -1) {
degrees = sensororientation + 180;
} else {
degrees = sensororientation + (rotation - 1) * 90 + 180;
if (isLandscapeDevice) {
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Log.d("GPUCameraRecorder", "Surface.ROTATION_90 = " + Surface.ROTATION_90 + " rotation = " + rotation);
degrees = 90 * (rotation - 2);
}
ifmirror = true;
}
Log.d("GPUCameraRecorder", "测试测试" + degrees);
GPUCameraRecorder GPUCameraRecorder = new GPUCameraRecorder(
cameraRecordListener,
@ -151,8 +119,7 @@ public class GPUCameraRecorderBuilder {
isLandscapeDevice,
degrees,
recordNoFilter,
duration,
ifmirror
duration
);
GPUCameraRecorder.setFilter(glFilter);

@ -66,14 +66,11 @@ public class GlPreviewRenderer extends GlFrameBufferObjectRenderer implements Su
Matrix.setIdentityM(STMatrix, 0);
}
public void onStartPreview(float cameraPreviewWidth, float cameraPreviewHeight, boolean isLandscapeDevice, boolean ismirror) {
public void onStartPreview(float cameraPreviewWidth, float cameraPreviewHeight, boolean isLandscapeDevice) {
Matrix.setIdentityM(MMatrix, 0);
Matrix.rotateM(MMatrix, 0, -angle, 0.0f, 0.0f, 1.0f);
Matrix.rotateM(MMatrix, 0, angle, 0.0f, 0.0f, 1.0f);
if (ismirror) {
Matrix.scaleM(MMatrix, 0, 1, -1, 1);
}
// Log.d("GPUCameraRecorder ", "angle" + angle);
// Log.d("GPUCameraRecorder ", "getMeasuredHeight " + glView.getMeasuredHeight());
// Log.d("GPUCameraRecorder ", "getMeasuredWidth " + glView.getMeasuredWidth());
@ -282,7 +279,7 @@ public class GlPreviewRenderer extends GlFrameBufferObjectRenderer implements Su
public void setAngle(int angle) {
this.angle = angle;
if (angle == 180 || angle == 0) {
if (angle == 90 || angle == 270) {
aspectRatio = (float) cameraResolution.getWidth() / cameraResolution.getHeight();
} else {
aspectRatio = (float) cameraResolution.getHeight() / cameraResolution.getWidth();

@ -4,7 +4,7 @@ plugins {
def AppMajorVersion = 1
def AppMinorVersion = 1
def AppBuildNumber = 36
def AppBuildNumber = 1
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
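Note on the packing formula above: it folds major/minor/build into one integer, so for example 1.1.1 gives AppVersionCode = 1 * 100000 + 1 * 1000 + 1 = 101001, while 1.1.36 gives 101036; AppVersionName is simply the dotted string.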
@ -15,7 +15,6 @@ android {
defaultConfig {
applicationId "com.xypower.mpmaster"
minSdk 28
//noinspection ExpiredTargetSdkVersion
targetSdk 28
versionCode AppVersionCode
versionName AppVersionName
@ -97,17 +96,15 @@ android {
dependencies {
implementation 'androidx.core:core:1.6.0'
implementation 'androidx.activity:activity:1.3.0'
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
// implementation 'androidx.annotation:annotation:+'
implementation 'androidx.annotation:annotation:1.2.0'
// implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
implementation 'androidx.annotation:annotation:+'
implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
implementation project(path: ':common')
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
// implementation 'com.orhanobut:logger:2.2.0'
implementation 'com.orhanobut:logger:2.2.0'
implementation files('libs/devapi.aar')
}

@ -1,27 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:sharedUserId="com.xypower.mp"
package="com.xypower.mpmaster"
tools:ignore="Deprecated">
package="com.xypower.mpmaster">
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
<uses-permission android:name="android.permission.MODIFY_PHONE_STATE"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.READ_LOGS"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.PACKAGE_USAGE_STATS"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.MODIFY_PHONE_STATE" />
<uses-permission android:name="android.permission.READ_LOGS" />
<uses-permission android:name="android.permission.PACKAGE_USAGE_STATS" />
<uses-permission
android:name="android.permission.FORCE_STOP_PACKAGES"
tools:ignore="ProtectedPermissions" />
@ -117,6 +110,7 @@
android:screenOrientation="landscape">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>

@ -0,0 +1 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -0,0 +1 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -0,0 +1 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"recognization":2,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +1 @@
{"blobName16":"354","blobName32":"366","blobName8":"output","borderColor":16776960,"enabled":0,"items":[{"enabled":1,"iid":0,"name":"\u6316\u6398\u673a","prob":0.5,"subType":5,"type":1},{"enabled":1,"iid":1,"name":"\u540a\u5854","prob":0.5,"subType":2,"type":1},{"enabled":1,"iid":2,"name":"\u540a\u8f66","prob":0.5,"subType":1,"type":1},{"enabled":1,"iid":3,"name":"\u6c34\u6ce5\u6cf5\u8f66","prob":0.5,"subType":4,"type":1},{"enabled":1,"iid":4,"name":"\u5c71\u706b","prob":0.5,"subType":40,"type":4},{"enabled":1,"iid":5,"name":"\u70df\u96fe","prob":0.5,"subType":41,"type":4},{"enabled":1,"iid":6,"name":"\u63a8\u571f\u673a","prob":0.5,"subType":3,"type":1},{"enabled":1,"iid":7,"name":"\u7ffb\u6597\u8f66","prob":0.5,"subType":10,"type":1},{"enabled":1,"iid":8,"name":"\u5bfc\u7ebf\u5f02\u7269","prob":0.5,"subType":1,"type":3},{"enabled":1,"iid":9,"name":"\u9632\u5c18\u7f51","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":10,"name":"\u538b\u8def\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":11,"name":"\u6405\u62cc\u8f66","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":12,"name":"\u6869\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":13,"name":"\u56f4\u6321","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":14,"name":"\u6c34\u9a6c","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":15,"name":"\u5b89\u5168\u5e3d","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":16,"name":"\u4e95\u76d6\u7f3a\u5931","prob":1.0099999904632568,"subType":2,"type":3}],"textColor":16776960,"thickness":4,"version":"2024-12-30"}
{"blobName16":"354","blobName32":"366","blobName8":"output","borderColor":16776960,"enabled":1,"items":[{"enabled":1,"iid":0,"name":"\u6316\u6398\u673a","prob":0.5,"subType":5,"type":1},{"enabled":1,"iid":1,"name":"\u540a\u5854","prob":0.5,"subType":2,"type":1},{"enabled":1,"iid":2,"name":"\u540a\u8f66","prob":0.5,"subType":1,"type":1},{"enabled":1,"iid":3,"name":"\u6c34\u6ce5\u6cf5\u8f66","prob":0.5,"subType":4,"type":1},{"enabled":1,"iid":4,"name":"\u5c71\u706b","prob":0.5,"subType":40,"type":4},{"enabled":1,"iid":5,"name":"\u70df\u96fe","prob":0.5,"subType":41,"type":4},{"enabled":1,"iid":6,"name":"\u63a8\u571f\u673a","prob":0.5,"subType":3,"type":1},{"enabled":1,"iid":7,"name":"\u7ffb\u6597\u8f66","prob":0.5,"subType":10,"type":1},{"enabled":1,"iid":8,"name":"\u5bfc\u7ebf\u5f02\u7269","prob":0.5,"subType":1,"type":3},{"enabled":1,"iid":9,"name":"\u9632\u5c18\u7f51","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":10,"name":"\u538b\u8def\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":11,"name":"\u6405\u62cc\u8f66","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":12,"name":"\u6869\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":13,"name":"\u56f4\u6321","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":14,"name":"\u6c34\u9a6c","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":15,"name":"\u5b89\u5168\u5e3d","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":16,"name":"\u4e95\u76d6\u7f3a\u5931","prob":1.0099999904632568,"subType":2,"type":3}],"textColor":16776960,"thickness":4}

@ -0,0 +1,175 @@
7767517
173 197
Input images 0 1 images
Convolution /model.0/conv/Conv 1 1 images /model.0/conv/Conv_output_0 0=32 1=6 11=6 2=1 12=1 3=2 13=2 4=2 14=2 15=2 16=2 5=1 6=3456
Swish /model.0/act/Mul 1 1 /model.0/conv/Conv_output_0 /model.0/act/Mul_output_0
Convolution /model.1/conv/Conv 1 1 /model.0/act/Mul_output_0 /model.1/conv/Conv_output_0 0=64 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=18432
Swish /model.1/act/Mul 1 1 /model.1/conv/Conv_output_0 /model.1/act/Mul_output_0
Split splitncnn_0 1 2 /model.1/act/Mul_output_0 /model.1/act/Mul_output_0_splitncnn_0 /model.1/act/Mul_output_0_splitncnn_1
Convolution /model.2/cv1/conv/Conv 1 1 /model.1/act/Mul_output_0_splitncnn_1 /model.2/cv1/conv/Conv_output_0 0=32 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=2048
Swish /model.2/cv1/act/Mul 1 1 /model.2/cv1/conv/Conv_output_0 /model.2/cv1/act/Mul_output_0
Split splitncnn_1 1 2 /model.2/cv1/act/Mul_output_0 /model.2/cv1/act/Mul_output_0_splitncnn_0 /model.2/cv1/act/Mul_output_0_splitncnn_1
Convolution /model.2/m/m.0/cv1/conv/Conv 1 1 /model.2/cv1/act/Mul_output_0_splitncnn_1 /model.2/m/m.0/cv1/conv/Conv_output_0 0=32 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=1024
Swish /model.2/m/m.0/cv1/act/Mul 1 1 /model.2/m/m.0/cv1/conv/Conv_output_0 /model.2/m/m.0/cv1/act/Mul_output_0
Convolution /model.2/m/m.0/cv2/conv/Conv 1 1 /model.2/m/m.0/cv1/act/Mul_output_0 /model.2/m/m.0/cv2/conv/Conv_output_0 0=32 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=9216
Swish /model.2/m/m.0/cv2/act/Mul 1 1 /model.2/m/m.0/cv2/conv/Conv_output_0 /model.2/m/m.0/cv2/act/Mul_output_0
BinaryOp /model.2/m/m.0/Add 2 1 /model.2/cv1/act/Mul_output_0_splitncnn_0 /model.2/m/m.0/cv2/act/Mul_output_0 /model.2/m/m.0/Add_output_0 0=0
Convolution /model.2/cv2/conv/Conv 1 1 /model.1/act/Mul_output_0_splitncnn_0 /model.2/cv2/conv/Conv_output_0 0=32 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=2048
Swish /model.2/cv2/act/Mul 1 1 /model.2/cv2/conv/Conv_output_0 /model.2/cv2/act/Mul_output_0
Concat /model.2/Concat 2 1 /model.2/m/m.0/Add_output_0 /model.2/cv2/act/Mul_output_0 /model.2/Concat_output_0 0=0
Convolution /model.2/cv3/conv/Conv 1 1 /model.2/Concat_output_0 /model.2/cv3/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=4096
Swish /model.2/cv3/act/Mul 1 1 /model.2/cv3/conv/Conv_output_0 /model.2/cv3/act/Mul_output_0
Convolution /model.3/conv/Conv 1 1 /model.2/cv3/act/Mul_output_0 /model.3/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=73728
Swish /model.3/act/Mul 1 1 /model.3/conv/Conv_output_0 /model.3/act/Mul_output_0
Split splitncnn_2 1 2 /model.3/act/Mul_output_0 /model.3/act/Mul_output_0_splitncnn_0 /model.3/act/Mul_output_0_splitncnn_1
Convolution /model.4/cv1/conv/Conv 1 1 /model.3/act/Mul_output_0_splitncnn_1 /model.4/cv1/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=8192
Swish /model.4/cv1/act/Mul 1 1 /model.4/cv1/conv/Conv_output_0 /model.4/cv1/act/Mul_output_0
Split splitncnn_3 1 2 /model.4/cv1/act/Mul_output_0 /model.4/cv1/act/Mul_output_0_splitncnn_0 /model.4/cv1/act/Mul_output_0_splitncnn_1
Convolution /model.4/m/m.0/cv1/conv/Conv 1 1 /model.4/cv1/act/Mul_output_0_splitncnn_1 /model.4/m/m.0/cv1/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=4096
Swish /model.4/m/m.0/cv1/act/Mul 1 1 /model.4/m/m.0/cv1/conv/Conv_output_0 /model.4/m/m.0/cv1/act/Mul_output_0
Convolution /model.4/m/m.0/cv2/conv/Conv 1 1 /model.4/m/m.0/cv1/act/Mul_output_0 /model.4/m/m.0/cv2/conv/Conv_output_0 0=64 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=36864
Swish /model.4/m/m.0/cv2/act/Mul 1 1 /model.4/m/m.0/cv2/conv/Conv_output_0 /model.4/m/m.0/cv2/act/Mul_output_0
BinaryOp /model.4/m/m.0/Add 2 1 /model.4/cv1/act/Mul_output_0_splitncnn_0 /model.4/m/m.0/cv2/act/Mul_output_0 /model.4/m/m.0/Add_output_0 0=0
Split splitncnn_4 1 2 /model.4/m/m.0/Add_output_0 /model.4/m/m.0/Add_output_0_splitncnn_0 /model.4/m/m.0/Add_output_0_splitncnn_1
Convolution /model.4/m/m.1/cv1/conv/Conv 1 1 /model.4/m/m.0/Add_output_0_splitncnn_1 /model.4/m/m.1/cv1/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=4096
Swish /model.4/m/m.1/cv1/act/Mul 1 1 /model.4/m/m.1/cv1/conv/Conv_output_0 /model.4/m/m.1/cv1/act/Mul_output_0
Convolution /model.4/m/m.1/cv2/conv/Conv 1 1 /model.4/m/m.1/cv1/act/Mul_output_0 /model.4/m/m.1/cv2/conv/Conv_output_0 0=64 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=36864
Swish /model.4/m/m.1/cv2/act/Mul 1 1 /model.4/m/m.1/cv2/conv/Conv_output_0 /model.4/m/m.1/cv2/act/Mul_output_0
BinaryOp /model.4/m/m.1/Add 2 1 /model.4/m/m.0/Add_output_0_splitncnn_0 /model.4/m/m.1/cv2/act/Mul_output_0 /model.4/m/m.1/Add_output_0 0=0
Convolution /model.4/cv2/conv/Conv 1 1 /model.3/act/Mul_output_0_splitncnn_0 /model.4/cv2/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=8192
Swish /model.4/cv2/act/Mul 1 1 /model.4/cv2/conv/Conv_output_0 /model.4/cv2/act/Mul_output_0
Concat /model.4/Concat 2 1 /model.4/m/m.1/Add_output_0 /model.4/cv2/act/Mul_output_0 /model.4/Concat_output_0 0=0
Convolution /model.4/cv3/conv/Conv 1 1 /model.4/Concat_output_0 /model.4/cv3/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.4/cv3/act/Mul 1 1 /model.4/cv3/conv/Conv_output_0 /model.4/cv3/act/Mul_output_0
Split splitncnn_5 1 2 /model.4/cv3/act/Mul_output_0 /model.4/cv3/act/Mul_output_0_splitncnn_0 /model.4/cv3/act/Mul_output_0_splitncnn_1
Convolution /model.5/conv/Conv 1 1 /model.4/cv3/act/Mul_output_0_splitncnn_1 /model.5/conv/Conv_output_0 0=256 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=294912
Swish /model.5/act/Mul 1 1 /model.5/conv/Conv_output_0 /model.5/act/Mul_output_0
Split splitncnn_6 1 2 /model.5/act/Mul_output_0 /model.5/act/Mul_output_0_splitncnn_0 /model.5/act/Mul_output_0_splitncnn_1
Convolution /model.6/cv1/conv/Conv 1 1 /model.5/act/Mul_output_0_splitncnn_1 /model.6/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=32768
Swish /model.6/cv1/act/Mul 1 1 /model.6/cv1/conv/Conv_output_0 /model.6/cv1/act/Mul_output_0
Split splitncnn_7 1 2 /model.6/cv1/act/Mul_output_0 /model.6/cv1/act/Mul_output_0_splitncnn_0 /model.6/cv1/act/Mul_output_0_splitncnn_1
Convolution /model.6/m/m.0/cv1/conv/Conv 1 1 /model.6/cv1/act/Mul_output_0_splitncnn_1 /model.6/m/m.0/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.6/m/m.0/cv1/act/Mul 1 1 /model.6/m/m.0/cv1/conv/Conv_output_0 /model.6/m/m.0/cv1/act/Mul_output_0
Convolution /model.6/m/m.0/cv2/conv/Conv 1 1 /model.6/m/m.0/cv1/act/Mul_output_0 /model.6/m/m.0/cv2/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.6/m/m.0/cv2/act/Mul 1 1 /model.6/m/m.0/cv2/conv/Conv_output_0 /model.6/m/m.0/cv2/act/Mul_output_0
BinaryOp /model.6/m/m.0/Add 2 1 /model.6/cv1/act/Mul_output_0_splitncnn_0 /model.6/m/m.0/cv2/act/Mul_output_0 /model.6/m/m.0/Add_output_0 0=0
Split splitncnn_8 1 2 /model.6/m/m.0/Add_output_0 /model.6/m/m.0/Add_output_0_splitncnn_0 /model.6/m/m.0/Add_output_0_splitncnn_1
Convolution /model.6/m/m.1/cv1/conv/Conv 1 1 /model.6/m/m.0/Add_output_0_splitncnn_1 /model.6/m/m.1/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.6/m/m.1/cv1/act/Mul 1 1 /model.6/m/m.1/cv1/conv/Conv_output_0 /model.6/m/m.1/cv1/act/Mul_output_0
Convolution /model.6/m/m.1/cv2/conv/Conv 1 1 /model.6/m/m.1/cv1/act/Mul_output_0 /model.6/m/m.1/cv2/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.6/m/m.1/cv2/act/Mul 1 1 /model.6/m/m.1/cv2/conv/Conv_output_0 /model.6/m/m.1/cv2/act/Mul_output_0
BinaryOp /model.6/m/m.1/Add 2 1 /model.6/m/m.0/Add_output_0_splitncnn_0 /model.6/m/m.1/cv2/act/Mul_output_0 /model.6/m/m.1/Add_output_0 0=0
Split splitncnn_9 1 2 /model.6/m/m.1/Add_output_0 /model.6/m/m.1/Add_output_0_splitncnn_0 /model.6/m/m.1/Add_output_0_splitncnn_1
Convolution /model.6/m/m.2/cv1/conv/Conv 1 1 /model.6/m/m.1/Add_output_0_splitncnn_1 /model.6/m/m.2/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.6/m/m.2/cv1/act/Mul 1 1 /model.6/m/m.2/cv1/conv/Conv_output_0 /model.6/m/m.2/cv1/act/Mul_output_0
Convolution /model.6/m/m.2/cv2/conv/Conv 1 1 /model.6/m/m.2/cv1/act/Mul_output_0 /model.6/m/m.2/cv2/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.6/m/m.2/cv2/act/Mul 1 1 /model.6/m/m.2/cv2/conv/Conv_output_0 /model.6/m/m.2/cv2/act/Mul_output_0
BinaryOp /model.6/m/m.2/Add 2 1 /model.6/m/m.1/Add_output_0_splitncnn_0 /model.6/m/m.2/cv2/act/Mul_output_0 /model.6/m/m.2/Add_output_0 0=0
Convolution /model.6/cv2/conv/Conv 1 1 /model.5/act/Mul_output_0_splitncnn_0 /model.6/cv2/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=32768
Swish /model.6/cv2/act/Mul 1 1 /model.6/cv2/conv/Conv_output_0 /model.6/cv2/act/Mul_output_0
Concat /model.6/Concat 2 1 /model.6/m/m.2/Add_output_0 /model.6/cv2/act/Mul_output_0 /model.6/Concat_output_0 0=0
Convolution /model.6/cv3/conv/Conv 1 1 /model.6/Concat_output_0 /model.6/cv3/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.6/cv3/act/Mul 1 1 /model.6/cv3/conv/Conv_output_0 /model.6/cv3/act/Mul_output_0
Split splitncnn_10 1 2 /model.6/cv3/act/Mul_output_0 /model.6/cv3/act/Mul_output_0_splitncnn_0 /model.6/cv3/act/Mul_output_0_splitncnn_1
Convolution /model.7/conv/Conv 1 1 /model.6/cv3/act/Mul_output_0_splitncnn_1 /model.7/conv/Conv_output_0 0=512 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=1179648
Swish /model.7/act/Mul 1 1 /model.7/conv/Conv_output_0 /model.7/act/Mul_output_0
Split splitncnn_11 1 2 /model.7/act/Mul_output_0 /model.7/act/Mul_output_0_splitncnn_0 /model.7/act/Mul_output_0_splitncnn_1
Convolution /model.8/cv1/conv/Conv 1 1 /model.7/act/Mul_output_0_splitncnn_1 /model.8/cv1/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.8/cv1/act/Mul 1 1 /model.8/cv1/conv/Conv_output_0 /model.8/cv1/act/Mul_output_0
Split splitncnn_12 1 2 /model.8/cv1/act/Mul_output_0 /model.8/cv1/act/Mul_output_0_splitncnn_0 /model.8/cv1/act/Mul_output_0_splitncnn_1
Convolution /model.8/m/m.0/cv1/conv/Conv 1 1 /model.8/cv1/act/Mul_output_0_splitncnn_1 /model.8/m/m.0/cv1/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.8/m/m.0/cv1/act/Mul 1 1 /model.8/m/m.0/cv1/conv/Conv_output_0 /model.8/m/m.0/cv1/act/Mul_output_0
Convolution /model.8/m/m.0/cv2/conv/Conv 1 1 /model.8/m/m.0/cv1/act/Mul_output_0 /model.8/m/m.0/cv2/conv/Conv_output_0 0=256 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=589824
Swish /model.8/m/m.0/cv2/act/Mul 1 1 /model.8/m/m.0/cv2/conv/Conv_output_0 /model.8/m/m.0/cv2/act/Mul_output_0
BinaryOp /model.8/m/m.0/Add 2 1 /model.8/cv1/act/Mul_output_0_splitncnn_0 /model.8/m/m.0/cv2/act/Mul_output_0 /model.8/m/m.0/Add_output_0 0=0
Convolution /model.8/cv2/conv/Conv 1 1 /model.7/act/Mul_output_0_splitncnn_0 /model.8/cv2/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.8/cv2/act/Mul 1 1 /model.8/cv2/conv/Conv_output_0 /model.8/cv2/act/Mul_output_0
Concat /model.8/Concat 2 1 /model.8/m/m.0/Add_output_0 /model.8/cv2/act/Mul_output_0 /model.8/Concat_output_0 0=0
Convolution /model.8/cv3/conv/Conv 1 1 /model.8/Concat_output_0 /model.8/cv3/conv/Conv_output_0 0=512 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=262144
Swish /model.8/cv3/act/Mul 1 1 /model.8/cv3/conv/Conv_output_0 /model.8/cv3/act/Mul_output_0
Convolution /model.9/cv1/conv/Conv 1 1 /model.8/cv3/act/Mul_output_0 /model.9/cv1/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.9/cv1/act/Mul 1 1 /model.9/cv1/conv/Conv_output_0 /model.9/cv1/act/Mul_output_0
Split splitncnn_13 1 2 /model.9/cv1/act/Mul_output_0 /model.9/cv1/act/Mul_output_0_splitncnn_0 /model.9/cv1/act/Mul_output_0_splitncnn_1
Pooling /model.9/m/MaxPool 1 1 /model.9/cv1/act/Mul_output_0_splitncnn_1 /model.9/m/MaxPool_output_0 0=0 1=5 11=5 2=1 12=1 3=2 13=2 14=2 15=2 5=1
Split splitncnn_14 1 2 /model.9/m/MaxPool_output_0 /model.9/m/MaxPool_output_0_splitncnn_0 /model.9/m/MaxPool_output_0_splitncnn_1
Pooling /model.9/m_1/MaxPool 1 1 /model.9/m/MaxPool_output_0_splitncnn_1 /model.9/m_1/MaxPool_output_0 0=0 1=5 11=5 2=1 12=1 3=2 13=2 14=2 15=2 5=1
Split splitncnn_15 1 2 /model.9/m_1/MaxPool_output_0 /model.9/m_1/MaxPool_output_0_splitncnn_0 /model.9/m_1/MaxPool_output_0_splitncnn_1
Pooling /model.9/m_2/MaxPool 1 1 /model.9/m_1/MaxPool_output_0_splitncnn_1 /model.9/m_2/MaxPool_output_0 0=0 1=5 11=5 2=1 12=1 3=2 13=2 14=2 15=2 5=1
Concat /model.9/Concat 4 1 /model.9/cv1/act/Mul_output_0_splitncnn_0 /model.9/m/MaxPool_output_0_splitncnn_0 /model.9/m_1/MaxPool_output_0_splitncnn_0 /model.9/m_2/MaxPool_output_0 /model.9/Concat_output_0 0=0
Convolution /model.9/cv2/conv/Conv 1 1 /model.9/Concat_output_0 /model.9/cv2/conv/Conv_output_0 0=512 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=524288
Swish /model.9/cv2/act/Mul 1 1 /model.9/cv2/conv/Conv_output_0 /model.9/cv2/act/Mul_output_0
Convolution /model.10/conv/Conv 1 1 /model.9/cv2/act/Mul_output_0 /model.10/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.10/act/Mul 1 1 /model.10/conv/Conv_output_0 /model.10/act/Mul_output_0
Split splitncnn_16 1 2 /model.10/act/Mul_output_0 /model.10/act/Mul_output_0_splitncnn_0 /model.10/act/Mul_output_0_splitncnn_1
Interp /model.11/Resize 1 1 /model.10/act/Mul_output_0_splitncnn_1 /model.11/Resize_output_0 0=1 1=2.000000e+00 2=2.000000e+00 3=0 4=0 6=0
Concat /model.12/Concat 2 1 /model.11/Resize_output_0 /model.6/cv3/act/Mul_output_0_splitncnn_0 /model.12/Concat_output_0 0=0
Split splitncnn_17 1 2 /model.12/Concat_output_0 /model.12/Concat_output_0_splitncnn_0 /model.12/Concat_output_0_splitncnn_1
Convolution /model.13/cv1/conv/Conv 1 1 /model.12/Concat_output_0_splitncnn_1 /model.13/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.13/cv1/act/Mul 1 1 /model.13/cv1/conv/Conv_output_0 /model.13/cv1/act/Mul_output_0
Convolution /model.13/m/m.0/cv1/conv/Conv 1 1 /model.13/cv1/act/Mul_output_0 /model.13/m/m.0/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.13/m/m.0/cv1/act/Mul 1 1 /model.13/m/m.0/cv1/conv/Conv_output_0 /model.13/m/m.0/cv1/act/Mul_output_0
Convolution /model.13/m/m.0/cv2/conv/Conv 1 1 /model.13/m/m.0/cv1/act/Mul_output_0 /model.13/m/m.0/cv2/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.13/m/m.0/cv2/act/Mul 1 1 /model.13/m/m.0/cv2/conv/Conv_output_0 /model.13/m/m.0/cv2/act/Mul_output_0
Convolution /model.13/cv2/conv/Conv 1 1 /model.12/Concat_output_0_splitncnn_0 /model.13/cv2/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.13/cv2/act/Mul 1 1 /model.13/cv2/conv/Conv_output_0 /model.13/cv2/act/Mul_output_0
Concat /model.13/Concat 2 1 /model.13/m/m.0/cv2/act/Mul_output_0 /model.13/cv2/act/Mul_output_0 /model.13/Concat_output_0 0=0
Convolution /model.13/cv3/conv/Conv 1 1 /model.13/Concat_output_0 /model.13/cv3/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.13/cv3/act/Mul 1 1 /model.13/cv3/conv/Conv_output_0 /model.13/cv3/act/Mul_output_0
Convolution /model.14/conv/Conv 1 1 /model.13/cv3/act/Mul_output_0 /model.14/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=32768
Swish /model.14/act/Mul 1 1 /model.14/conv/Conv_output_0 /model.14/act/Mul_output_0
Split splitncnn_18 1 2 /model.14/act/Mul_output_0 /model.14/act/Mul_output_0_splitncnn_0 /model.14/act/Mul_output_0_splitncnn_1
Interp /model.15/Resize 1 1 /model.14/act/Mul_output_0_splitncnn_1 /model.15/Resize_output_0 0=1 1=2.000000e+00 2=2.000000e+00 3=0 4=0 6=0
Concat /model.16/Concat 2 1 /model.15/Resize_output_0 /model.4/cv3/act/Mul_output_0_splitncnn_0 /model.16/Concat_output_0 0=0
Split splitncnn_19 1 2 /model.16/Concat_output_0 /model.16/Concat_output_0_splitncnn_0 /model.16/Concat_output_0_splitncnn_1
Convolution /model.17/cv1/conv/Conv 1 1 /model.16/Concat_output_0_splitncnn_1 /model.17/cv1/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.17/cv1/act/Mul 1 1 /model.17/cv1/conv/Conv_output_0 /model.17/cv1/act/Mul_output_0
Convolution /model.17/m/m.0/cv1/conv/Conv 1 1 /model.17/cv1/act/Mul_output_0 /model.17/m/m.0/cv1/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=4096
Swish /model.17/m/m.0/cv1/act/Mul 1 1 /model.17/m/m.0/cv1/conv/Conv_output_0 /model.17/m/m.0/cv1/act/Mul_output_0
Convolution /model.17/m/m.0/cv2/conv/Conv 1 1 /model.17/m/m.0/cv1/act/Mul_output_0 /model.17/m/m.0/cv2/conv/Conv_output_0 0=64 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=36864
Swish /model.17/m/m.0/cv2/act/Mul 1 1 /model.17/m/m.0/cv2/conv/Conv_output_0 /model.17/m/m.0/cv2/act/Mul_output_0
Convolution /model.17/cv2/conv/Conv 1 1 /model.16/Concat_output_0_splitncnn_0 /model.17/cv2/conv/Conv_output_0 0=64 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.17/cv2/act/Mul 1 1 /model.17/cv2/conv/Conv_output_0 /model.17/cv2/act/Mul_output_0
Concat /model.17/Concat 2 1 /model.17/m/m.0/cv2/act/Mul_output_0 /model.17/cv2/act/Mul_output_0 /model.17/Concat_output_0 0=0
Convolution /model.17/cv3/conv/Conv 1 1 /model.17/Concat_output_0 /model.17/cv3/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.17/cv3/act/Mul 1 1 /model.17/cv3/conv/Conv_output_0 /model.17/cv3/act/Mul_output_0
Split splitncnn_20 1 2 /model.17/cv3/act/Mul_output_0 /model.17/cv3/act/Mul_output_0_splitncnn_0 /model.17/cv3/act/Mul_output_0_splitncnn_1
Convolution /model.18/conv/Conv 1 1 /model.17/cv3/act/Mul_output_0_splitncnn_1 /model.18/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.18/act/Mul 1 1 /model.18/conv/Conv_output_0 /model.18/act/Mul_output_0
Concat /model.19/Concat 2 1 /model.18/act/Mul_output_0 /model.14/act/Mul_output_0_splitncnn_0 /model.19/Concat_output_0 0=0
Split splitncnn_21 1 2 /model.19/Concat_output_0 /model.19/Concat_output_0_splitncnn_0 /model.19/Concat_output_0_splitncnn_1
Convolution /model.20/cv1/conv/Conv 1 1 /model.19/Concat_output_0_splitncnn_1 /model.20/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=32768
Swish /model.20/cv1/act/Mul 1 1 /model.20/cv1/conv/Conv_output_0 /model.20/cv1/act/Mul_output_0
Convolution /model.20/m/m.0/cv1/conv/Conv 1 1 /model.20/cv1/act/Mul_output_0 /model.20/m/m.0/cv1/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16384
Swish /model.20/m/m.0/cv1/act/Mul 1 1 /model.20/m/m.0/cv1/conv/Conv_output_0 /model.20/m/m.0/cv1/act/Mul_output_0
Convolution /model.20/m/m.0/cv2/conv/Conv 1 1 /model.20/m/m.0/cv1/act/Mul_output_0 /model.20/m/m.0/cv2/conv/Conv_output_0 0=128 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=147456
Swish /model.20/m/m.0/cv2/act/Mul 1 1 /model.20/m/m.0/cv2/conv/Conv_output_0 /model.20/m/m.0/cv2/act/Mul_output_0
Convolution /model.20/cv2/conv/Conv 1 1 /model.19/Concat_output_0_splitncnn_0 /model.20/cv2/conv/Conv_output_0 0=128 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=32768
Swish /model.20/cv2/act/Mul 1 1 /model.20/cv2/conv/Conv_output_0 /model.20/cv2/act/Mul_output_0
Concat /model.20/Concat 2 1 /model.20/m/m.0/cv2/act/Mul_output_0 /model.20/cv2/act/Mul_output_0 /model.20/Concat_output_0 0=0
Convolution /model.20/cv3/conv/Conv 1 1 /model.20/Concat_output_0 /model.20/cv3/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.20/cv3/act/Mul 1 1 /model.20/cv3/conv/Conv_output_0 /model.20/cv3/act/Mul_output_0
Split splitncnn_22 1 2 /model.20/cv3/act/Mul_output_0 /model.20/cv3/act/Mul_output_0_splitncnn_0 /model.20/cv3/act/Mul_output_0_splitncnn_1
Convolution /model.21/conv/Conv 1 1 /model.20/cv3/act/Mul_output_0_splitncnn_1 /model.21/conv/Conv_output_0 0=256 1=3 11=3 2=1 12=1 3=2 13=2 4=1 14=1 15=1 16=1 5=1 6=589824
Swish /model.21/act/Mul 1 1 /model.21/conv/Conv_output_0 /model.21/act/Mul_output_0
Concat /model.22/Concat 2 1 /model.21/act/Mul_output_0 /model.10/act/Mul_output_0_splitncnn_0 /model.22/Concat_output_0 0=0
Split splitncnn_23 1 2 /model.22/Concat_output_0 /model.22/Concat_output_0_splitncnn_0 /model.22/Concat_output_0_splitncnn_1
Convolution /model.23/cv1/conv/Conv 1 1 /model.22/Concat_output_0_splitncnn_1 /model.23/cv1/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.23/cv1/act/Mul 1 1 /model.23/cv1/conv/Conv_output_0 /model.23/cv1/act/Mul_output_0
Convolution /model.23/m/m.0/cv1/conv/Conv 1 1 /model.23/cv1/act/Mul_output_0 /model.23/m/m.0/cv1/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=65536
Swish /model.23/m/m.0/cv1/act/Mul 1 1 /model.23/m/m.0/cv1/conv/Conv_output_0 /model.23/m/m.0/cv1/act/Mul_output_0
Convolution /model.23/m/m.0/cv2/conv/Conv 1 1 /model.23/m/m.0/cv1/act/Mul_output_0 /model.23/m/m.0/cv2/conv/Conv_output_0 0=256 1=3 11=3 2=1 12=1 3=1 13=1 4=1 14=1 15=1 16=1 5=1 6=589824
Swish /model.23/m/m.0/cv2/act/Mul 1 1 /model.23/m/m.0/cv2/conv/Conv_output_0 /model.23/m/m.0/cv2/act/Mul_output_0
Convolution /model.23/cv2/conv/Conv 1 1 /model.22/Concat_output_0_splitncnn_0 /model.23/cv2/conv/Conv_output_0 0=256 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=131072
Swish /model.23/cv2/act/Mul 1 1 /model.23/cv2/conv/Conv_output_0 /model.23/cv2/act/Mul_output_0
Concat /model.23/Concat 2 1 /model.23/m/m.0/cv2/act/Mul_output_0 /model.23/cv2/act/Mul_output_0 /model.23/Concat_output_0 0=0
Convolution /model.23/cv3/conv/Conv 1 1 /model.23/Concat_output_0 /model.23/cv3/conv/Conv_output_0 0=512 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=262144
Swish /model.23/cv3/act/Mul 1 1 /model.23/cv3/conv/Conv_output_0 /model.23/cv3/act/Mul_output_0
Convolution /model.24/m.0/Conv 1 1 /model.17/cv3/act/Mul_output_0_splitncnn_0 /model.24/m.0/Conv_output_0 0=66 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=8448
Reshape /model.24/Reshape 1 1 /model.24/m.0/Conv_output_0 /model.24/Reshape_output_0 0=-1 1=22 2=3
Permute /model.24/Transpose 1 1 /model.24/Reshape_output_0 output 0=1
Convolution /model.24/m.1/Conv 1 1 /model.20/cv3/act/Mul_output_0_splitncnn_0 /model.24/m.1/Conv_output_0 0=66 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=16896
Reshape /model.24/Reshape_1 1 1 /model.24/m.1/Conv_output_0 /model.24/Reshape_1_output_0 0=-1 1=22 2=3
Permute /model.24/Transpose_1 1 1 /model.24/Reshape_1_output_0 354 0=1
Convolution /model.24/m.2/Conv 1 1 /model.23/cv3/act/Mul_output_0 /model.24/m.2/Conv_output_0 0=66 1=1 11=1 2=1 12=1 3=1 13=1 4=0 14=0 15=0 16=0 5=1 6=33792
Reshape /model.24/Reshape_2 1 1 /model.24/m.2/Conv_output_0 /model.24/Reshape_2_output_0 0=-1 1=22 2=3
Permute /model.24/Transpose_2 1 1 /model.24/Reshape_2_output_0 366 0=1
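The three Permute outputs above ("output", "354", "366") line up with the blobName8/blobName16/blobName32 fields of the detection config earlier in this diff, and the 22-channel reshape (0=-1 1=22 2=3) is consistent with the 17 detection classes listed there plus 5 box/objectness channels. A minimal ncnn extraction sketch under those assumptions follows; the 640x640 input size, the normalization, and the file names are guesses, not taken from this repository.

#include "net.h"   // ncnn

// Hypothetical sketch (not code from this repo): load the exported param/bin
// pair and pull the three YOLO heads by the blob names in the param file above.
static void run_detector(const unsigned char* bgr, int img_w, int img_h)
{
    ncnn::Net net;
    net.load_param("model.param");   // placeholder file names
    net.load_model("model.bin");

    // 640x640 resize is an assumption; the param file only names the input blob "images".
    ncnn::Mat in = ncnn::Mat::from_pixels_resize(bgr, ncnn::Mat::PIXEL_BGR2RGB,
                                                 img_w, img_h, 640, 640);
    const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
    in.substract_mean_normalize(0, norm_vals);   // ncnn API spelling

    ncnn::Extractor ex = net.create_extractor();
    ex.input("images", in);
    ncnn::Mat out8, out16, out32;
    ex.extract("output", out8);   // stride-8 head  -> blobName8
    ex.extract("354", out16);     // stride-16 head -> blobName16
    ex.extract("366", out32);     // stride-32 head -> blobName32
}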

@ -1 +1 @@
{"absHeartbeats":[33449,85836],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","syncTime":0,"timeForKeepingLogs":15,"usingAbsHbTime":1}
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":1,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1,39 +1,52 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.18.1)
# Declares and names the project.
add_definitions(-DTERMINAL_CLIENT)
add_definitions(-DTERMINAL_CLIENT_MNTN)
project("mpmaster")
#
add_definitions(-DTERMINAL_CLIENT -DTERMINAL_CLIENT_MNTN)
#
include_directories(
${TERM_CORE_ROOT}
${CMAKE_CURRENT_SOURCE_DIR}/bzip2
${CMAKE_CURRENT_SOURCE_DIR}/bspatch
)
# bzip2
add_library(bz2 STATIC
bzip2/blocksort.c
bzip2/huffman.c
bzip2/crctable.c
bzip2/randtable.c
bzip2/compress.c
bzip2/decompress.c
bzip2/bzlib.c
)
# bspatch / bsdiff static libraries (built on top of bzip2)
add_definitions(-DBSPATCH_EXECUTABLE)
add_definitions(-DBSDIFF_EXECUTABLE)
add_library(bspatch STATIC bspatch/bspatch.c)
add_library(bsdiff STATIC bspatch/bsdiff.c)
target_link_libraries(bspatch bsdiff bz2 log) # link against bsdiff, bzip2 and the NDK log library
# JNI shared library that links in bspatch and bzip2
add_library(mpmaster SHARED mpmaster.cpp)
target_link_libraries(mpmaster
bspatch # bspatch
bsdiff
bz2 # bzip2
${log-lib}
)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
include_directories(${TERM_CORE_ROOT})
add_library( # Sets the name of the library.
mpmaster
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
mpmaster.cpp )
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
mpmaster
# Links the target library to the log library
# included in the NDK.
${log-lib} )

@ -1,443 +0,0 @@
/*-
* Copyright 2003-2005 Colin Percival
* Copyright 2012 Matthew Endsley
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted providing that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "bsdiff.h"
#include <limits.h>
#include <string.h>
#define MIN(x,y) (((x)<(y)) ? (x) : (y))
static void split(int64_t *I,int64_t *V,int64_t start,int64_t len,int64_t h)
{
int64_t i,j,k,x,tmp,jj,kk;
if(len<16) {
for(k=start;k<start+len;k+=j) {
j=1;x=V[I[k]+h];
for(i=1;k+i<start+len;i++) {
if(V[I[k+i]+h]<x) {
x=V[I[k+i]+h];
j=0;
};
if(V[I[k+i]+h]==x) {
tmp=I[k+j];I[k+j]=I[k+i];I[k+i]=tmp;
j++;
};
};
for(i=0;i<j;i++) V[I[k+i]]=k+j-1;
if(j==1) I[k]=-1;
};
return;
};
x=V[I[start+len/2]+h];
jj=0;kk=0;
for(i=start;i<start+len;i++) {
if(V[I[i]+h]<x) jj++;
if(V[I[i]+h]==x) kk++;
};
jj+=start;kk+=jj;
i=start;j=0;k=0;
while(i<jj) {
if(V[I[i]+h]<x) {
i++;
} else if(V[I[i]+h]==x) {
tmp=I[i];I[i]=I[jj+j];I[jj+j]=tmp;
j++;
} else {
tmp=I[i];I[i]=I[kk+k];I[kk+k]=tmp;
k++;
};
};
while(jj+j<kk) {
if(V[I[jj+j]+h]==x) {
j++;
} else {
tmp=I[jj+j];I[jj+j]=I[kk+k];I[kk+k]=tmp;
k++;
};
};
if(jj>start) split(I,V,start,jj-start,h);
for(i=0;i<kk-jj;i++) V[I[jj+i]]=kk-1;
if(jj==kk-1) I[jj]=-1;
if(start+len>kk) split(I,V,kk,start+len-kk,h);
}
static void qsufsort(int64_t *I,int64_t *V,const uint8_t *old,int64_t oldsize)
{
int64_t buckets[256];
int64_t i,h,len;
for(i=0;i<256;i++) buckets[i]=0;
for(i=0;i<oldsize;i++) buckets[old[i]]++;
for(i=1;i<256;i++) buckets[i]+=buckets[i-1];
for(i=255;i>0;i--) buckets[i]=buckets[i-1];
buckets[0]=0;
for(i=0;i<oldsize;i++) I[++buckets[old[i]]]=i;
I[0]=oldsize;
for(i=0;i<oldsize;i++) V[i]=buckets[old[i]];
V[oldsize]=0;
for(i=1;i<256;i++) if(buckets[i]==buckets[i-1]+1) I[buckets[i]]=-1;
I[0]=-1;
for(h=1;I[0]!=-(oldsize+1);h+=h) {
len=0;
for(i=0;i<oldsize+1;) {
if(I[i]<0) {
len-=I[i];
i-=I[i];
} else {
if(len) I[i-len]=-len;
len=V[I[i]]+1-i;
split(I,V,i,len,h);
i+=len;
len=0;
};
};
if(len) I[i-len]=-len;
};
for(i=0;i<oldsize+1;i++) I[V[i]]=i;
}
static int64_t matchlen(const uint8_t *old,int64_t oldsize,const uint8_t *new,int64_t newsize)
{
int64_t i;
for(i=0;(i<oldsize)&&(i<newsize);i++)
if(old[i]!=new[i]) break;
return i;
}
static int64_t search(const int64_t *I,const uint8_t *old,int64_t oldsize,
const uint8_t *new,int64_t newsize,int64_t st,int64_t en,int64_t *pos)
{
int64_t x,y;
if(en-st<2) {
x=matchlen(old+I[st],oldsize-I[st],new,newsize);
y=matchlen(old+I[en],oldsize-I[en],new,newsize);
if(x>y) {
*pos=I[st];
return x;
} else {
*pos=I[en];
return y;
}
};
x=st+(en-st)/2;
if(memcmp(old+I[x],new,MIN(oldsize-I[x],newsize))<0) {
return search(I,old,oldsize,new,newsize,x,en,pos);
} else {
return search(I,old,oldsize,new,newsize,st,x,pos);
};
}
static void offtout(int64_t x,uint8_t *buf)
{
int64_t y;
if(x<0) y=-x; else y=x;
buf[0]=y%256;y-=buf[0];
y=y/256;buf[1]=y%256;y-=buf[1];
y=y/256;buf[2]=y%256;y-=buf[2];
y=y/256;buf[3]=y%256;y-=buf[3];
y=y/256;buf[4]=y%256;y-=buf[4];
y=y/256;buf[5]=y%256;y-=buf[5];
y=y/256;buf[6]=y%256;y-=buf[6];
y=y/256;buf[7]=y%256;
if(x<0) buf[7]|=0x80;
}
static int64_t writedata(struct bsdiff_stream* stream, const void* buffer, int64_t length)
{
int64_t result = 0;
while (length > 0)
{
const int smallsize = (int)MIN(length, INT_MAX);
const int writeresult = stream->write(stream, buffer, smallsize);
if (writeresult == -1)
{
return -1;
}
result += writeresult;
length -= smallsize;
buffer = (uint8_t*)buffer + smallsize;
}
return result;
}
struct bsdiff_request
{
const uint8_t* old;
int64_t oldsize;
const uint8_t* new;
int64_t newsize;
struct bsdiff_stream* stream;
int64_t *I;
uint8_t *buffer;
};
static int bsdiff_internal(const struct bsdiff_request req)
{
int64_t *I,*V;
int64_t scan,pos,len;
int64_t lastscan,lastpos,lastoffset;
int64_t oldscore,scsc;
int64_t s,Sf,lenf,Sb,lenb;
int64_t overlap,Ss,lens;
int64_t i;
uint8_t *buffer;
uint8_t buf[8 * 3];
if((V=req.stream->malloc((req.oldsize+1)*sizeof(int64_t)))==NULL) return -1;
I = req.I;
qsufsort(I,V,req.old,req.oldsize);
req.stream->free(V);
buffer = req.buffer;
/* Compute the differences, writing ctrl as we go */
scan=0;len=0;pos=0;
lastscan=0;lastpos=0;lastoffset=0;
while(scan<req.newsize) {
oldscore=0;
for(scsc=scan+=len;scan<req.newsize;scan++) {
len=search(I,req.old,req.oldsize,req.new+scan,req.newsize-scan,
0,req.oldsize,&pos);
for(;scsc<scan+len;scsc++)
if((scsc+lastoffset<req.oldsize) &&
(req.old[scsc+lastoffset] == req.new[scsc]))
oldscore++;
if(((len==oldscore) && (len!=0)) ||
(len>oldscore+8)) break;
if((scan+lastoffset<req.oldsize) &&
(req.old[scan+lastoffset] == req.new[scan]))
oldscore--;
};
if((len!=oldscore) || (scan==req.newsize)) {
s=0;Sf=0;lenf=0;
for(i=0;(lastscan+i<scan)&&(lastpos+i<req.oldsize);) {
if(req.old[lastpos+i]==req.new[lastscan+i]) s++;
i++;
if(s*2-i>Sf*2-lenf) { Sf=s; lenf=i; };
};
lenb=0;
if(scan<req.newsize) {
s=0;Sb=0;
for(i=1;(scan>=lastscan+i)&&(pos>=i);i++) {
if(req.old[pos-i]==req.new[scan-i]) s++;
if(s*2-i>Sb*2-lenb) { Sb=s; lenb=i; };
};
};
if(lastscan+lenf>scan-lenb) {
overlap=(lastscan+lenf)-(scan-lenb);
s=0;Ss=0;lens=0;
for(i=0;i<overlap;i++) {
if(req.new[lastscan+lenf-overlap+i]==
req.old[lastpos+lenf-overlap+i]) s++;
if(req.new[scan-lenb+i]==
req.old[pos-lenb+i]) s--;
if(s>Ss) { Ss=s; lens=i+1; };
};
lenf+=lens-overlap;
lenb-=lens;
};
offtout(lenf,buf);
offtout((scan-lenb)-(lastscan+lenf),buf+8);
offtout((pos-lenb)-(lastpos+lenf),buf+16);
/* Write control data */
if (writedata(req.stream, buf, sizeof(buf)))
return -1;
/* Write diff data */
for(i=0;i<lenf;i++)
buffer[i]=req.new[lastscan+i]-req.old[lastpos+i];
if (writedata(req.stream, buffer, lenf))
return -1;
/* Write extra data */
for(i=0;i<(scan-lenb)-(lastscan+lenf);i++)
buffer[i]=req.new[lastscan+lenf+i];
if (writedata(req.stream, buffer, (scan-lenb)-(lastscan+lenf)))
return -1;
lastscan=scan-lenb;
lastpos=pos-lenb;
lastoffset=pos-scan;
};
};
return 0;
}
int bsdiff(const uint8_t* old, int64_t oldsize, const uint8_t* new, int64_t newsize, struct bsdiff_stream* stream)
{
int result;
struct bsdiff_request req;
if((req.I=stream->malloc((oldsize+1)*sizeof(int64_t)))==NULL)
return -1;
if((req.buffer=stream->malloc(newsize+1))==NULL)
{
stream->free(req.I);
return -1;
}
req.old = old;
req.oldsize = oldsize;
req.new = new;
req.newsize = newsize;
req.stream = stream;
result = bsdiff_internal(req);
stream->free(req.buffer);
stream->free(req.I);
return result;
}
#include <sys/types.h>
#include <bzlib.h>
#include <err.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
static int bz2_write(struct bsdiff_stream* stream, const void* buffer, int size)
{
int bz2err;
BZFILE* bz2;
bz2 = (BZFILE*)stream->opaque;
BZ2_bzWrite(&bz2err, bz2, (void*)buffer, size);
if (bz2err != BZ_STREAM_END && bz2err != BZ_OK)
return -1;
return 0;
}
int bsdiff_main(int argc, const char **argv)
{
int fd;
int bz2err;
uint8_t *old,*new;
off_t oldsize,newsize;
uint8_t buf[8];
FILE * pf;
struct bsdiff_stream stream;
BZFILE* bz2;
memset(&bz2, 0, sizeof(bz2));
stream.malloc = malloc;
stream.free = free;
stream.write = bz2_write;
if(argc!=4) errx(1,"usage: %s oldfile newfile patchfile\n",argv[0]);
/* Allocate oldsize+1 bytes instead of oldsize bytes to ensure
that we never try to malloc(0) and get a NULL pointer */
if(((fd=open(argv[1],O_RDONLY,0))<0) ||
((oldsize=lseek(fd,0,SEEK_END))==-1) ||
((old=malloc(oldsize+1))==NULL) ||
(lseek(fd,0,SEEK_SET)!=0) ||
(read(fd,old,oldsize)!=oldsize) ||
(close(fd)==-1)) err(1,"%s",argv[1]);
/* Allocate newsize+1 bytes instead of newsize bytes to ensure
that we never try to malloc(0) and get a NULL pointer */
if(((fd=open(argv[2],O_RDONLY,0))<0) ||
((newsize=lseek(fd,0,SEEK_END))==-1) ||
((new=malloc(newsize+1))==NULL) ||
(lseek(fd,0,SEEK_SET)!=0) ||
(read(fd,new,newsize)!=newsize) ||
(close(fd)==-1)) err(1,"%s",argv[2]);
/* Create the patch file */
if ((pf = fopen(argv[3], "w")) == NULL)
err(1, "%s", argv[3]);
/* Write header (signature+newsize)*/
offtout(newsize, buf);
if (fwrite("ENDSLEY/BSDIFF43", 16, 1, pf) != 1 ||
fwrite(buf, sizeof(buf), 1, pf) != 1)
err(1, "Failed to write header");
if (NULL == (bz2 = BZ2_bzWriteOpen(&bz2err, pf, 9, 0, 0)))
errx(1, "BZ2_bzWriteOpen, bz2err=%d", bz2err);
stream.opaque = bz2;
if (bsdiff(old, oldsize, new, newsize, &stream))
err(1, "bsdiff");
BZ2_bzWriteClose(&bz2err, bz2, 0, NULL, NULL);
if (bz2err != BZ_OK)
err(1, "BZ2_bzWriteClose, bz2err=%d", bz2err);
if (fclose(pf))
err(1, "fclose");
/* Free the memory we used */
free(old);
free(new);
return 0;
}

@ -1,44 +0,0 @@
/*-
* Copyright 2003-2005 Colin Percival
* Copyright 2012 Matthew Endsley
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted providing that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef BSDIFF_H
# define BSDIFF_H
# include <stddef.h>
# include <stdint.h>
struct bsdiff_stream
{
void* opaque;
void* (*malloc)(size_t size);
void (*free)(void* ptr);
int (*write)(struct bsdiff_stream* stream, const void* buffer, int size);
};
int bsdiff(const uint8_t* old, int64_t oldsize, const uint8_t* new, int64_t newsize, struct bsdiff_stream* stream);
#endif

@ -1,201 +0,0 @@
/*-
* Copyright 2003-2005 Colin Percival
* Copyright 2012 Matthew Endsley
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted providing that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <limits.h>
#include "bspatch.h"
#ifdef __cplusplus
extern "C" {
#endif
int bspatch_main(int argc, char** argv);
#ifdef __cplusplus
}
#endif
static int64_t offtin(uint8_t *buf)
{
int64_t y;
y=buf[7]&0x7F;
y=y*256;y+=buf[6];
y=y*256;y+=buf[5];
y=y*256;y+=buf[4];
y=y*256;y+=buf[3];
y=y*256;y+=buf[2];
y=y*256;y+=buf[1];
y=y*256;y+=buf[0];
if(buf[7]&0x80) y=-y;
return y;
}
int bspatch(const uint8_t* old, int64_t oldsize, uint8_t* new, int64_t newsize, struct bspatch_stream* stream)
{
uint8_t buf[8];
int64_t oldpos,newpos;
int64_t ctrl[3];
int64_t i;
oldpos=0;newpos=0;
while(newpos<newsize) {
/* Read control data */
for(i=0;i<=2;i++) {
if (stream->read(stream, buf, 8))
return -1;
ctrl[i]=offtin(buf);
};
/* Sanity-check */
if (ctrl[0]<0 || ctrl[0]>INT_MAX ||
ctrl[1]<0 || ctrl[1]>INT_MAX ||
newpos+ctrl[0]>newsize)
return -1;
/* Read diff string */
if (stream->read(stream, new + newpos, ctrl[0]))
return -1;
/* Add old data to diff string */
for(i=0;i<ctrl[0];i++)
if((oldpos+i>=0) && (oldpos+i<oldsize))
new[newpos+i]+=old[oldpos+i];
/* Adjust pointers */
newpos+=ctrl[0];
oldpos+=ctrl[0];
/* Sanity-check */
if(newpos+ctrl[1]>newsize)
return -1;
/* Read extra string */
if (stream->read(stream, new + newpos, ctrl[1]))
return -1;
/* Adjust pointers */
newpos+=ctrl[1];
oldpos+=ctrl[2];
};
return 0;
}
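bspatch() itself never touches a file: every byte of the patch body arrives through stream->read. Besides the bzip2-backed reader used by bspatch_main below, a caller could feed it from memory; the mem_reader/mem_read names in this sketch are hypothetical and only illustrate the callback contract.

#include <string.h>

/* Hypothetical in-memory patch source for bspatch(); advances a cursor over
 * a buffer that already holds the decompressed patch body. */
struct mem_reader { const uint8_t* data; int64_t size; int64_t pos; };

static int mem_read(const struct bspatch_stream* stream, void* buffer, int length)
{
    struct mem_reader* r = (struct mem_reader*)stream->opaque;
    if (length < 0 || r->pos + length > r->size)
        return -1;                      /* refuse to read past the end */
    memcpy(buffer, r->data + r->pos, (size_t)length);
    r->pos += length;
    return 0;
}

/* Usage sketch:
 *   struct mem_reader r = { patch_body, patch_len, 0 };
 *   struct bspatch_stream s = { &r, mem_read };
 *   if (bspatch(oldbuf, oldsize, newbuf, newsize, &s)) ... corrupt patch ...
 */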
#if defined(BSPATCH_EXECUTABLE)
#include <bzlib.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <err.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <fcntl.h>
static int bz2_read(const struct bspatch_stream* stream, void* buffer, int length)
{
int n;
int bz2err;
BZFILE* bz2;
bz2 = (BZFILE*)stream->opaque;
n = BZ2_bzRead(&bz2err, bz2, buffer, length);
if (n != length)
return -1;
return 0;
}
int bspatch_main(int argc,char * argv[])
{
FILE * f;
int fd;
int bz2err;
uint8_t header[24];
uint8_t *old, *new;
int64_t oldsize, newsize;
BZFILE* bz2;
struct bspatch_stream stream;
struct stat sb;
if(argc!=4) errx(1,"usage: %s oldfile newfile patchfile\n",argv[0]);
/* Open patch file */
if ((f = fopen(argv[3], "r")) == NULL)
err(1, "fopen(%s)", argv[3]);
/* Read header */
if (fread(header, 1, 24, f) != 24) {
if (feof(f))
errx(1, "Corrupt patch\n");
err(1, "fread(%s)", argv[3]);
}
/* Check for appropriate magic */
if (memcmp(header, "ENDSLEY/BSDIFF43", 16) != 0)
errx(1, "Corrupt patch\n");
/* Read lengths from header */
newsize=offtin(header+16);
if(newsize<0)
errx(1,"Corrupt patch\n");
/* Close patch file and re-open it via libbzip2 at the right places */
if(((fd=open(argv[1],O_RDONLY,0))<0) ||
((oldsize=lseek(fd,0,SEEK_END))==-1) ||
((old=malloc(oldsize+1))==NULL) ||
(lseek(fd,0,SEEK_SET)!=0) ||
(read(fd,old,oldsize)!=oldsize) ||
(fstat(fd, &sb)) ||
(close(fd)==-1)) err(1,"%s",argv[1]);
if((new=malloc(newsize+1))==NULL) err(1,NULL);
if (NULL == (bz2 = BZ2_bzReadOpen(&bz2err, f, 0, 0, NULL, 0)))
errx(1, "BZ2_bzReadOpen, bz2err=%d", bz2err);
stream.read = bz2_read;
stream.opaque = bz2;
if (bspatch(old, oldsize, new, newsize, &stream))
errx(1, "bspatch");
/* Clean up the bzip2 reads */
BZ2_bzReadClose(&bz2err, bz2);
fclose(f);
/* Write the new file */
if(((fd=open(argv[2],O_CREAT|O_TRUNC|O_WRONLY,sb.st_mode))<0) ||
(write(fd,new,newsize)!=newsize) || (close(fd)==-1))
err(1,"%s",argv[2]);
free(new);
free(old);
return 0;
}
#endif

@ -1,41 +0,0 @@
/*-
* Copyright 2003-2005 Colin Percival
* Copyright 2012 Matthew Endsley
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted providing that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef BSPATCH_H
# define BSPATCH_H
# include <stdint.h>
struct bspatch_stream
{
void* opaque;
int (*read)(const struct bspatch_stream* stream, void* buffer, int length);
};
int bspatch(const uint8_t* old, int64_t oldsize, uint8_t* new, int64_t newsize, struct bspatch_stream* stream);
#endif
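Taken together with bspatch_main above, the header declared here implies the patch layout the tool consumes. Summarized as a comment (derived from the code in this diff, not from separate documentation):

/*
 * Patch layout as read by bspatch_main/bspatch in this diff:
 *   bytes  0..15  magic "ENDSLEY/BSDIFF43"
 *   bytes 16..23  new file size, in offtin()'s sign-and-magnitude encoding
 *   bytes 24..    bzip2 stream of repeated blocks:
 *                   3 x 8-byte control integers (diff len, extra len, old seek)
 *                   <diff len>  bytes that are added to the old data
 *                   <extra len> bytes that are copied verbatim
 */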

@ -1,97 +0,0 @@
/* config.h. Generated from config.h.in by configure. */
/* config.h.in. Generated from configure.ac by autoheader. */
/* Define to 1 if you have the <fcntl.h> header file. */
#define HAVE_FCNTL_H 1
/* Define to 1 if you have the <inttypes.h> header file. */
#define HAVE_INTTYPES_H 1
/* Define to 1 if you have the `bz2' library (-lbz2). */
#define HAVE_LIBBZ2 1
/* Define to 1 if you have the <limits.h> header file. */
#define HAVE_LIMITS_H 1
/* Define to 1 if your system has a GNU libc compatible `malloc' function, and
to 0 otherwise. */
#define HAVE_MALLOC 1
/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1
/* Define to 1 if you have the `memset' function. */
#define HAVE_MEMSET 1
/* Define to 1 if you have the <stddef.h> header file. */
#define HAVE_STDDEF_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1
/* Define to 1 if you have the <strings.h> header file. */
#define HAVE_STRINGS_H 1
/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H 1
/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H 1
/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Name of package */
#define PACKAGE "bsdiff"
/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT ""
/* Define to the full name of this package. */
#define PACKAGE_NAME "bsdiff"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "bsdiff 0.1"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "bsdiff"
/* Define to the home page for this package. */
#define PACKAGE_URL ""
/* Define to the version of this package. */
#define PACKAGE_VERSION "0.1"
/* Define to 1 if you have the ANSI C header files. */
#define STDC_HEADERS 1
/* Version number of package */
#define VERSION "0.1"
/* Define for Solaris 2.5.1 so the uint8_t typedef from <sys/synch.h>,
<pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
#define below would cause a syntax error. */
/* #undef _UINT8_T */
/* Define to the type of a signed integer type of width exactly 64 bits if
such a type exists and the standard includes do not define it. */
/* #undef int64_t */
/* Define to rpl_malloc if the replacement function should be used. */
/* #undef malloc */
/* Define to `long int' if <sys/types.h> does not define. */
/* #undef off_t */
/* Define to `unsigned int' if <sys/types.h> does not define. */
/* #undef size_t */
/* Define to the type of an unsigned integer type of width exactly 8 bits if
such a type exists and the standard includes do not define it. */
/* #undef uint8_t */

Some files were not shown because too many files have changed in this diff.
