diff --git a/app/build.gradle b/app/build.gradle
index 489eae03..1b0869e2 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -5,7 +5,7 @@ plugins {
 // 10,00,000 major-minor-build
 def AppMajorVersion = 1
 def AppMinorVersion = 3
-def AppBuildNumber = 70
+def AppBuildNumber = 72
 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
 def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt
index 9d18a0af..6ffc05ee 100644
--- a/app/src/main/cpp/CMakeLists.txt
+++ b/app/src/main/cpp/CMakeLists.txt
@@ -21,7 +21,7 @@ if(ANDROID_ABI STREQUAL "armeabi-v7a")
     add_definitions(-DUSING_N938)
 elseif(ANDROID_ABI STREQUAL "arm64-v8a")
     # add_definitions(-DUSING_N938)
-    add_definitions(-DUSING_PLZ)
+    # add_definitions(-DUSING_PLZ)
 endif()
 
 # OUTPUT_DBG_INFO: 输出调试相关信息
@@ -154,7 +154,7 @@ add_definitions(-DDISABLE_RTTI)
 # include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
 # SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
 
-SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp )
+SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp )
 
 SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
@@ -396,6 +396,9 @@ add_library( # Sets the name of the library.
         ncnn/yolov5ncnn.cpp
 
         netcamera/httpclient.cpp
+        netcamera/VendorCtrl.cpp
+        netcamera/YuShiCtrl.cpp
+        netcamera/HangYuCtrl.cpp
 
         ${STREAMING_SRCS}
 
@@ -445,6 +448,7 @@ add_library( # Sets the name of the library.
         ${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
        ${TERM_CORE_ROOT}/Client/Database.cpp
         ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
+        ${TERM_CORE_ROOT}/Client/DataController.cpp
         )
 
diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index f4021b17..bfc0e992 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -9,6 +9,11 @@
 #include "CvText.h"
 #include "PositionHelper.h"
 #include "DngCreator.h"
+#include "media/Streaming.h"
+
+#include "netcamera/VendorCtrl.h"
+#include "netcamera/YuShiCtrl.h"
+#include "netcamera/HangYuCtrl.h"
 
 #include "media/RTSPRecorder.h"
 
@@ -1711,8 +1716,8 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
     }
     XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
 
-    // std::shared_ptr ethernetPowerCtrl = std::make_shared(1);
-    std::shared_ptr ethernetPowerCtrl;
+    std::shared_ptr ethernetPowerCtrl = std::make_shared(1);
+    // std::shared_ptr ethernetPowerCtrl;
 
     net_handle_t netHandle = GetEthnetHandle();
     if (netHandle == 0)
@@ -1747,64 +1752,37 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
         // SetStaticIp();
         std::this_thread::sleep_for(std::chrono::milliseconds(256));
 
-    NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 };
-    if (localPhotoInfo.vendor == 1)
-    {
-        // Hai Kang
-        netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
-        snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
-    }
-    else if (localPhotoInfo.vendor == 2)
-    {
-        // Hang Yu
-        strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
-    }
-    else if (localPhotoInfo.vendor == 3)
-    {
-        // Yu Shi
-        netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
-        int streamSid = 0; // should put into config
-        // rtsp://192.168.0.13:554/media/video1
-        snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)localPhotoInfo.cameraId);
-        // strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
-    }
-    else if (localPhotoInfo.vendor == 5)
-    {
-        // Hang Yu - New
-        netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
-        // http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
-        // http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
-        // http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
-        snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)localPhotoInfo.cameraId);
-    }
-    else
+    struct in_addr addr;
+    char ip[32] = { 0 };
+    addr.s_addr = localPhotoInfo.ip;
+    strcpy(ip, inet_ntoa(addr));
+    // strcpy(netPhotoInfo.outputPath, path.c_str());
+
+    VendorCtrl* vendorCtrl = MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip, localPhotoInfo.userName, localPhotoInfo.password, netHandle);
+    if (vendorCtrl == NULL)
     {
         XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
         TakePhotoCb(0, localPhotoInfo, "", 0);
         return false;
     }
-    struct in_addr addr;
-    addr.s_addr = localPhotoInfo.ip;
-    strcpy(netPhotoInfo.ip, inet_ntoa(addr));
-    strcpy(netPhotoInfo.outputPath, path.c_str());
-    if (!localPhotoInfo.userName.empty())
-    {
-        size_t len = std::min(sizeof(netPhotoInfo.userName) - 1, localPhotoInfo.userName.size());
-        strncpy(netPhotoInfo.userName, localPhotoInfo.userName.c_str(), len);
-    }
-    if (!localPhotoInfo.password.empty())
+    std::string streamingUrl = vendorCtrl->GetStreamingUrl(localPhotoInfo.cameraId);
+
+    if (streamingUrl.empty())
     {
-        size_t len = std::min(sizeof(netPhotoInfo.password) - 1, localPhotoInfo.password.size());
-        strncpy(netPhotoInfo.password, localPhotoInfo.password.c_str(), len);
+        XYLOG(XYLOG_SEVERITY_ERROR, "Invalid Streaming URL CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
+        TakePhotoCb(0, localPhotoInfo, "", 0);
+        return false;
     }
+    // strcpy(netPhotoInfo.outputPath, path.c_str());
+    // strcpy(netPhotoInfo.interface, "eth0");
 
     localPhotoInfo.photoTime = time(NULL);
 
     std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
     // RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
     // dumper.start();
-    dumpRtspToMp4(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
+    dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, GetEthnetHandle());
 
     ethernetPowerCtrl.reset();
     XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
 
@@ -1820,7 +1798,7 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
     {
         TakePhotoCb(0, localPhotoInfo, "", 0);
         XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
-              localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
+              localPhotoInfo.photoId, ip, streamingUrl.c_str());
     }
     // Notify to take next photo
     // TakePhotoCb(1, localPhotoInfo, "", takingTime);
@@ -1832,8 +1810,92 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
     return true;
 }
 
-bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector& osds, std::shared_ptr powerCtrlPtr)
+bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& photoInfo, const std::string& url, std::vector& osds, std::shared_ptr powerCtrlPtr)
 {
+#if 0
+    if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM)
+    {
+        std::map >::iterator it = m_streamings.find(photoInfo.channel);
+        if (it != m_streamings.end())
+        {
+            it->second->stop();
+            it->second.reset();
+            m_streamings.erase(it);
+        }
+
+        NET_PHOTO_INFO netPhotoInfo = { 0, 0 };
+        if (photoInfo.vendor == 1)
+        {
+            // Hai Kang
+            netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
+            snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
+        }
+        else if (photoInfo.vendor == 2)
+        {
+            // Hang Yu
+            strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
+        }
+        else if (photoInfo.vendor == 3)
+        {
+            // Yu Shi
+            netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
+            int streamSid = 0; // should put into config
+            // rtsp://192.168.0.13:554/media/video1
+            snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)photoInfo.cameraId);
+            // strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
+        }
+        else if (photoInfo.vendor == 5)
+        {
+            // Hang Yu - New
+            netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
+            // http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
+            // http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
+            // http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
+            snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)photoInfo.cameraId);
+        }
+        else
+        {
+            XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.vendor, (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
+            TakePhotoCb(0, photoInfo, "", 0);
+            return false;
+        }
+
+        StreamForwarder* forwarder = new StreamForwarder();
+        m_streamings[photoInfo.channel] = std::shared_ptr((Streaming*)forwarder);
+        // Initialize with RTSP input and RTMP output
+        if (!forwarder->initialize(std::string(netPhotoInfo.url), url)) {
+            std::cerr << "Failed to initialize stream forwarder" << std::endl;
+            return -1;
+        }
+
+        // Optional: Set callback to process video frames
+#if 0
+        forwarder->setFrameCallback([](uint8_t* data, int linesize, int width, int height) {
+            // Process frame data here
+            // Example: Add OSD overlay
+        });
+#endif
+
+        // Start forwarding
+        forwarder->start();
+
+        // Wait for user input to stop
+        // std::cout << "Press Enter to stop streaming..." << std::endl;
+        // std::cin.get();
+
+        // forwarder.stop();
+    }
+    else if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF)
+    {
+        auto it = m_streamings.find(photoInfo.channel);
+        if (it != m_streamings.end())
+        {
+            it->second->stop();
+            it->second.reset();
+            m_streamings.erase(it);
+        }
+    }
+#endif
     return true;
 }
 
@@ -2037,7 +2099,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
         std::this_thread::sleep_for(std::chrono::seconds(5));
     }
 
-    CameraPhotoCmd(ts, localPhotoInfo.channel, 0, localPhotoInfo.resolution, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
+    CameraPhotoCmd(ts, localPhotoInfo.channel, TAKE_PHOTO, localPhotoInfo.resolution, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
     XYLOG(XYLOG_SEVERITY_INFO, "Taking photo over");
     if(localPhotoInfo.scheduleTime == 0)
     {
@@ -4957,4 +5019,25 @@ void CPhoneDevice::SetStaticIp()
 #endif
     XYLOG(XYLOG_SEVERITY_WARNING, "No Static IP Confg");
 }
-}
\ No newline at end of file
+}
+
+VendorCtrl* CPhoneDevice::MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle)
+{
+    VendorCtrl* vendorCtrl = NULL;
+    switch (vendor)
+    {
+    case 1:
+        // Hai Kang
+        break;
+    case 2:
+        break;
+    case 3:
+        // Yu Shi
+        vendorCtrl = new YuShiCtrl(ip, userName, password, channel, netHandle);
+        break;
+    case 5:
+        // Hang Yu - New
+        vendorCtrl = new HangYuCtrl(ip, userName, password, channel, netHandle);
+    }
+    return vendorCtrl;
+}
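MakeVendorCtrl returns a raw, heap-allocated controller (and NULL for the Hai Kang and vendor-2 cases, which are not implemented yet), and the TakeVideoWithNetCamera hunk above keeps the result in a plain VendorCtrl* with no visible delete. A minimal sketch of how the call site could take ownership instead — illustrative only, assuming nothing else frees the object:

    #include <memory>

    // Inside CPhoneDevice::TakeVideoWithNetCamera (sketch, not part of the patch):
    std::unique_ptr<VendorCtrl> vendorCtrl(
            MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip,
                           localPhotoInfo.userName, localPhotoInfo.password, netHandle));
    if (!vendorCtrl)
    {
        TakePhotoCb(0, localPhotoInfo, "", 0);
        return false;
    }
    std::string streamingUrl = vendorCtrl->GetStreamingUrl(localPhotoInfo.cameraId);
    // ... continue as in the hunk above; the controller is released on every return path.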
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index 50c1be77..daced027 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -154,6 +154,8 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) {
 #endif
 
 class PowerControl;
+class VendorCtrl;
+class Streaming;
 
 class CPhoneDevice : public IDevice
 {
@@ -266,6 +268,8 @@ public:
 
     net_handle_t GetEthnetHandle() const;
 
+    VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle);
+
 protected:
 
     std::string GetFileName() const;
@@ -422,6 +426,8 @@ protected:
     std::atomic m_collecting;
     unsigned long long localDelayTime;
 
+    std::map > m_streamings;
+
 };
 
diff --git a/app/src/main/cpp/SensorsProtocol.h b/app/src/main/cpp/SensorsProtocol.h
index 7a5b648b..034e2985 100644
--- a/app/src/main/cpp/SensorsProtocol.h
+++ b/app/src/main/cpp/SensorsProtocol.h
@@ -106,7 +106,7 @@
 #define D_OPEN_MODULE_POWER 0x0009000C /* 打开机芯电源(1 有效)*/
 
 /* 摄像机下发命令宏定义*/
-#define TAKE_PHOTO 0 /* 拍照*/
+#define TAKE_PHOTO 20000 /* 拍照*/
 #define SET_BAUD 10000 /* 设置球机波特率*/
 #define STOP_CMD 10005 /* 取消或停止指令*/
 #define AUTO_SCAN 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
diff --git a/app/src/main/cpp/media/RTSPRecorder.cpp b/app/src/main/cpp/media/RTSPRecorder.cpp
index 11edccfa..85cbf668 100644
--- a/app/src/main/cpp/media/RTSPRecorder.cpp
+++ b/app/src/main/cpp/media/RTSPRecorder.cpp
@@ -6,6 +6,7 @@
 #include
 #include
 #include
+#include
 extern "C" {
 #include
 #include
@@ -21,15 +22,17 @@ extern "C" {
 #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
 
-void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration)
+void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
 {
     AVFormatContext* inputFormatContext = nullptr;
     AVFormatContext* outputFormatContext = nullptr;
     AVPacket packet;
+    AVDictionary *options = NULL;
 
     av_register_all();
     avformat_network_init();
 
+
     // Open input RTMP stream
     if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
         fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
@@ -126,28 +129,61 @@ void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duratio
 }
 
-void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration)
+void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
 {
     AVFormatContext* inputFormatContext = nullptr;
     AVFormatContext* outputFormatContext = nullptr;
     AVPacket packet;
+    AVDictionary *options = NULL;
+    int res = 0;
 
     av_register_all();
     avformat_network_init();
 
+    // Set RTSP transport protocol option before opening
+    av_dict_set(&options, "rtsp_transport", "tcp", 0);
+
+    // Set custom socket options via the protocol whitelist; allocate the context
+    // first so the whitelist can be attached to it before the stream is opened
+    inputFormatContext = avformat_alloc_context();
+    inputFormatContext->protocol_whitelist = av_strdup("file,udp,rtp,tcp,rtsp");
+
     // Open input RTSP stream
-    if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
-        fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
+    if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, &options) != 0) {
+        av_dict_free(&options);
+        // fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
         return;
     }
+    av_dict_free(&options);
 
     // Retrieve input stream information
     if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
-        fprintf(stderr, "Could not find stream information\n");
+        // fprintf(stderr, "Could not find stream information\n");
         avformat_close_input(&inputFormatContext);
         return;
     }
 
+    // Get socket file descriptor
+    if (NETWORK_UNSPECIFIED != netHandle)
+    {
+        int fd = -1;
+        if (inputFormatContext->pb) {
+            AVIOContext *io_ctx = inputFormatContext->pb;
+            // const char *url = io_ctx->filename;
+
+            // You can access socket options using av_opt API
+            res = av_opt_get_int(io_ctx, "fd", AV_OPT_SEARCH_CHILDREN, (int64_t*)&fd);
+            if (res >= 0 && fd >= 0) {
+                // printf("Socket file descriptor: %d\n", fd);
+
+                int res = android_setsocknetwork(netHandle, fd);
+                if (res == -1)
+                {
+                    int errcode = errno;
+                    // printf("android_setsocknetwork errno=%d", errcode);
+                    // XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
+                }
+            }
+        }
+    }
+
+
     // Open output MP4 file
     if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
         fprintf(stderr, "Could not create output context\n");
diff --git a/app/src/main/cpp/media/RTSPRecorder.h b/app/src/main/cpp/media/RTSPRecorder.h
index 1133c8e0..c406a43e 100644
--- a/app/src/main/cpp/media/RTSPRecorder.h
+++ b/app/src/main/cpp/media/RTSPRecorder.h
@@ -6,10 +6,11 @@
 #define MICROPHOTO_RTSPRECORDER_H
 
 #include
+#include
 
 // void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
-void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration);
-void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration);
+void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
+void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
 
 class RTSPRecorder {
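The extra net_handle_t parameter exists so the recording socket can be bound to a specific network (the call site passes GetEthnetHandle()): dumpRtspToMp4 pulls the descriptor out of the AVIOContext with av_opt_get_int("fd") and hands it to android_setsocknetwork. The same NDK call works for any hand-made socket; a self-contained sketch, not part of the patch, with error handling reduced to return codes:

    #include <android/multinetwork.h>
    #include <sys/socket.h>
    #include <unistd.h>

    // Create a TCP socket, pin it to the given Android network, then connect.
    static int ConnectOnNetwork(net_handle_t netHandle, const struct sockaddr* addr, socklen_t addrLen)
    {
        int fd = socket(AF_INET, SOCK_STREAM, 0);
        if (fd < 0)
            return -1;
        if (netHandle != NETWORK_UNSPECIFIED && android_setsocknetwork(netHandle, fd) != 0)
        {
            close(fd);              // errno holds the failure reason
            return -1;
        }
        if (connect(fd, addr, addrLen) != 0)
        {
            close(fd);
            return -1;
        }
        return fd;
    }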
diff --git a/app/src/main/cpp/media/Streaming.cpp b/app/src/main/cpp/media/Streaming.cpp
new file mode 100644
index 00000000..a0bd9b3a
--- /dev/null
+++ b/app/src/main/cpp/media/Streaming.cpp
@@ -0,0 +1,159 @@
+//
+// Created by Matthew on 2025/3/11.
+//
+
+#include "Streaming.h"
+
+#include
+#include
+#include
+#include
+#include
+
+#if 0
+StreamForwarder::~StreamForwarder() {
+    stop();
+    if (inputCtx) {
+        avformat_close_input(&inputCtx);
+    }
+    if (outputCtx) {
+        if (outputCtx->pb) {
+            avio_closep(&outputCtx->pb);
+        }
+        avformat_free_context(outputCtx);
+    }
+}
+
+bool StreamForwarder::initialize(const std::string& inputUrl, const std::string& outputUrl) {
+    if (!openInput(inputUrl)) {
+        return false;
+    }
+
+    if (!openOutput(outputUrl)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool StreamForwarder::openInput(const std::string& inputUrl) {
+    inputCtx = avformat_alloc_context();
+    if (!inputCtx) {
+        return false;
+    }
+
+    if (avformat_open_input(&inputCtx, inputUrl.c_str(), nullptr, nullptr) < 0) {
+        return false;
+    }
+
+    if (avformat_find_stream_info(inputCtx, nullptr) < 0) {
+        return false;
+    }
+
+    return true;
+}
+
+bool StreamForwarder::openOutput(const std::string& outputUrl) {
+    int ret = avformat_alloc_output_context2(&outputCtx, nullptr, "flv", outputUrl.c_str());
+    if (ret < 0) {
+        return false;
+    }
+
+    // Copy streams from input to output
+    for (unsigned int i = 0; i < inputCtx->nb_streams; i++) {
+        AVStream* inStream = inputCtx->streams[i];
+        AVStream* outStream = avformat_new_stream(outputCtx, inStream->codec->codec);
+        if (!outStream) {
+            return false;
+        }
+
+        ret = avcodec_copy_context(outStream->codec, inStream->codec);
+        if (ret < 0) {
+            return false;
+        }
+    }
+
+    // Open output file
+    if (!(outputCtx->oformat->flags & AVFMT_NOFILE)) {
+        ret = avio_open(&outputCtx->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
+        if (ret < 0) {
+            return false;
+        }
+    }
+
+    // Write header
+    ret = avformat_write_header(outputCtx, nullptr);
+    if (ret < 0) {
+        return false;
+    }
+
+    return true;
+}
+
+void StreamForwarder::setFrameCallback(std::function callback) {
+    frameCallback = callback;
+}
+
+void StreamForwarder::start() {
+    isRunning = true;
+    forwardPackets();
+}
+
+void StreamForwarder::stop() {
+    isRunning = false;
+}
+
+void StreamForwarder::forwardPackets() {
+    AVPacket packet;
+    AVFrame* frame = av_frame_alloc();
+
+    while (isRunning) {
+        if (av_read_frame(inputCtx, &packet) < 0) {
+            break;
+        }
+
+        // Process video frames if callback is set
+        if (frameCallback && packet.stream_index == 0) { // Assuming video is stream 0
+            AVCodecContext* codecCtx = inputCtx->streams[packet.stream_index]->codec;
+            int ret = avcodec_send_packet(codecCtx, &packet);
+            if (ret < 0) {
+                continue;
+            }
+
+            while (ret >= 0) {
+                ret = avcodec_receive_frame(codecCtx, frame);
+                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
+                    break;
+                } else if (ret < 0) {
+                    goto end;
+                }
+
+                processFrame(frame);
+            }
+        }
+
+        // Forward packet
+        av_packet_rescale_ts(&packet,
+                             inputCtx->streams[packet.stream_index]->time_base,
+                             outputCtx->streams[packet.stream_index]->time_base);
+
+        int ret = av_interleaved_write_frame(outputCtx, &packet);
+        if (ret < 0) {
+            break;
+        }
+
+        av_packet_unref(&packet);
+    }
+
+end:
+    av_frame_free(&frame);
+    av_write_trailer(outputCtx);
+}
+
+void StreamForwarder::processFrame(AVFrame* frame) {
+    if (frameCallback) {
+        frameCallback(frame->data[0], frame->linesize[0],
+                      frame->width, frame->height);
+    }
+}
+#endif
\ No newline at end of file
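Streaming.cpp lands fully wrapped in #if 0, as does the StreamForwarder declaration in Streaming.h below; only the trivial Streaming base class is live. For reference, this is roughly how the forwarder would be driven once enabled, mirroring the disabled block in StartPushStreaming — a sketch only: start() loops in forwardPackets() until stop() is called, so it is assumed to run on a worker thread:

    #include <memory>
    #include <string>
    #include <thread>

    void RunForwarder(const std::string& rtspUrl, const std::string& rtmpUrl)
    {
        auto forwarder = std::make_shared<StreamForwarder>();
        if (!forwarder->initialize(rtspUrl, rtmpUrl))
            return;                                                   // input or output could not be opened

        std::thread worker([forwarder]() { forwarder->start(); });   // blocks until stop()

        // ... later, when the channel is switched off:
        forwarder->stop();
        worker.join();
    }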
diff --git a/app/src/main/cpp/media/Streaming.h b/app/src/main/cpp/media/Streaming.h
new file mode 100644
index 00000000..9819f40f
--- /dev/null
+++ b/app/src/main/cpp/media/Streaming.h
@@ -0,0 +1,50 @@
+//
+// Created by Matthew on 2025/3/11.
+//
+
+#ifndef MICROPHOTO_STREAMING_H
+#define MICROPHOTO_STREAMING_H
+
+#include
+#include
+
+#include
+
+extern "C" {
+#include
+#include
+#include
+#include
+}
+
+class Streaming
+{
+public:
+    virtual ~Streaming() {}
+    virtual void start() {}
+    virtual void stop() {}
+};
+#if 0
+class StreamForwarder : public Streaming
+{
+private:
+    AVFormatContext* inputCtx = nullptr;
+    AVFormatContext* outputCtx = nullptr;
+    bool isRunning = false;
+
+public:
+    StreamForwarder() = default;
+    virtual ~StreamForwarder();
+
+    bool initialize(const std::string& inputUrl, const std::string& outputUrl);
+    virtual void start();
+    virtual void stop();
+
+private:
+    bool openInput(const std::string& inputUrl);
+    bool openOutput(const std::string& outputUrl);
+    void forwardPackets();
+};
+#endif
+
+#endif //MICROPHOTO_STREAMING_H
diff --git a/app/src/main/cpp/netcamera/HangYuCtrl.cpp b/app/src/main/cpp/netcamera/HangYuCtrl.cpp
new file mode 100644
index 00000000..66200efd
--- /dev/null
+++ b/app/src/main/cpp/netcamera/HangYuCtrl.cpp
@@ -0,0 +1,95 @@
+//
+// Created by Matthew on 2025/3/4.
+//
+
+#include "HangYuCtrl.h"
+#include "netcamera.h"
+#include "httpclient.h"
+
+#include
+
+HangYuCtrl::~HangYuCtrl()
+{
+
+}
+
+bool HangYuCtrl::SetOsd()
+{
+    // /LAPI/V1.0/Channels//Media/OSDs/Contents
+    return false;
+}
+
+void HangYuCtrl::EnableOsd(bool enable)
+{
+    // return false;
+}
+
+std::string HangYuCtrl::GetStreamingUrl(uint8_t channel)
+{
+    // /LAPI/V1.0/Channels//Media/Video/Streams//LiveStreamURL?TransType=&TransProtocol=
+    char url[128] = { 0 };
+    snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
+
+    std::vector resData;
+
+    int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
+    if (res != 0 || resData.empty())
+    {
+        return "";
+    }
+
+    resData.push_back(0);
+    const char* start = strstr((const char*)&resData[0], "");
+    if (start == NULL)
+    {
+        return "";
+    }
+    start += 9;
+    const char* end = strstr(start, "");
+    if (end == NULL)
+    {
+        return "";
+    }
+
+    return std::string(start, end);
+}
+
+bool HangYuCtrl::UpdateTime(time_t ts)
+{
+    // /LAPI/V1.0/System/Time
+
+    //
+    //
+
+    std::string reqData = "";
+
+    std::string url = "http://" + m_ip + "/System/Time";
+    std::vector resData;
+    int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
+
+    if (res != 0)
+    {
+        return false;
+    }
+
+    return true;
+}
+
+bool HangYuCtrl::TakePhoto(std::vector& img)
+{
+    return false;
+}
+
+bool HangYuCtrl::TakeVideo(uint32_t duration, std::string path)
+{
+    return false;
+}
\ No newline at end of file
diff --git a/app/src/main/cpp/netcamera/HangYuCtrl.h b/app/src/main/cpp/netcamera/HangYuCtrl.h
new file mode 100644
index 00000000..3cb86ede
--- /dev/null
+++ b/app/src/main/cpp/netcamera/HangYuCtrl.h
@@ -0,0 +1,28 @@
+//
+// Created by Matthew on 2025/3/4.
+//
+
+#ifndef __MICROPHOTO_HANGYUCTRL_H__
+#define __MICROPHOTO_HANGYUCTRL_H__
+
+#include "VendorCtrl.h"
+
+class HangYuCtrl : public VendorCtrl
+{
+public:
+    using VendorCtrl::VendorCtrl;
+    virtual ~HangYuCtrl();
+
+    virtual bool SetOsd();
+    virtual void EnableOsd(bool enable);
+    virtual std::string GetStreamingUrl(uint8_t channel);
+    virtual bool UpdateTime(time_t ts);
+    virtual bool TakePhoto(std::vector& img);
+    virtual bool TakeVideo(uint32_t duration, std::string path);
+
+private:
+
+};
+
+
+#endif //__MICROPHOTO_HANGYUCTRL_H__
diff --git a/app/src/main/cpp/netcamera/VendorCtrl.cpp b/app/src/main/cpp/netcamera/VendorCtrl.cpp
index 81f5d1c8..96ee7913 100644
--- a/app/src/main/cpp/netcamera/VendorCtrl.cpp
+++ b/app/src/main/cpp/netcamera/VendorCtrl.cpp
@@ -3,8 +3,8 @@
 //
 
 #include "VendorCtrl.h"
 
-VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password) :
-    m_ip(ip), m_userName(userName), m_password(password), m_channel(channel)
+VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle) :
+    m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle)
 {
 }
 
 std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
diff --git a/app/src/main/cpp/netcamera/VendorCtrl.h b/app/src/main/cpp/netcamera/VendorCtrl.h
index 66a18e4a..faf4b7ae 100644
--- a/app/src/main/cpp/netcamera/VendorCtrl.h
+++ b/app/src/main/cpp/netcamera/VendorCtrl.h
@@ -7,17 +7,19 @@
 
 #include
 #include
+#include
 
 class VendorCtrl {
 public:
-    VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel);
-    virtual ~VendorCtrl() = 0;
+    VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle);
+    virtual ~VendorCtrl() {}
 
     virtual bool SetOsd() = 0;
     virtual void EnableOsd(bool enable) = 0;
     virtual std::string GetStreamingUrl(uint8_t channel) = 0;
     virtual bool UpdateTime(time_t ts) = 0;
     virtual bool TakePhoto(std::vector& img) = 0;
+    virtual bool TakeVideo(uint32_t duration, std::string path) = 0;
 
 protected:
 
@@ -28,6 +30,7 @@ protected:
     std::string m_userName;
     std::string m_password;
     uint8_t m_channel;
+    net_handle_t m_netHandle;
 };
diff --git a/app/src/main/cpp/netcamera/YuShiCtrl.cpp b/app/src/main/cpp/netcamera/YuShiCtrl.cpp
index 9c51f8f0..24eeacf7 100644
--- a/app/src/main/cpp/netcamera/YuShiCtrl.cpp
+++ b/app/src/main/cpp/netcamera/YuShiCtrl.cpp
@@ -17,10 +17,10 @@ bool YuShiCtrl::SetOsd()
 
 void YuShiCtrl::EnableOsd(bool enable)
 {
-    return false;
+    // return false;
 }
 
-std::string GetStreamingUrl(uint8_t channel)
+std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
 {
     // /LAPI/V1.0/Channels//Media/Video/Streams//LiveStreamURL?TransType=&TransProtocol=
 
@@ -29,12 +29,12 @@
 
 bool YuShiCtrl::UpdateTime(time_t ts)
 {
-    /LAPI/V1.0/System/Time
+    // /LAPI/V1.0/System/Time
 
     Json::Value jsonData(Json::objectValue);
 
     jsonData["TimeZone"] = "GMT+08:00";
-    jsonData["DeviceTime"] = ts;
+    jsonData["DeviceTime"] = (int64_t)ts;
     jsonData["DateFormat"] = 0; // YYYY-MM-DD
     jsonData["HourFormat"] = 1; // 24H
 
@@ -44,4 +44,9 @@
 bool YuShiCtrl::TakePhoto(std::vector& img)
 {
     return false;
+}
+
+bool YuShiCtrl::TakeVideo(uint32_t duration, std::string path)
+{
+    return false;
 }
\ No newline at end of file
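YuShiCtrl::UpdateTime only shows the JSON payload being assembled; the hunk ends before the request is sent. A stand-alone sketch of the full round trip through DoPutRequest, assuming the response buffer is a std::vector<uint8_t> (its element type is not visible in this diff), digest auth as in the removed Yu Shi branch of PhoneDevice.cpp, and the /LAPI/V1.0/System/Time path named in the comment; DoPutRequest and the HTTP_AUTH_TYPE_* constants are assumed to come from httpclient.h:

    #include <ctime>
    #include <string>
    #include <vector>
    #include <json/json.h>
    #include <android/multinetwork.h>
    #include "httpclient.h"

    bool PutDeviceTime(const std::string& ip, const std::string& user, const std::string& pwd,
                       net_handle_t netHandle, time_t ts)
    {
        Json::Value jsonData(Json::objectValue);
        jsonData["TimeZone"] = "GMT+08:00";
        jsonData["DeviceTime"] = (Json::Int64)ts;
        jsonData["DateFormat"] = 0; // YYYY-MM-DD
        jsonData["HourFormat"] = 1; // 24H

        Json::StreamWriterBuilder writer;
        std::string body = Json::writeString(writer, jsonData);

        std::string url = "http://" + ip + "/LAPI/V1.0/System/Time";
        std::vector<uint8_t> resp;
        int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_DIGEST, user.c_str(), pwd.c_str(),
                               netHandle, body.c_str(), resp);
        return res == 0;
    }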
diff --git a/app/src/main/cpp/netcamera/YuShiCtrl.h b/app/src/main/cpp/netcamera/YuShiCtrl.h
index d5da62ad..d0103ad1 100644
--- a/app/src/main/cpp/netcamera/YuShiCtrl.h
+++ b/app/src/main/cpp/netcamera/YuShiCtrl.h
@@ -18,6 +18,7 @@ public:
     virtual std::string GetStreamingUrl(uint8_t channel);
     virtual bool UpdateTime(time_t ts);
     virtual bool TakePhoto(std::vector& img);
+    virtual bool TakeVideo(uint32_t duration, std::string path);
 
 private:
 
diff --git a/app/src/main/cpp/netcamera/httpclient.cpp b/app/src/main/cpp/netcamera/httpclient.cpp
index 532b5353..6f4e4ac4 100644
--- a/app/src/main/cpp/netcamera/httpclient.cpp
+++ b/app/src/main/cpp/netcamera/httpclient.cpp
@@ -119,7 +119,7 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
     return ((0 == nRet) && (responseCode == 200)) ? 0 : 1;
 }
 
-int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data)
+int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector& data)
 {
     std::string auth;
 
@@ -239,10 +239,11 @@ int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned
     Json::StreamWriterBuilder writer;
     std::string sendbuf = Json::writeString(writer, outdata);
 
-    char respContent[1024];
+    std::vector respContent;
 
     DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent);
-    XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), respContent);
+    // respContent.push_back(0);
+    // XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), (const char*)&respContent[0]);
 
     return 0;
 }
diff --git a/app/src/main/cpp/netcamera/httpclient.h b/app/src/main/cpp/netcamera/httpclient.h
index 496c8515..fe57564c 100644
--- a/app/src/main/cpp/netcamera/httpclient.h
+++ b/app/src/main/cpp/netcamera/httpclient.h
@@ -19,6 +19,6 @@ bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr);
 
 int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector& data);
 
-int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data);
+int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector& data);
 
 #endif // __HTTP_CLIENT__
\ No newline at end of file
diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
index 87bd2016..efa9d299 100644
--- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
+++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
@@ -56,6 +56,7 @@ import android.telephony.TelephonyManager;
 import android.text.TextUtils;
 import android.text.format.DateFormat;
 import android.util.Log;
+import android.view.Gravity;
 import android.widget.RemoteViews;
 import android.widget.Toast;
 
@@ -529,8 +530,7 @@ public class MicroPhotoService extends Service {
                 }
 
                 // Register Next Photo Timer
-                Date date = new Date();
-                long startTime = (date.getTime() + 999) / 1000 + 1; // Add one second
+                long startTime = (ts == 0) ? (((new Date()).getTime() + 999) / 1000 + 1) : (ts + 1); // Add one second
                 mService.updateCaptureSchedule(startTime);
             } else if (TextUtils.equals(ACTION_HEARTBEAT_MANUALLY, action)) {
                 Log.i(TAG, "HB Timer Fired ACTION=" + action);
@@ -1238,7 +1238,9 @@ public class MicroPhotoService extends Service {
                 new Runnable() {
                     public void run() {
                         // Log.d(TAG, "Bluetooth Low Energy device is connected!!");
-                        Toast.makeText(getApplicationContext(), "MP Connected!", Toast.LENGTH_SHORT).show();
+                        Toast toast = Toast.makeText(getApplicationContext(), "MP Connected!", Toast.LENGTH_SHORT);
+                        toast.setGravity(Gravity.TOP, 0, 0);
+                        toast.show();
                         mStateService = STATE_SERVICE.CONNECTED;
                         startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification());
                     }
diff --git a/mpmaster/build.gradle b/mpmaster/build.gradle
index a816e021..5dbc9298 100644
--- a/mpmaster/build.gradle
+++ b/mpmaster/build.gradle
@@ -4,7 +4,7 @@ plugins {
 
 def AppMajorVersion = 1
 def AppMinorVersion = 1
-def AppBuildNumber = 8
+def AppBuildNumber = 9
 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
 def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
 
diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
index 0ffee577..6aa41b3d 100644
--- a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
+++ b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
@@ -50,7 +50,6 @@ import java.lang.reflect.Method;
 import java.nio.channels.FileLock;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
-import java.util.Calendar;
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Level;
@@ -766,64 +765,65 @@ public class MpMasterService extends Service {
     }
 
     private void registerHeartbeatTimer() {
-        long delaySec = 20; // 比i1延迟20秒
-        long interval = mHeartbeatDuration; // 距离下一次间隔,默认10分钟
-        boolean keepAlive = false;
-        long currentSec = System.currentTimeMillis() / 1000;
+        long timeout = mHeartbeatDuration;
+        boolean keepAlive = false;
+        long currentTimeMs = System.currentTimeMillis();
         if (mMntnMode) {
-            // 无延迟
-            delaySec = 0;
             if (mQuickHbMode) {
-                interval = mQuickHeartbeatDuration;
+                timeout = mQuickHeartbeatDuration;
            }
-            registerHeartbeatTimer((currentSec + interval + delaySec) * 1000, keepAlive);
+            registerHeartbeatTimer(currentTimeMs + timeout * 1000, keepAlive);
         } else {
+            long closestTime = -1;
             if (mUsingAbsHbTime) {
-                long expandSec = 120; // 扩展2分钟之内的都触发
-                Calendar calendar = Calendar.getInstance();
-                calendar.set(Calendar.HOUR_OF_DAY, 0);
-                calendar.set(Calendar.MINUTE, 0);
-                calendar.set(Calendar.SECOND, 0);
-                calendar.set(Calendar.MILLISECOND, 0);
-                long todayZero = calendar.getTimeInMillis() / 1000;
-                long todaySec = currentSec - todayZero;
-
-                long nextAbsSec = this.getNextAbsSec(todaySec);
-                if (nextAbsSec - todaySec > interval + expandSec) {
+                Date dt = new Date();
+                long ts = dt.getTime();
+                ts -= ts % 1000;
+
+                dt.setHours(0);
+                dt.setMinutes(0);
+                dt.setSeconds(0);
+
+                long zeroPoint = dt.getTime();
+                zeroPoint -= zeroPoint % 1000;
+                long offsetTs = (ts - zeroPoint) / 1000;
+
+                if (mAbsHeartbeatTimes != null && mAbsHeartbeatTimes.length > 0) {
+
+                    for (int i = 0; i < mAbsHeartbeatTimes.length; i++) {
+                        if (mAbsHeartbeatTimes[i] > offsetTs) {
+                            closestTime = mAbsHeartbeatTimes[i];
+                            break;
+                        }
+                    }
+
+                    if (closestTime == -1) {
+                        // next day
+                        closestTime = mAbsHeartbeatTimes[0] + 86400;
+                    }
+                } else {
+                    closestTime = 9 * 3600 + 13 * 60;
+                    if (offsetTs > closestTime) {
+                        closestTime += 86400;
+                    }
+                }
+
+                if (zeroPoint + closestTime * 1000 > currentTimeMs + mMpHeartbeatDuration) {
                     keepAlive = true;
-                    registerHeartbeatTimer((currentSec + interval + delaySec) * 1000, keepAlive);
+                    registerHeartbeatTimer(currentTimeMs + mMpHeartbeatDuration + 5000, keepAlive);
                 } else {
-                    registerHeartbeatTimer((todayZero + nextAbsSec) * 1000, keepAlive);
+                    registerHeartbeatTimer(zeroPoint + closestTime * 1000, keepAlive);
                 }
             } else {
-                // mUsingAbsHbTime=false,间隔10分钟 + 延迟
-                registerHeartbeatTimer((currentSec + interval + delaySec) * 1000, keepAlive);
-            }
-        }
-    }
-
-    private long getNextAbsSec(long todaySec) {
-        long nextAbsSec = -1;
-        if (mAbsHeartbeatTimes != null && mAbsHeartbeatTimes.length > 0) {
-            for (int i = 0; i < mAbsHeartbeatTimes.length; i++) {
-                if (mAbsHeartbeatTimes[i] > todaySec) {
-                    nextAbsSec = mAbsHeartbeatTimes[i];
-                    break;
+                // mUsingAbsHbTime: false
+                if ((mPreviousHeartbeatTime != 0) && (mPreviousHeartbeatTime - currentTimeMs < mHeartbeatDuration * 1000)) {
+                    registerHeartbeatTimer(mPreviousHeartbeatTime + mHeartbeatDuration * 1000, keepAlive);
+                } else {
+                    registerHeartbeatTimer(currentTimeMs + timeout * 1000, keepAlive);
                 }
             }
-
-            if (nextAbsSec == -1) {
-                // next day
-                nextAbsSec = mAbsHeartbeatTimes[0] + 24 * 3600;
-            }
-        } else {
-            nextAbsSec = 9 * 3600 + 13 * 60;
-            if (todaySec > nextAbsSec) {
-                nextAbsSec += 24 * 3600;
-            }
         }
-        return nextAbsSec;
     }
 
     private void registerHeartbeatTimer(long triggerTime, boolean keepAlive) {
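The rewritten registerHeartbeatTimer picks the next absolute heartbeat as an offset in seconds from local midnight: the first configured slot later than "now", otherwise the first slot of the following day, with 9 * 3600 + 13 * 60 (09:13) as the built-in fallback. The same selection restated compactly — C++ is used here only for brevity, the shipped logic is the Java above, and ascending slot order is assumed:

    #include <cstdint>
    #include <vector>

    // slots: configured heartbeat offsets in seconds after local midnight, ascending.
    int64_t NextAbsHeartbeatOffset(const std::vector<int64_t>& slots, int64_t secondsSinceMidnight)
    {
        if (slots.empty())
        {
            int64_t fallback = 9 * 3600 + 13 * 60;                 // 09:13 local time
            return (secondsSinceMidnight > fallback) ? fallback + 86400 : fallback;
        }
        for (int64_t slot : slots)
        {
            if (slot > secondsSinceMidnight)
                return slot;                                       // later today
        }
        return slots.front() + 86400;                              // first slot tomorrow
    }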