diff --git a/app/libs/arm64-v8a/libavcodec.so b/app/libs/arm64-v8a/libavcodec.so
new file mode 100644
index 00000000..42246066
Binary files /dev/null and b/app/libs/arm64-v8a/libavcodec.so differ
diff --git a/app/libs/arm64-v8a/libavdevice.so b/app/libs/arm64-v8a/libavdevice.so
new file mode 100644
index 00000000..b9e8a65b
Binary files /dev/null and b/app/libs/arm64-v8a/libavdevice.so differ
diff --git a/app/libs/arm64-v8a/libavfilter.so b/app/libs/arm64-v8a/libavfilter.so
new file mode 100644
index 00000000..bd1c4e5c
Binary files /dev/null and b/app/libs/arm64-v8a/libavfilter.so differ
diff --git a/app/libs/arm64-v8a/libavformat.so b/app/libs/arm64-v8a/libavformat.so
new file mode 100644
index 00000000..07f6c139
Binary files /dev/null and b/app/libs/arm64-v8a/libavformat.so differ
diff --git a/app/libs/arm64-v8a/libavutil.so b/app/libs/arm64-v8a/libavutil.so
new file mode 100644
index 00000000..485ea360
Binary files /dev/null and b/app/libs/arm64-v8a/libavutil.so differ
diff --git a/app/libs/arm64-v8a/libswresample.so b/app/libs/arm64-v8a/libswresample.so
new file mode 100644
index 00000000..de0f2b25
Binary files /dev/null and b/app/libs/arm64-v8a/libswresample.so differ
diff --git a/app/libs/arm64-v8a/libswscale.so b/app/libs/arm64-v8a/libswscale.so
new file mode 100644
index 00000000..1301fac8
Binary files /dev/null and b/app/libs/arm64-v8a/libswscale.so differ
diff --git a/app/libs/arm64-v8a/libx264.so b/app/libs/arm64-v8a/libx264.so
new file mode 100644
index 00000000..15f9d75b
Binary files /dev/null and b/app/libs/arm64-v8a/libx264.so differ
diff --git a/app/libs/armeabi-v7a/libavcodec.so b/app/libs/armeabi-v7a/libavcodec.so
new file mode 100644
index 00000000..e0e7a188
Binary files /dev/null and b/app/libs/armeabi-v7a/libavcodec.so differ
diff --git a/app/libs/armeabi-v7a/libavdevice.so b/app/libs/armeabi-v7a/libavdevice.so
new file mode 100644
index 00000000..304f7564
Binary files /dev/null and b/app/libs/armeabi-v7a/libavdevice.so differ
diff --git a/app/libs/armeabi-v7a/libavfilter.so b/app/libs/armeabi-v7a/libavfilter.so
new file mode 100644
index 00000000..d0d5dc13
Binary files /dev/null and b/app/libs/armeabi-v7a/libavfilter.so differ
diff --git a/app/libs/armeabi-v7a/libavformat.so b/app/libs/armeabi-v7a/libavformat.so
new file mode 100644
index 00000000..5e4c9f22
Binary files /dev/null and b/app/libs/armeabi-v7a/libavformat.so differ
diff --git a/app/libs/armeabi-v7a/libavutil.so b/app/libs/armeabi-v7a/libavutil.so
new file mode 100644
index 00000000..e15b72cb
Binary files /dev/null and b/app/libs/armeabi-v7a/libavutil.so differ
diff --git a/app/libs/armeabi-v7a/libswresample.so b/app/libs/armeabi-v7a/libswresample.so
new file mode 100644
index 00000000..50f860cd
Binary files /dev/null and b/app/libs/armeabi-v7a/libswresample.so differ
diff --git a/app/libs/armeabi-v7a/libswscale.so b/app/libs/armeabi-v7a/libswscale.so
new file mode 100644
index 00000000..0663239a
Binary files /dev/null and b/app/libs/armeabi-v7a/libswscale.so differ
diff --git a/app/libs/armeabi-v7a/libx264.so b/app/libs/armeabi-v7a/libx264.so
new file mode 100644
index 00000000..139305c5
Binary files /dev/null and b/app/libs/armeabi-v7a/libx264.so differ
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index cdab483d..58d9f162 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -86,6 +86,7 @@
+
(powerControlHandle);
delete powerControl;
+
+ return JNI_TRUE;
}
diff --git a/app/src/main/cpp/PhoneDevice.cpp b/app/src/main/cpp/PhoneDevice.cpp
index 45cbe277..a415a3d6 100644
--- a/app/src/main/cpp/PhoneDevice.cpp
+++ b/app/src/main/cpp/PhoneDevice.cpp
@@ -10,6 +10,8 @@
#include "PositionHelper.h"
#include "DngCreator.h"
+#include "media/RTSPRecorder.h"
+
#include
#include
#include
@@ -1686,6 +1688,142 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
return true;
}
+
+bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector& osds, std::shared_ptr powerCtrlPtr)
+{
+ // AutoEnv autoEnv(pThis->m_vm);
+ time_t ts = time(NULL);
+ uint32_t waitTime = localPhotoInfo.selfTestingTime;
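+ // Self-test gate: waitTime is presumably given in seconds; the *1024 below is a cheap
+ // seconds-to-milliseconds approximation (10240 ms is the fallback wait when it is 0).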
+ if(!GpioControl::GetSelftestStatus(waitTime))
+ {
+ m_isSelfTesting.store(true);
+ waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240;
+ std::this_thread::sleep_for(std::chrono::milliseconds(waitTime));
+ m_isSelfTesting.store(false);
+ }
+
+ XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
+ // std::shared_ptr ethernetPowerCtrl = std::make_shared(1);
+ std::shared_ptr ethernetPowerCtrl;
+
+ net_handle_t netHandle = GetEthnetHandle();
+ if (netHandle == 0)
+ {
+ // Wait about 10s
+ for (int idx = 0; idx < 84; idx++)
+ {
+ std::this_thread::sleep_for(std::chrono::milliseconds(128));
+ netHandle = GetEthnetHandle();
+
+ if (netHandle != 0)
+ {
+ break;
+ }
+ }
+ }
+
+ if (netHandle == 0)
+ {
+ // timeout
+ XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
+#ifdef NDEBUG
+ TakePhotoCb(0, localPhotoInfo, "", 0);
+ return false;
+#endif
+ }
+ else
+ {
+ XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
+ }
+
+ SetStaticIp();
+ std::this_thread::sleep_for(std::chrono::milliseconds(256));
+
+ NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 };
+ if (localPhotoInfo.vendor == 1)
+ {
+ // Hai Kang
+ netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
+ snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
+ }
+ else if (localPhotoInfo.vendor == 2)
+ {
+ // Hang Yu
+ strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
+ }
+ else if (localPhotoInfo.vendor == 3)
+ {
+ // Yu Shi
+ netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
+ int streamSid = 0; // TODO: should come from config
+ // rtsp://192.168.0.13:554/media/video1
+ snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)localPhotoInfo.cameraId);
+ // strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
+ }
+ else if (localPhotoInfo.vendor == 5)
+ {
+ // Hang Yu - New
+ netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
+ // http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
+ // http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
+ // http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
+ snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)localPhotoInfo.cameraId);
+ }
+ else
+ {
+ XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
+ TakePhotoCb(0, localPhotoInfo, "", 0);
+ return false;
+ }
+
+ struct in_addr addr;
+ addr.s_addr = localPhotoInfo.ip;
+ strcpy(netPhotoInfo.ip, inet_ntoa(addr));
+ strcpy(netPhotoInfo.outputPath, path.c_str());
+ if (!localPhotoInfo.userName.empty())
+ {
+ size_t len = std::min(sizeof(netPhotoInfo.userName) - 1, localPhotoInfo.userName.size());
+ strncpy(netPhotoInfo.userName, localPhotoInfo.userName.c_str(), len);
+ }
+ if (!localPhotoInfo.password.empty())
+ {
+ size_t len = std::min(sizeof(netPhotoInfo.password) - 1, localPhotoInfo.password.size());
+ strncpy(netPhotoInfo.password, localPhotoInfo.password.c_str(), len);
+ }
+ // strcpy(netPhotoInfo.interface, "eth0");
+
+ localPhotoInfo.photoTime = time(NULL);
+ std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
+ // RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
+ // dumper.start();
+ dumpRtspToMp4(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
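+ // NOTE: dumpRtspToMp4() blocks this worker thread until recording finishes - it joins an
+ // internal timer thread that runs for the requested duration - so the Ethernet power-off
+ // below only happens after the clip has been written.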
+
+ ethernetPowerCtrl.reset();
+ XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
+
+ std::string fullPath = endsWith(mPath, ".mp4") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
+
+ if (existsFile(tmpFile))
+ {
+ std::rename(tmpFile.c_str(), fullPath.c_str());
+ TakePhotoCb(3, localPhotoInfo, "", localPhotoInfo.photoTime);
+ }
+ else
+ {
+ TakePhotoCb(0, localPhotoInfo, "", 0);
+ XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
+ localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
+ }
+ // Notify to take next photo
+ // TakePhotoCb(1, localPhotoInfo, "", takingTime);
+
+ // XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
+ // localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
+ // TakePhotoCb(0, localPhotoInfo, "", 0);
+
+ return true;
+}
+
bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector& osds, std::shared_ptr powerCtrlPtr)
{
return true;
@@ -1996,6 +2134,71 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
t.detach();
}
+ else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ))
+ {
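+ // mediaType == 1 is handled here as a video request: for CAM_TYPE_PLZ (PTZ) cameras the
+ // camera is first moved to the requested preset, then the RTSP stream is recorded via
+ // TakeVideoWithNetCamera().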
+ uint64_t wid_serial = RequestWakelock(0);
+ CPhoneDevice* pThis = this;
+ IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
+ IDevice::SerialsPhotoParam param = { "", 0, 0 };
+ GetPhotoSerialsParamCb(param);
+ vector osds;
+ osds.swap(mOsds);
+
+ std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
+ {
+ uint32_t waitTime = localPhotoInfo.selfTestingTime;
+ if(!GpioControl::GetSelftestStatus(waitTime))
+ {
+ pThis->m_isSelfTesting.store(true);
+ time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
+ XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
+ remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
+ std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
+ pThis->m_isSelfTesting.store(false);
+ XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
+ }
+
+ if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF)
+ {
+ XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset);
+ CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
+ std::this_thread::sleep_for(std::chrono::seconds(10));
+ }
+
+ pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
+ pThis->ReleaseWakelock(wid_serial);
+ });
+
+ t.detach();
+ }
+ else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_NET))
+ {
+ uint64_t wid_serial = RequestWakelock(0);
+ CPhoneDevice* pThis = this;
+ IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
+ vector osds;
+ osds.swap(mOsds);
+
+ std::thread t([localPhotoInfo, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
+ {
+ uint32_t waitTime = localPhotoInfo.selfTestingTime;
+ if(!GpioControl::GetSelftestStatus(waitTime))
+ {
+ pThis->m_isSelfTesting.store(true);
+ time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
+ XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
+ remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
+ std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
+ pThis->m_isSelfTesting.store(false);
+ XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
+ }
+
+ pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
+ pThis->ReleaseWakelock(wid_serial);
+ });
+
+ t.detach();
+ }
else if (mPhotoInfo.usingSysCamera == 1)
{
JNIEnv* env = NULL;
diff --git a/app/src/main/cpp/PhoneDevice.h b/app/src/main/cpp/PhoneDevice.h
index cc9c8b23..3c696365 100644
--- a/app/src/main/cpp/PhoneDevice.h
+++ b/app/src/main/cpp/PhoneDevice.h
@@ -273,6 +273,7 @@ protected:
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
bool TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector& osds, std::shared_ptr powerCtrlPtr);
+ bool TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector& osds, std::shared_ptr powerCtrlPtr);
bool StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector& osds, std::shared_ptr powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector& objects) const
diff --git a/app/src/main/cpp/media/RTSPRecorder.cpp b/app/src/main/cpp/media/RTSPRecorder.cpp
new file mode 100644
index 00000000..11edccfa
--- /dev/null
+++ b/app/src/main/cpp/media/RTSPRecorder.cpp
@@ -0,0 +1,231 @@
+//
+// Created by Matthew on 2025/3/1.
+//
+
+#include "RTSPRecorder.h"
+#include <android/log.h>
+#include <thread>
+#include <chrono>
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavformat/avio.h>
+#include <libavcodec/avcodec.h>
+#include <libavutil/mathematics.h>
+}
+
+
+#define LOG_TAG "libcurl"
+
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
+#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
+#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
+
+
+void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration)
+{
+ AVFormatContext* inputFormatContext = nullptr;
+ AVFormatContext* outputFormatContext = nullptr;
+ AVPacket packet;
+
+ av_register_all();
+ avformat_network_init();
+
+ // Open input RTMP stream
+ if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
+ fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
+ return;
+ }
+
+ // Retrieve input stream information
+ if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
+ fprintf(stderr, "Could not find stream information\n");
+ avformat_close_input(&inputFormatContext);
+ return;
+ }
+
+ // Open output MP4 file
+ if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
+ fprintf(stderr, "Could not create output context\n");
+ avformat_close_input(&inputFormatContext);
+ return;
+ }
+
+ // Copy stream information from input to output
+ for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
+ AVStream* inStream = inputFormatContext->streams[i];
+ AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
+ if (!outStream) {
+ fprintf(stderr, "Failed to allocate output stream\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+
+ if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
+ fprintf(stderr, "Failed to copy codec parameters\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+ outStream->codecpar->codec_tag = 0;
+ }
+
+ // Open output file
+ if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
+ if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
+ fprintf(stderr, "Could not open output file '%s'\n", outputPath);
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+ }
+
+ // Write output file header
+ if (avformat_write_header(outputFormatContext, nullptr) < 0) {
+ fprintf(stderr, "Error occurred when writing header to output file\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+
+ // Start a thread to stop the streaming after the specified duration
+ std::thread stop_thread([&]() {
+ std::this_thread::sleep_for(std::chrono::milliseconds(duration));
+ av_read_pause(inputFormatContext);
+ });
+
+ // Read packets from input and write them to output
+ while (av_read_frame(inputFormatContext, &packet) >= 0) {
+ AVStream* inStream = inputFormatContext->streams[packet.stream_index];
+ AVStream* outStream = outputFormatContext->streams[packet.stream_index];
+
+ packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
+ packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
+ packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
+ packet.pos = -1;
+
+ if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
+ fprintf(stderr, "Error muxing packet\n");
+ break;
+ }
+
+ av_packet_unref(&packet);
+ }
+
+ stop_thread.join();
+
+ // Write output file trailer
+ av_write_trailer(outputFormatContext);
+
+ // Clean up
+ avformat_close_input(&inputFormatContext);
+ if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
+ avio_closep(&outputFormatContext->pb);
+ }
+ avformat_free_context(outputFormatContext);
+}
+
+
+void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration)
+{
+ AVFormatContext* inputFormatContext = nullptr;
+ AVFormatContext* outputFormatContext = nullptr;
+ AVPacket packet;
+
+ av_register_all();
+ avformat_network_init();
+
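+ // (Sketch) If UDP packet loss is a concern, RTSP-over-TCP could be requested via an AVDictionary:
+ //   AVDictionary* opts = nullptr;
+ //   av_dict_set(&opts, "rtsp_transport", "tcp", 0);
+ //   avformat_open_input(&inputFormatContext, rtspUrl, nullptr, &opts);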
+ // Open input RTSP stream
+ if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
+ fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
+ return;
+ }
+
+ // Retrieve input stream information
+ if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
+ fprintf(stderr, "Could not find stream information\n");
+ avformat_close_input(&inputFormatContext);
+ return;
+ }
+
+ // Open output MP4 file
+ if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
+ fprintf(stderr, "Could not create output context\n");
+ avformat_close_input(&inputFormatContext);
+ return;
+ }
+
+ // Copy stream information from input to output
+ for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
+ AVStream* inStream = inputFormatContext->streams[i];
+ AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
+ if (!outStream) {
+ fprintf(stderr, "Failed to allocate output stream\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+
+ if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
+ fprintf(stderr, "Failed to copy codec parameters\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+ outStream->codecpar->codec_tag = 0;
+ }
+
+ // Open output file
+ if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
+ if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
+ fprintf(stderr, "Could not open output file '%s'\n", outputPath);
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+ }
+
+ // Write output file header
+ if (avformat_write_header(outputFormatContext, nullptr) < 0) {
+ fprintf(stderr, "Error occurred when writing header to output file\n");
+ avformat_close_input(&inputFormatContext);
+ avformat_free_context(outputFormatContext);
+ return;
+ }
+
+ // Start a thread to stop the streaming after the specified duration
+ std::thread stop_thread([&]() {
+ std::this_thread::sleep_for(std::chrono::milliseconds(duration));
+ av_read_pause(inputFormatContext);
+ });
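+ // NOTE: av_read_pause() asks the demuxer to pause the stream; the read loop below still runs
+ // until av_read_frame() stops delivering packets. An AVFormatContext interrupt_callback would
+ // give a harder cut-off if needed.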
+
+ // Read packets from input and write them to output
+ while (av_read_frame(inputFormatContext, &packet) >= 0) {
+ AVStream* inStream = inputFormatContext->streams[packet.stream_index];
+ AVStream* outStream = outputFormatContext->streams[packet.stream_index];
+
+ packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
+ packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
+ packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
+ packet.pos = -1;
+
+ if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
+ fprintf(stderr, "Error muxing packet\n");
+ break;
+ }
+
+ av_packet_unref(&packet);
+ }
+
+ stop_thread.join();
+
+ // Write output file trailer
+ av_write_trailer(outputFormatContext);
+
+ // Clean up
+ avformat_close_input(&inputFormatContext);
+ if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
+ avio_closep(&outputFormatContext->pb);
+ }
+ avformat_free_context(outputFormatContext);
+}
diff --git a/app/src/main/cpp/media/RTSPRecorder.h b/app/src/main/cpp/media/RTSPRecorder.h
new file mode 100644
index 00000000..1133c8e0
--- /dev/null
+++ b/app/src/main/cpp/media/RTSPRecorder.h
@@ -0,0 +1,19 @@
+//
+// Created by Matthew on 2025/3/1.
+//
+
+#ifndef MICROPHOTO_RTSPRECORDER_H
+#define MICROPHOTO_RTSPRECORDER_H
+
+#include <cstdint>
+
+// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
+void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration);
+void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration);
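+
+// Illustrative usage (values are placeholders; duration is in milliseconds):
+//   dumpRtspToMp4("rtsp://192.168.0.13:554/media/video1", "/sdcard/tmp/12345.mp4", 15 * 1000);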
+
+class RTSPRecorder {
+
+};
+
+
+#endif //MICROPHOTO_RTSPRECORDER_H
diff --git a/app/src/main/cpp/media/RTSPToMP4.cpp b/app/src/main/cpp/media/RTSPToMP4.cpp
new file mode 100644
index 00000000..26b096a2
--- /dev/null
+++ b/app/src/main/cpp/media/RTSPToMP4.cpp
@@ -0,0 +1,186 @@
+//
+// Created by Matthew on 2025/2/28.
+//
+
+#include "RTSPToMP4.h"
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/NdkMediaMuxer.h>
+#include <media/NdkMediaFormat.h>
+#include <android/log.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <cstring>
+#include <limits>
+
+int32_t getMaxInputSize(AMediaExtractor* extractor, size_t trackIndex)
+{
+ AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, trackIndex);
+ int32_t maxInputSize = 0;
+ if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &maxInputSize)) {
+ // LOGI("Max input size for track %zu: %d", trackIndex, maxInputSize);
+ } else {
+ // LOGE("Failed to get max input size for track %zu", trackIndex);
+ }
+ AMediaFormat_delete(format);
+ return maxInputSize;
+}
+
+RTSPToMP4::RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs/* = 0*/)
+ : fd(-1), codec(nullptr), extractor(nullptr), muxer(nullptr), videoTrackIndex(-1), durationInMs(durationInMs), running(false) {
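+ // Initialization order matters: initMuxer() reads track formats from the extractor,
+ // so initExtractor() must run before it.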
+ initExtractor(rtspUrl);
+ initCodec("video/avc");
+ initMuxer(outputPath);
+}
+
+RTSPToMP4::~RTSPToMP4() {
+ if (codec) AMediaCodec_delete(codec);
+ if (extractor) AMediaExtractor_delete(extractor);
+ if (muxer) AMediaMuxer_delete(muxer);
+
+ if (fd != -1)
+ {
+ fdatasync(fd);
+ close(fd);
+ fd = -1;
+ }
+}
+
+void RTSPToMP4::initCodec(const char* mime) {
+ codec = AMediaCodec_createDecoderByType(mime);
+ AMediaFormat* format = AMediaFormat_new();
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
+ // Set other format parameters as needed
+ // ...
+ AMediaCodec_configure(codec, format, nullptr, nullptr, 0);
+ AMediaFormat_delete(format);
+}
+
+void RTSPToMP4::initExtractor(const char* rtspUrl) {
+ extractor = AMediaExtractor_new();
+ media_status_t status = AMediaExtractor_setDataSource(extractor, rtspUrl);
+ if (status != AMEDIA_OK) {
+ // Handle error
+ // ...
+ }
+}
+
+void RTSPToMP4::initMuxer(const char* outputPath) {
+ fd = open(outputPath, O_CREAT | O_WRONLY, 0644);
+ muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+
+ int numTracks = AMediaExtractor_getTrackCount(extractor);
+ if (numTracks <= 0) {
+ // LOGE("No tracks found in RTSP stream");
+ AMediaExtractor_delete(extractor);
+ return;
+ }
+
+ for (int i = 0; i < numTracks; ++i) {
+ AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, i);
+ const char* mime;
+ if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime) && strncmp(mime, "video/", 6) == 0) {
+ videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
+ AMediaExtractor_selectTrack(extractor, i);
+ }
+ AMediaFormat_delete(format);
+ }
+
+ if (videoTrackIndex == -1) {
+ // LOGE("No video track found in RTSP stream");
+ AMediaExtractor_delete(extractor);
+ AMediaMuxer_delete(muxer);
+ return;
+ }
+
+ int32_t maxInputSize = getMaxInputSize(extractor, videoTrackIndex);
+ if (maxInputSize <= 0) {
+ // LOGE("Invalid max input size");
+ // releaseMediaExtractor(extractor);
+ sampleData.resize(1920 * 1080 * 4, 0);
+ return;
+ }
+
+ sampleData.resize(maxInputSize, 0);
+}
+
+void RTSPToMP4::startDecodingAndMuxing() {
+ AMediaCodec_start(codec);
+ size_t bufferSize = sampleData.size();
+ uint8_t* buffer = &sampleData[0];
+ int64_t sampleTime = 0;
+ int64_t startTime = 0;
+ bool firstSampleData = true;
+
+ int64_t durationTime = (durationInMs == 0) ? std::numeric_limits<int64_t>::max() : (int64_t)durationInMs * 1000;
+
+
+ while (running) {
+ // Extract data from RTSP stream
+ ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
+ if (sampleSize < 0) {
+ break; // End of stream
+ }
+
+ sampleTime = AMediaExtractor_getSampleTime(extractor);
+ if (firstSampleData)
+ {
+ startTime = sampleTime;
+ firstSampleData = false;
+ }
+
+ sampleTime -= startTime;
+
+ // Feed data to codec: a free input buffer must be dequeued before it can be filled
+ ssize_t inputBufferIndex = AMediaCodec_dequeueInputBuffer(codec, 10000);
+ if (inputBufferIndex >= 0) {
+ size_t inputBufferSize = 0;
+ uint8_t* inputBuffer = AMediaCodec_getInputBuffer(codec, (size_t)inputBufferIndex, &inputBufferSize);
+ memcpy(inputBuffer, buffer, sampleSize);
+ AMediaCodec_queueInputBuffer(codec, (size_t)inputBufferIndex, 0, sampleSize, sampleTime, 0);
+ }
+
+ // Retrieve decoded frames and write to muxer
+ AMediaCodecBufferInfo bufferInfo;
+ ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &bufferInfo, 0);
+ if (outputBufferIndex >= 0) {
+
+ bufferInfo.offset = 0;
+ bufferInfo.size = sampleSize;
+ bufferInfo.presentationTimeUs = sampleTime;
+ bufferInfo.flags = AMediaExtractor_getSampleFlags(extractor);
+
+ size_t outputBufferSize = 0;
+ uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(codec, outputBufferIndex, &outputBufferSize);
+ AMediaMuxer_writeSampleData(muxer, videoTrackIndex, outputBuffer, &bufferInfo);
+ AMediaCodec_releaseOutputBuffer(codec, outputBufferIndex, false);
+ }
+
+ AMediaExtractor_advance(extractor);
+
+ if (sampleTime > durationTime)
+ {
+ break;
+ }
+ }
+
+ AMediaCodec_stop(codec);
+ AMediaMuxer_stop(muxer);
+
+ if (fd != -1)
+ {
+ fdatasync(fd);
+ close(fd);
+ fd = -1;
+ }
+}
+
+void RTSPToMP4::start() {
+ // The video track was already added to the muxer in initMuxer(); adding it again here
+ // would create a duplicate track.
+ running = true;
+ AMediaMuxer_start(muxer);
+
+ startDecodingAndMuxing();
+}
+
+void RTSPToMP4::stop() {
+ running = false;
+}
diff --git a/app/src/main/cpp/media/RTSPToMP4.h b/app/src/main/cpp/media/RTSPToMP4.h
new file mode 100644
index 00000000..6759a8fd
--- /dev/null
+++ b/app/src/main/cpp/media/RTSPToMP4.h
@@ -0,0 +1,38 @@
+//
+// Created by Matthew on 2025/2/28.
+//
+
+#ifndef MICROPHOTO_RTSPTOMP4_H
+#define MICROPHOTO_RTSPTOMP4_H
+
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/NdkMediaMuxer.h>
+#include <vector>
+
+class RTSPToMP4 {
+public:
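+ // durationInMs == 0 means "no duration limit" (see startDecodingAndMuxing()).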
+ RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs = 0);
+ ~RTSPToMP4();
+ void start();
+ void stop();
+
+private:
+ void initCodec(const char* mime);
+ void initExtractor(const char* rtspUrl);
+ void initMuxer(const char* outputPath);
+ void startDecodingAndMuxing();
+
+ int fd;
+ AMediaCodec* codec;
+ AMediaExtractor* extractor;
+ AMediaMuxer* muxer;
+ int videoTrackIndex;
+ uint64_t durationInMs;
+ bool running;
+
+ std::vector<uint8_t> sampleData;
+};
+
+
+#endif //MICROPHOTO_RTSPTOMP4_H
diff --git a/app/src/main/java/com/xypower/mpapp/BridgeProvider.java b/app/src/main/java/com/xypower/mpapp/BridgeProvider.java
index 87ad2a67..8fee79c1 100644
--- a/app/src/main/java/com/xypower/mpapp/BridgeProvider.java
+++ b/app/src/main/java/com/xypower/mpapp/BridgeProvider.java
@@ -46,6 +46,9 @@ public class BridgeProvider extends ContentProvider {
private final static String PATH_RECOG_PIC = "/recogPic";
+ private final static String PATH_REQUEST_PWR_CTRL = "/requestPwrCtrl";
+ private final static String PATH_RELEASE_PWR_CTRL = "/releasePwrCtrl";
+
public BridgeProvider() {
Log.i(TAG, "BridgeProvider");
}
@@ -85,6 +88,9 @@ public class BridgeProvider extends ContentProvider {
matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1);
matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2);
matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3);
+ matcher.addURI(AUTHORITY, PATH_REQUEST_PWR_CTRL, 4);
+ matcher.addURI(AUTHORITY, PATH_RELEASE_PWR_CTRL, 5);
+
Cursor cursor = null;
int matched = matcher.match(uri);
@@ -98,6 +104,12 @@ public class BridgeProvider extends ContentProvider {
case 3:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
+ case 4:
+ cursor = requestPowerControl(uri, selection, selectionArgs);
+ break;
+ case 5:
+ cursor = releasePowerControl(uri, selection, selectionArgs);
+ break;
default:
break;
}
@@ -169,6 +181,48 @@ public class BridgeProvider extends ContentProvider {
return matrixCursor;
}
+ private Cursor requestPowerControl(Uri uri, String selection, String[] selectionArgs) {
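+ // Illustrative caller sketch (assumes stringFromBase64() is a plain Base64 decode of the
+ // selection string; AUTHORITY is this provider's authority):
+ //   String selection = Base64.encodeToString("type=1".getBytes(), Base64.NO_WRAP);
+ //   Cursor c = resolver.query(Uri.parse("content://" + AUTHORITY + "/requestPwrCtrl"),
+ //           null, selection, null, null);
+ //   long handle = (c != null && c.moveToFirst()) ? c.getLong(0) : 0;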
+ String decodedSelection = stringFromBase64(selection);
+ int type = 0;
+ if (!TextUtils.isEmpty(decodedSelection)) {
+ Uri u = Uri.parse("http://a.com/?" + decodedSelection);
+ String val = u.getQueryParameter("type");
+ try {
+ type = Integer.parseInt(val);
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ long nativeHandle = MicroPhotoService.requestPowerControl(type);
+
+ String[] columns = { "pwrCtrl" };
+ MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
+ matrixCursor.addRow(new Object[] { Long.valueOf(nativeHandle) });
+ return matrixCursor;
+ }
+
+ private Cursor releasePowerControl(Uri uri, String selection, String[] selectionArgs) {
+ String decodedSelection = stringFromBase64(selection);
+ long nativeHandle = 0;
+ if (!TextUtils.isEmpty(decodedSelection)) {
+ Uri u = Uri.parse("http://a.com/?" + decodedSelection);
+ String val = u.getQueryParameter("handle");
+ try {
+ nativeHandle = Long.parseLong(val);
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ boolean res = MicroPhotoService.releasePowerControl(nativeHandle);
+
+ String[] columns = { "result" };
+ MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
+ matrixCursor.addRow(new Object[] { Integer.valueOf(res ? 1 : 0) });
+ return matrixCursor;
+ }
+
private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);
diff --git a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
index f0c312e8..3a31560e 100644
--- a/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
+++ b/app/src/main/java/com/xypower/mpapp/MicroPhotoService.java
@@ -91,6 +91,7 @@ public class MicroPhotoService extends Service {
// Used to load the 'microphoto' library on application startup.
static {
+
loadLibrary("microphoto");
}
@@ -316,6 +317,7 @@ public class MicroPhotoService extends Service {
intentFilter.addAction(ACTION_UPDATE_CONFIGS);
intentFilter.addAction(ACTION_IMP_PUBKRY);
intentFilter.addAction(ACTION_TAKE_PHOTO_MANUALLY);
+ intentFilter.addAction(ACTION_HEARTBEAT_MANUALLY);
intentFilter.addAction(ACTION_GPS_TIMEOUT);
intentFilter.addAction(ACTION_RESTART);
getApplicationContext().registerReceiver(mAlarmReceiver, intentFilter, Context.RECEIVER_EXPORTED | Context.RECEIVER_VISIBLE_TO_INSTANT_APPS);
@@ -1680,7 +1682,7 @@ cellSignalStrengthGsm.getDbm();
public static native boolean exportPrivateFile(int index, String outputPath);
public static native long requestPowerControl(int type);
- public static native long releasePowerControl(long powerControlHandle);
+ public static native boolean releasePowerControl(long powerControlHandle);
////////////////////////GPS////////////////////
// private static final String GPS_LOCATION_NAME = android.location.LocationManager.GPS_PROVIDER;