Merge remote-tracking branch 'origin/main'

PtzNew
jxjajs 3 months ago
commit 9f8e92030a

@ -5,7 +5,7 @@ plugins {
// 10,00,000 major-minor-build
def AppMajorVersion = 1
def AppMinorVersion = 3
def AppBuildNumber = 86
def AppBuildNumber = 90
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber

@ -583,7 +583,7 @@ public:
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PLZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_OTG_STATE, closeDelayTime)
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{

@ -27,6 +27,12 @@
#include <mosquitto.h>
#endif
#ifdef USING_FFMPEG
extern "C" {
#include <libavformat/avformat.h>
}
#endif
#include <android/native_window.h>
#include <android/native_window_jni.h>
@ -235,6 +241,11 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
mosquitto_lib_init();
#endif
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_init();
#endif
return result;
}
@ -245,6 +256,11 @@ JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
#endif
curl_global_cleanup();
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_deinit();
#endif
}
bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread)
@ -373,7 +389,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
JNIEnv* env,
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jboolean photoOrVideo) {
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jstring url, jint mediaType) {
if (channel < 0 || channel > 0xFFFF)
{
@ -385,12 +401,27 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
return JNI_FALSE;
}
unsigned char type = photoOrVideo ? 0 : 1;
uint8_t type = (uint8_t)mediaType;
// std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
// th.detach();
if (channel < 0x100)
{
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO)
{
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM)
{
// virtual bool StartStream(unsigned char channel, unsigned char preset, const std::string& url, uint32_t* photoId = NULL);
// virtual bool StopStream(unsigned char channel, unsigned char preset, uint32_t photoId);
uint32_t photoId = 0;
std::string urlStr = jstring2string(env, url);
pTerminal->StartStream(channel, preset, urlStr, &photoId);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF)
{
pTerminal->StopStream(channel, preset, 0);
}
}
else
{
@ -402,7 +433,6 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
JNIEnv* env,

@ -1763,7 +1763,7 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
if (netHandle == 0)
{
// Wait about 10s
for (int idx = 0; idx < 84; idx++)
for (int idx = 0; idx < 128; idx++)
{
std::this_thread::sleep_for(std::chrono::milliseconds(128));
netHandle = GetEthnetHandle();
@ -1792,12 +1792,7 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
// SetStaticIp();
std::this_thread::sleep_for(std::chrono::milliseconds(256));
struct in_addr addr;
char ip[32] = { 0 };
addr.s_addr = localPhotoInfo.ip;
strcpy(ip, inet_ntoa(addr));
// strcpy(netPhotoInfo.outputPath, path.c_str());
std::string ip = GetIpStr(localPhotoInfo.ip);
VendorCtrl* vendorCtrl = MakeVendorCtrl(localPhotoInfo.vendor, localPhotoInfo.channel, ip, localPhotoInfo.userName, localPhotoInfo.password, netHandle);
if (vendorCtrl == NULL)
{
@ -1822,7 +1817,19 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
// RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
// dumper.start();
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, GetEthnetHandle());
XYLOG(XYLOG_SEVERITY_DEBUG, "Start Recording CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
if (vendorCtrl->HasAuthOnStreaming())
{
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, localPhotoInfo.userName, localPhotoInfo.password, GetEthnetHandle());
}
else
{
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, "", "", GetEthnetHandle());
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Stop Recording CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
ethernetPowerCtrl.reset();
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
@ -1832,12 +1839,12 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
if (existsFile(tmpFile))
{
std::rename(tmpFile.c_str(), fullPath.c_str());
TakePhotoCb(3, localPhotoInfo, "", localPhotoInfo.photoTime);
TakePhotoCb(3, localPhotoInfo, fullPath, localPhotoInfo.photoTime);
}
else
{
TakePhotoCb(0, localPhotoInfo, "", 0);
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
localPhotoInfo.photoId, ip, streamingUrl.c_str());
}
// Notify to take next photo
@ -1852,62 +1859,97 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& photoInfo, const std::string& url, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
{
#if 0
if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM)
{
std::map<uint8_t, std::shared_ptr<Streaming> >::iterator it = m_streamings.find(photoInfo.channel);
if (it != m_streamings.end())
time_t ts = time(NULL);
uint32_t waitTime = photoInfo.selfTestingTime;
if(!GpioControl::GetSelftestStatus(waitTime))
{
it->second->stop();
it->second.reset();
m_streamings.erase(it);
m_isSelfTesting.store(true);
waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240;
std::this_thread::sleep_for(std::chrono::milliseconds(waitTime));
m_isSelfTesting.store(false);
}
NET_PHOTO_INFO netPhotoInfo = { 0, 0 };
if (photoInfo.vendor == 1)
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(1);
// std::shared_ptr<PowerControl> ethernetPowerCtrl;
net_handle_t netHandle = GetEthnetHandle();
if (netHandle == 0)
{
// Hai Kang
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
// Wait about 10s
for (int idx = 0; idx < 84; idx++)
{
std::this_thread::sleep_for(std::chrono::milliseconds(128));
netHandle = GetEthnetHandle();
if (netHandle != 0)
{
break;
}
}
}
else if (photoInfo.vendor == 2)
if (netHandle == 0)
{
// Hang Yu
strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
// timeout
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, photoInfo.photoId);
#ifdef NDEBUG
TakePhotoCb(0, photoInfo, "", 0);
return false;
#endif
}
else if (photoInfo.vendor == 3)
else
{
// Yu Shi
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
int streamSid = 0; // should put into config
// rtsp://192.168.0.13:554/media/video1
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)photoInfo.cameraId);
// strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.preset, photoInfo.photoId);
}
else if (photoInfo.vendor == 5)
// SetStaticIp();
std::this_thread::sleep_for(std::chrono::milliseconds(256));
std::map<uint8_t, STREAMING_CONTEXT>::iterator it = m_streamings.find(photoInfo.channel);
if (it != m_streamings.end())
{
// Hang Yu - New
netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
// http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
// http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
// http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)photoInfo.cameraId);
it->second.stream->stop();
it->second.stream.reset();
it->second.powerCtrl.reset();
it->second.ethernetPowerCtrl.reset();
m_streamings.erase(it);
}
else
std::string ip = GetIpStr(photoInfo.ip);
VendorCtrl* vendorCtrl = MakeVendorCtrl(photoInfo.vendor, photoInfo.channel, ip, photoInfo.userName, photoInfo.password, netHandle);
if (vendorCtrl == NULL)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.vendor, (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
TakePhotoCb(0, photoInfo, "", 0);
return false;
}
StreamForwarder* forwarder = new StreamForwarder();
m_streamings[photoInfo.channel] = std::shared_ptr<Streaming>((Streaming*)forwarder);
// Initialize with RTSP input and RTMP output
if (!forwarder->initialize(std::string(netPhotoInfo.url), url)) {
std::cerr << "Failed to initialize stream forwarder" << std::endl;
return -1;
std::string streamingUrl = vendorCtrl->GetStreamingUrl(photoInfo.cameraId);
if (streamingUrl.empty())
{
XYLOG(XYLOG_SEVERITY_ERROR, "Invalid Streaming URL CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
TakePhotoCb(0, photoInfo, "", 0);
return false;
}
RtspForwarder* forwarder = new RtspForwarder(streamingUrl, url);
bool res = false;
if (vendorCtrl->HasAuthOnStreaming())
{
forwarder->setAuth(photoInfo.userName, photoInfo.password);
}
STREAMING_CONTEXT ctx;
ctx.stream = std::shared_ptr<Streaming>((Streaming*)forwarder);
ctx.powerCtrl = powerCtrlPtr;
ctx.ethernetPowerCtrl = ethernetPowerCtrl;
m_streamings[photoInfo.channel] = ctx;
// Optional: Set callback to process video frames
#if 0
forwarder->setFrameCallback([](uint8_t* data, int linesize, int width, int height) {
@ -1916,9 +1958,18 @@ bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& photoInfo, const std:
});
#endif
XYLOG(XYLOG_SEVERITY_INFO, "Start Streaming CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
// Start forwarding
forwarder->start();
res = forwarder->start();
#if 0
// Initialize with RTSP input and RTMP output
if (!res)
{
XYLOG(XYLOG_SEVERITY_ERROR, "TP: Failed to open stream: %s (%u/%02X) PHOTOID=%u", streamingUrl.c_str(), (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
delete forwarder;
return -1;
}
#endif
// Wait for user input to stop
// std::cout << "Press Enter to stop streaming..." << std::endl;
// std::cin.get();
@ -1927,15 +1978,19 @@ bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& photoInfo, const std:
}
else if (photoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF)
{
XYLOG(XYLOG_SEVERITY_INFO, "Stop Streaming CH=%u PR=%X PHOTOID=%u", (uint32_t)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
auto it = m_streamings.find(photoInfo.channel);
if (it != m_streamings.end())
{
it->second->stop();
it->second.reset();
it->second.stream->stop();
it->second.stream.reset();
it->second.powerCtrl.reset();
it->second.ethernetPowerCtrl.reset();
m_streamings.erase(it);
}
}
#endif
return true;
}
@ -2239,6 +2294,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() mutable
{
pThis->TakePhotoCb(1, localPhotoInfo, "", 0);
pThis->StartPushStreaming(localPhotoInfo, path, osds, powerCtrlPtr);
});
@ -5079,5 +5135,9 @@ VendorCtrl* CPhoneDevice::MakeVendorCtrl(int vendor, uint8_t channel, const std:
// Hang Yu - New
vendorCtrl = new HangYuCtrl(ip, userName, password, channel, netHandle);
}
if (vendorCtrl != NULL)
{
vendorCtrl->UpdateTime(time(NULL));
}
return vendorCtrl;
}

@ -157,6 +157,13 @@ class PowerControl;
class VendorCtrl;
class Streaming;
// Bundles everything that must stay alive for the duration of one push-stream
// session on a channel: the forwarder itself plus the power controls that keep
// the camera and the Ethernet link powered while streaming. Dropping the
// context (see m_streamings erase paths) stops the stream and powers down.
struct STREAMING_CONTEXT
{
	std::shared_ptr<Streaming> stream;               // active stream forwarder
	std::shared_ptr<PowerControl> powerCtrl;         // main (camera) power control
	std::shared_ptr<PowerControl> ethernetPowerCtrl; // Ethernet adapter power control
};
class CPhoneDevice : public IDevice
{
public:
@ -428,7 +435,7 @@ protected:
std::atomic<bool> m_collecting;
unsigned long long localDelayTime;
std::map<uint8_t, std::shared_ptr<Streaming> > m_streamings;
std::map<uint8_t, STREAMING_CONTEXT > m_streamings;
};

@ -17,6 +17,7 @@
#include <string>
#include <thread>
#include <numeric>
#include <fstream>
#include <android/log.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
@ -29,6 +30,55 @@
#include "DngCreator.h"
// Dumps the raw planar YUV content of an AImage to a file (I420-style layout:
// full Y plane, then U, then V), stripping each plane's row stride so the file
// contains tightly packed rows. Debug helper; failures are silently ignored.
//
// NOTE(review): return codes of AImage_getPlaneData/AImage_getPlaneRowStride
// are not checked, and the plane *pixel* stride is ignored — this assumes the
// image is fully planar (pixel stride 1 on U/V). For Android YUV_420_888 the
// U/V planes may be interleaved (pixel stride 2); confirm the reader's format.
void saveYuvToFile(AImage* image, const std::string& filePath) {
    int32_t width, height;
    AImage_getWidth(image, &width);
    AImage_getHeight(image, &height);

    // Fetch the YUV plane pointers.
    uint8_t* yPlane = nullptr;
    uint8_t* uPlane = nullptr;
    uint8_t* vPlane = nullptr;
    int yLength, uLength, vLength;
    AImage_getPlaneData(image, 0, &yPlane, &yLength); // Y plane
    AImage_getPlaneData(image, 1, &uPlane, &uLength); // U plane
    AImage_getPlaneData(image, 2, &vPlane, &vLength); // V plane

    int32_t yStride, uStride, vStride;
    AImage_getPlaneRowStride(image, 0, &yStride); // Y row stride
    AImage_getPlaneRowStride(image, 1, &uStride); // U row stride
    AImage_getPlaneRowStride(image, 2, &vStride); // V row stride

    // Open the output file.
    std::ofstream file(filePath, std::ios::binary);
    if (!file.is_open()) {
        // Could not open the file — silently give up (debug-only path).
        return;
    }

    // Write the Y plane row by row, dropping the stride padding.
    for (int i = 0; i < height; i++) {
        file.write(reinterpret_cast<const char*>(yPlane + i * yStride), width);
    }

    // Write the U plane (subsampled 2x2, hence half width/height).
    for (int i = 0; i < height / 2; i++) {
        file.write(reinterpret_cast<const char*>(uPlane + i * uStride), width / 2);
    }

    // Write the V plane.
    for (int i = 0; i < height / 2; i++) {
        file.write(reinterpret_cast<const char*>(vPlane + i * vStride), width / 2);
    }

    // Close the file.
    file.close();
}
#ifdef _DEBUG
void Auto_AImage_delete(AImage* image)
{
@ -1327,6 +1377,16 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
#ifdef OUTPUT_DBG_INFO
if (mWidth == 1920)
{
std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", time(NULL));
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
fileName += "_" + mCameraId + std::to_string(frameTs) + ".yuv";
saveYuvToFile(image, fileName.c_str());
}
#endif
AImage_delete(image);
bool captureCompleted = false;
@ -1926,6 +1986,8 @@ void NdkCamera::FireOneCapture(uint64_t ts)
cv::imwrite(fileName, it->second, params);
}
}
#endif
onOneCapture(mCharacteristics, mCaptureResults.back(), mFinalLdr, ts - m_startTime, mOneFrame.back().second);
}

@ -12,6 +12,7 @@ extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
@ -21,17 +22,96 @@ extern "C" {
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#include <libavutil/log.h>
#include <android/log.h>
// Bridges FFmpeg's logging into Android logcat under the "FFmpeg" tag.
// Installed via av_log_set_callback(); `ptr` (the AVClass context) is unused.
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
    // Exact-match mapping from FFmpeg severity to Android log priority.
    // Any level not listed falls back to ANDROID_LOG_INFO.
    static const struct { int avLevel; int androidLevel; } kLevelMap[] = {
        { AV_LOG_PANIC,   ANDROID_LOG_FATAL   },
        { AV_LOG_FATAL,   ANDROID_LOG_FATAL   },
        { AV_LOG_ERROR,   ANDROID_LOG_ERROR   },
        { AV_LOG_WARNING, ANDROID_LOG_WARN    },
        { AV_LOG_INFO,    ANDROID_LOG_INFO    },
        { AV_LOG_VERBOSE, ANDROID_LOG_VERBOSE },
        { AV_LOG_DEBUG,   ANDROID_LOG_DEBUG   },
        { AV_LOG_TRACE,   ANDROID_LOG_DEBUG   },
    };

    int android_log_level = ANDROID_LOG_INFO;
    for (size_t idx = 0; idx < sizeof(kLevelMap) / sizeof(kLevelMap[0]); idx++) {
        if (kLevelMap[idx].avLevel == level) {
            android_log_level = kLevelMap[idx].androidLevel;
            break;
        }
    }

    // Render the message into a bounded buffer; overly long lines are truncated.
    char log_message[1024];
    vsnprintf(log_message, sizeof(log_message), fmt, vl);

    // Forward to logcat.
    __android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
// Mirrors every input stream onto the output context for stream-copy muxing:
// allocates a matching output stream, copies the codec parameters, carries the
// input time_base over, and clears codec_tag so the muxer picks its own tag.
// Returns 0 on success or a negative AVERROR code.
int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) {
    const unsigned int streamCount = input_ctx->nb_streams;
    for (unsigned int idx = 0; idx < streamCount; idx++) {
        AVStream *src = input_ctx->streams[idx];
        AVStream *dst = avformat_new_stream(output_ctx, NULL);
        if (dst == NULL) {
            return AVERROR_UNKNOWN;
        }

        // Stream copy: duplicate the codec parameters verbatim.
        int err = avcodec_parameters_copy(dst->codecpar, src->codecpar);
        if (err < 0) {
            return err;
        }

        // Keep the source timing; let the output muxer choose its own tag.
        dst->time_base = src->time_base;
        dst->codecpar->codec_tag = 0;
    }
    return 0;
}
// Writes the MP4 container header with faststart/fragmented-keyframe flags and
// the "mp42" brand. Returns avformat_write_header()'s result (negative AVERROR
// on failure, in which case a diagnostic is printed to stderr).
int write_mp4_header(AVFormatContext *output_ctx) {
    // MP4 specific muxer options.
    AVDictionary *headerOpts = NULL;
    av_dict_set(&headerOpts, "movflags", "faststart+frag_keyframe", 0);
    av_dict_set(&headerOpts, "brand", "mp42", 0);

    const int ret = avformat_write_header(output_ctx, &headerOpts);
    av_dict_free(&headerOpts);

    if (ret < 0) {
        char errbuf[AV_ERROR_MAX_STRING_SIZE];
        av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
        fprintf(stderr, "Header write failed: %s (code: %d)\n", errbuf, ret);
    }
    return ret;
}
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
AVDictionary *options = NULL;
av_register_all();
avformat_network_init();
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
@ -129,29 +209,47 @@ void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duratio
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
AVDictionary *options = NULL;
int res = 0;
av_register_all();
avformat_network_init();
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_WARNING);
// Set RTSP transport protocol option before opening
#endif
std::string url = rtspUrl;
AVDictionary* options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "5000000", 0);
if (!userName.empty())
{
av_dict_set(&options, "username", userName.c_str(), 0); // Replace with actual username
av_dict_set(&options, "password", password.c_str(), 0); // Replace with actual password
// Set custom socket options via protocol whitelist and options
inputFormatContext->protocol_whitelist = av_strdup("file,udp,rtp,tcp,rtsp");
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", userName.c_str(), password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Open input RTSP stream
if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
int res = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &options);
av_dict_free(&options);
if (res != 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open input: %s (error code: %d)\n", errbuf, res);
// fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
// fprintf(stderr, "Could not find stream information\n");
@ -159,31 +257,6 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
return;
}
// Get socket file descriptor
if (NETWORK_UNSPECIFIED != netHandle)
{
int fd = -1;
if (inputFormatContext->pb) {
AVIOContext *io_ctx = inputFormatContext->pb;
// const char *url = io_ctx->filename;
// You can access socket options using av_opt API
res = av_opt_get_int(io_ctx, "fd", AV_OPT_SEARCH_CHILDREN, (int64_t*)&fd);
if (res >= 0 && fd >= 0) {
// printf("Socket file descriptor: %d\n", fd);
int res = android_setsocknetwork(netHandle, fd);
if (res == -1)
{
int errcode = errno;
// printf("android_setsocknetwork errno=%d", errcode);
// XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
}
}
}
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
@ -194,21 +267,49 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip audio streams
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
continue;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
// Copy video stream as-is
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *out_stream = avformat_new_stream(outputFormatContext, codec);
if (!out_stream) {
return;
}
avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
out_stream->codecpar->codec_tag = 0;
out_stream->time_base = (AVRational){1, 90000};
out_stream->avg_frame_rate = inStream->avg_frame_rate;
}
else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
// Setup AAC audio stream
const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_encoder) {
fprintf(stderr, "AAC encoder not found\n");
return;
}
AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder);
if (!out_stream) {
return;
}
// Set AAC parameters
out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
out_stream->codecpar->codec_id = AV_CODEC_ID_AAC;
out_stream->codecpar->sample_rate = in_codecpar->sample_rate;
out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP;
out_stream->codecpar->channels = in_codecpar->channels;
out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels);
out_stream->codecpar->bit_rate = 128000;
out_stream->codecpar->frame_size = 1024; // AAC frame size
out_stream->time_base = (AVRational){1, in_codecpar->sample_rate};
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
@ -221,22 +322,58 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
}
}
AVDictionary *opts = NULL;
// Set output format options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
res = avformat_write_header(outputFormatContext, &opts);
av_dict_free(&opts);
if (res < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
#if 0
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
#endif
uint32_t framesToSkip = 16;
uint32_t framesSkipped = 0;
// Skip initial frames
while (framesSkipped < framesToSkip) {
if (av_read_frame(inputFormatContext, &packet) < 0)
break;
if (packet.stream_index == 0) { // Video stream
framesSkipped++;
}
av_packet_unref(&packet);
}
auto startTime = av_gettime();
// int64_t durationNs = (int64_t)duration * 1000000;
int64_t durationNs = (int64_t)(duration + 32) * 1000;
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
while (1) {
if ((av_gettime() - startTime) >= durationNs) {
// printf("Duration limit reached (%d seconds)\n", ctx->duration_secs);
break;
}
#if 0
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
@ -249,11 +386,35 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
fprintf(stderr, "Error muxing packet\n");
break;
}
#endif
if (av_read_frame(inputFormatContext, &packet) < 0) break;
// Skip audio packets
if (inputFormatContext->streams[packet.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
av_packet_unref(&packet);
continue;
}
// Adjust packet timebase
AVStream *in_stream = inputFormatContext->streams[packet.stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
packet.pos = -1;
res = av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (res < 0)
{
break;
}
}
stop_thread.join();
// stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);

@ -10,7 +10,7 @@
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle);
class RTSPRecorder {

@ -4,12 +4,27 @@
#include "Streaming.h"
#include <iostream>
#include <string>
#include <thread>
#include <atomic>
#include <android/api-level.h>
#include <android/log.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
#if 0
StreamForwarder::~StreamForwarder() {
stop();
@ -156,4 +171,216 @@ void StreamForwarder::processFrame(AVFrame* frame) {
frame->width, frame->height);
}
}
#endif
#endif
// Captures the RTSP source URL and the push-destination URL. No network
// activity happens here; forwarding begins when start()/run() is called.
RtspForwarder::RtspForwarder(const std::string& input, const std::string& output)
    : inputUrl(input), outputUrl(output), isRunning(false)
{
}
// Reports whether the forwarding loop in run() is currently active.
bool RtspForwarder::isStreaming() const
{
    return isRunning;
}
// Runs the forwarding loop. This call BLOCKS until stop() is invoked, the
// stream ends, or a fatal error occurs. Returns true on a clean shutdown,
// false if the stream could not be opened/forwarded.
//
// BUGFIX: the previous implementation discarded run()'s result and returned
// true unconditionally, so callers (which log "Failed to open stream" on a
// false return) could never observe a connection failure.
bool RtspForwarder::start()
{
    return run() == 0;
}
// Requests the forwarding loop in run() to exit at its next iteration;
// returns true unconditionally.
// NOTE(review): stop() is expected to be called from a different thread than
// run(); if isRunning is a plain bool (its declaration is not visible here)
// this read/write pair is a data race — confirm it is std::atomic<bool>.
bool RtspForwarder::stop()
{
    isRunning = false;
    return true;
}
int RtspForwarder::run()
{
isRunning = true;
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
int ret;
int videoStreamIndex = -1;
int64_t startTime = AV_NOPTS_VALUE;
std::string url = inputUrl;
if (!m_userName.empty())
{
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Input options
AVDictionary* inputOptions = nullptr;
av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout
av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer
// Output options
AVDictionary* outputOptions = nullptr;
av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&outputOptions, "f", "rtsp", 0);
std::cout << "Opening input: " << url << std::endl;
// Open input
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions);
if (ret < 0) {
std::cerr << "Could not open input: " << av_err2str(ret) << std::endl;
return ret;
}
// Get stream info
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) {
// std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
return ret;
}
// Find video stream
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1) {
// std::cerr << "No video stream found" << std::endl;
avformat_close_input(&inputFormatContext);
return -1;
}
// Allocate output context
avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str());
if (!outputFormatContext) {
std::cerr << "Could not create output context" << std::endl;
avformat_close_input(&inputFormatContext);
return AVERROR_UNKNOWN;
}
// Create output streams by copying from input
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVCodecParameters* inCodecpar = inStream->codecpar;
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
std::cerr << "Failed to allocate output stream" << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return AVERROR_UNKNOWN;
}
ret = avcodec_parameters_copy(outStream->codecpar, inCodecpar);
if (ret < 0) {
std::cerr << "Failed to copy codec parameters" << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return ret;
}
// Fix codec tag
outStream->codecpar->codec_tag = 0;
// Copy time base
outStream->time_base = inStream->time_base;
}
// Open output
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
std::cerr << "Could not open output URL: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return ret;
}
}
// Write header
ret = avformat_write_header(outputFormatContext, &outputOptions);
if (ret < 0) {
std::cerr << "Error writing header: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}
// Main loop - read and write packets
AVPacket packet;
while (isRunning) {
ret = av_read_frame(inputFormatContext, &packet);
if (ret < 0) {
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
std::cerr << "End of stream or timeout, reconnecting in "
<< reconnectDelayMs << "ms" << std::endl;
std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs));
avformat_close_input(&inputFormatContext);
ret = avformat_open_input(&inputFormatContext, inputUrl.c_str(), nullptr, &inputOptions);
if (ret < 0) continue;
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) continue;
continue;
}
break;
}
// Fix timestamps if enabled
if (fixTimestamps) {
// Handle timestamp issues similar to FFmpeg warning
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
if (packet.pts == AV_NOPTS_VALUE) {
// Generate PTS if missing
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
inStream->time_base);
packet.dts = packet.pts;
}
// Rescale timestamps to output timebase
packet.pts = av_rescale_q_rnd(packet.pts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration,
inStream->time_base,
outStream->time_base);
}
// Write packet to output
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl;
break;
}
}
// Write trailer
av_write_trailer(outputFormatContext);
// Cleanup
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return 0;
}

@ -7,6 +7,11 @@
#include <string>
#include <memory>
#include <functional>
#include <iostream>
#include <thread>
#include <atomic>
#include <android/multinetwork.h>
@ -21,9 +26,21 @@ class Streaming
{
public:
virtual ~Streaming() {}
virtual void start() {}
virtual void stop() {}
virtual bool start() { return false; }
virtual bool stop() { return false; }
virtual bool isStreaming() const { return false; }
void setAuth(const std::string& userName, const std::string& password)
{
m_userName = userName;
m_password = password;
}
protected:
std::string m_userName;
std::string m_password;
};
#if 0
class StreamForwarder : public Streaming
{
@ -44,7 +61,30 @@ private:
bool openInput(const std::string& inputUrl);
bool openOutput(const std::string& outputUrl);
void forwardPackets();
void setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback);
};
#endif
class RtspForwarder : public Streaming {
private:
std::string inputUrl;
std::string outputUrl;
std::atomic<bool> isRunning;
// Options
int reconnectDelayMs = 5000;
bool fixTimestamps = true;
public:
RtspForwarder(const std::string& input, const std::string& output);
virtual bool start();
virtual bool stop();
virtual bool isStreaming() const;
int run();
};
#endif //MICROPHOTO_STREAMING_H

@ -32,7 +32,15 @@ std::string HangYuCtrl::GetStreamingUrl(uint8_t channel)
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
int res = 0;
for (int idx = 0; idx < 10; idx++)
{
res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res == 0 && !resData.empty())
{
break;
}
}
if (res != 0 || resData.empty())
{
return "";
@ -72,7 +80,7 @@ bool HangYuCtrl::UpdateTime(time_t ts)
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + " /System/Time";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);

@ -19,6 +19,7 @@ public:
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(std::vector<uint8_t>& img);
virtual bool TakeVideo(uint32_t duration, std::string path);
virtual bool HasAuthOnStreaming() const { return true; }
private:

@ -3,7 +3,7 @@
//
#include "VendorCtrl.h"
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle) :
// Base constructor: captures the camera's connection parameters (address,
// credentials, channel, and the Android network handle used for socket binding).
// NOTE(review): the newly added syncTime parameter (default true) is accepted
// but neither stored in a member nor otherwise used here — presumably intended
// to control whether derived controllers push the device time; confirm intent.
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) :
m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle)
{
}

@ -11,7 +11,7 @@
class VendorCtrl {
public:
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle);
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true);
virtual ~VendorCtrl() {}
virtual bool SetOsd() = 0;
@ -20,6 +20,7 @@ public:
virtual bool UpdateTime(time_t ts) = 0;
virtual bool TakePhoto(std::vector<uint8_t>& img) = 0;
virtual bool TakeVideo(uint32_t duration, std::string path) = 0;
virtual bool HasAuthOnStreaming() const { return false; }
protected:

@ -4,6 +4,9 @@
#include "YuShiCtrl.h"
#include "httpclient.h"
#include "netcamera.h"
#include <json/json.h>
YuShiCtrl::~YuShiCtrl()
{
@ -24,6 +27,38 @@ std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
{
	// Queries the camera's LAPI endpoint for the live-stream URL of stream 0 on
	// the given channel and extracts it from the JSON response. Returns "" on
	// any failure (request error, empty/unparsable response, or missing field).
	// Endpoint shape:
	// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<TransType>&TransProtocol=<TransProtocol>
	char url[128] = { 0 };
	snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel);

	std::vector<uint8_t> resData;
	// This request uses HTTP digest auth (unlike the BASIC auth used elsewhere
	// in this file — NOTE(review): confirm which scheme the device expects).
	int res = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
	if (res != 0 || resData.empty())
	{
		return "";
	}

	// NUL-terminate the buffer; parsing below stops just before this byte.
	resData.push_back(0);

	Json::CharReaderBuilder builder;
	std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
	Json::Value json;
	const char* doc = (const char*)&(resData[0]);
	// Parse [doc, doc + size - 1): excludes the trailing NUL appended above.
	if (reader->parse(doc, doc + resData.size() - 1, &json, NULL))
	{
		// Expected layout (from the membership checks below):
		// { "Response": { "Data": { "URL": "..." } } }
		if (json.isMember("Response"))
		{
			Json::Value& jsonRes = json["Response"];
			if (jsonRes.isMember("Data"))
			{
				Json::Value& jsonData = jsonRes["Data"];
				if (jsonData.isMember("URL"))
				{
					// NOTE(review): asCString() assumes "URL" is a string value;
					// a non-string here would throw — consider isString() first.
					return std::string(jsonData["URL"].asCString());
				}
			}
		}
	}

	// Fall-through: parse failure or expected fields absent.
	return "";
}
@ -31,14 +66,27 @@ bool YuShiCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
#if 0
Json::Value jsonData(Json::objectValue);
jsonData["TimeZone"] = "GMT+08:00";
jsonData["DeviceTime"] = (int64_t)ts;
jsonData["DateFormat"] = 0; // YYYY-MM-DD
jsonData["HourFormat"] = 1; // 24H
#endif
return false;
std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}";
std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool YuShiCtrl::TakePhoto(std::vector<uint8_t>& img)

@ -98,10 +98,17 @@ public class MicroPhotoService extends Service {
public static final int MSG_WHAT_LOG = 10;
public final static int MSG_WHAT_SENDING_HB = 40;
public final static int MSG_WHAT_MAX = 1000;
public final static int MEDIA_TYPE_PHOTO = 0;
public final static int MEDIA_TYPE_VIDEO = 1;
public final static int MEDIA_TYPE_LOG = 2;
public final static int MEDIA_TYPE_STREAMING = 0x10;
public final static int MEDIA_TYPE_STREAMING_OFF = 0x11;
public final static int BROADCAST_REQUEST_CODE_HEARTBEAT = 1;
public final static int BROADCAST_REQUEST_CODE_TAKING_PHOTO = 2;
public final static int BROADCAST_REQUEST_CODE_GPS = 3;
@ -127,15 +134,17 @@ public class MicroPhotoService extends Service {
private static final String EXTRA_PARAM_CHANNEL = "Channel";
private static final String EXTRA_PARAM_PRESET = "Preset";
private static final String EXTRA_PARAM_PHOTO_OR_VIDEO = "PhotoOrVideo";
private static final String EXTRA_PARAM_MEDIA_TYPE = "MediaType";
private static final String EXTRA_PARAM_URL = "Url";
private static final String EXTRA_PARAM_SCHEDULES = "Schedules";
private static final String EXTRA_PARAM_SCHEDULE = "Schedule_";
private static final String EXTRA_PARAM_TAKING_TIME = "TakingTime";
private static final String EXTRA_PARAM_TIME = "Time";
// private static final String EXTRA_PARAM_TIMER_UID = "TimerUid";
// private static final String EXTRA_PARAM_TIMEOUT = "Timeout";
// private static final String EXTRA_PARAM_TIMES = "Times";
// private static final String EXTRA_PARAM_ELASPED_TIMES = "ElapsedTimes";
private static final String FOREGROUND_CHANNEL_ID = "fg_mpapp";
public static class STATE_SERVICE {
public static final int CONNECTED = 10;
public static final int NOT_CONNECTED = 0;
@ -513,7 +522,7 @@ public class MicroPhotoService extends Service {
int channel = (int) ((val & 0xFFFF000L) >> 12);
int preset = (int) ((val & 0xFF0L) >> 4);
boolean photoOrVideo = ((val & 0xFL) == 0);
int mediaType = (int)(val & 0xFL);
if (channel >= 256)
{
@ -523,7 +532,7 @@ public class MicroPhotoService extends Service {
{
infoLog("IMG Timer Fired: CH=" + channel + " PR=" + preset);
}
mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, ts, photoOrVideo);
mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, ts, null, mediaType);
}
}
@ -536,12 +545,20 @@ public class MicroPhotoService extends Service {
} else if (TextUtils.equals(ACTION_TAKE_PHOTO_MANUALLY, action)) {
int channel = intent.getIntExtra(EXTRA_PARAM_CHANNEL, 0);
int preset = intent.getIntExtra(EXTRA_PARAM_PRESET, 0xFF);
String url = intent.getStringExtra(EXTRA_PARAM_URL);
// long ts = intent.getLongExtra(EXTRA_PARAM_TIME, 0);
boolean photoOrVideo = intent.getBooleanExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, true);
int mediaType = 0;
if (intent.hasExtra(EXTRA_PARAM_MEDIA_TYPE)) {
mediaType = intent.getIntExtra(EXTRA_PARAM_MEDIA_TYPE, 0);
} else if (intent.hasExtra(EXTRA_PARAM_PHOTO_OR_VIDEO)) {
boolean photoOrVideo = intent.getBooleanExtra(EXTRA_PARAM_PHOTO_OR_VIDEO, true);
mediaType = photoOrVideo ? 0 : 1;
}
long ts = System.currentTimeMillis() / 1000;
Log.i(TAG, "Take Photo CH=" + channel + " PR=" + preset + " Mannually");
mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, 0, photoOrVideo);
mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, 0, url, mediaType);
} else if (TextUtils.equals(ACTION_UPDATE_CONFIGS, action)) {
int restart = intent.getIntExtra("restart", 0);
Log.i(TAG, "UPD CFG Fired ACTION=" + action + " restart=" + restart);
@ -1605,7 +1622,7 @@ cellSignalStrengthGsm.getDbm();
protected native long[] getPhotoTimeData(long handler, long startTime);
protected native long[] getPhotoTimeData2(long handler);
// protected native long[] getNextScheduleItem(long handler);
protected native boolean notifyToTakePhoto(long handler, int channel, int preset, long scheduleTime, boolean photoOrVideo);
protected native boolean notifyToTakePhoto(long handler, int channel, int preset, long scheduleTime, String url, int mediaType);
protected native boolean sendHeartbeat(long handler, int signalLevel);
protected native boolean reloadConfigs(long handler);

@ -38,14 +38,18 @@ public class FileDownloader {
connection.setDoInput(true);
connection.connect();
final File temp = new File(filePath);
if (temp.exists())
temp.delete();
temp.createNewFile();
if (temp.exists()) {
long fileSize = temp.length();
connection.setRequestProperty("Range", "bytes=" + Long.toString(fileSize) + "-");
}
// if (temp.exists())
// temp.delete();
// temp.createNewFile();
temp.setReadable(true, false);
temp.setWritable(true, false);
downloadFile = temp;
Log.d("download", "url " + urlString + "\n save to " + temp);
os = new FileOutputStream(temp);
os = new FileOutputStream(temp, true);
String encoding = connection.getContentEncoding();
is = connection.getInputStream();

@ -4,7 +4,7 @@ plugins {
def AppMajorVersion = 1
def AppMinorVersion = 1
def AppBuildNumber = 11
def AppBuildNumber = 12
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber

Loading…
Cancel
Save