Implement short video recording with network cameras

PtzNew
Matthew 3 months ago
parent fe97b3c5bc
commit b4e7cf8fab

@@ -583,7 +583,7 @@ public:
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PLZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_OTG_STATE, closeDelayTime)
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{

@@ -1822,7 +1822,7 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
// RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
// dumper.start();
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, GetEthnetHandle());
dumpRtspToMp4(streamingUrl.c_str(), tmpFile.c_str(), localPhotoInfo.duration * 1000, localPhotoInfo.userName, localPhotoInfo.password, GetEthnetHandle());
ethernetPowerCtrl.reset();
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
@@ -1832,7 +1832,7 @@ bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
if (existsFile(tmpFile))
{
std::rename(tmpFile.c_str(), fullPath.c_str());
TakePhotoCb(3, localPhotoInfo, "", localPhotoInfo.photoTime);
TakePhotoCb(3, localPhotoInfo, fullPath, localPhotoInfo.photoTime);
}
else
{
@@ -5079,5 +5079,9 @@ VendorCtrl* CPhoneDevice::MakeVendorCtrl(int vendor, uint8_t channel, const std:
// Hang Yu - New
vendorCtrl = new HangYuCtrl(ip, userName, password, channel, netHandle);
}
if (vendorCtrl != NULL)
{
vendorCtrl->UpdateTime(time(NULL));
}
return vendorCtrl;
}

@@ -12,6 +12,7 @@ extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
@@ -21,18 +22,100 @@ extern "C" {
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#include <libavutil/log.h>
#include <android/log.h>
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
// Map FFmpeg log levels to Android log levels
int android_log_level;
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
android_log_level = ANDROID_LOG_FATAL;
break;
case AV_LOG_ERROR:
android_log_level = ANDROID_LOG_ERROR;
break;
case AV_LOG_WARNING:
android_log_level = ANDROID_LOG_WARN;
break;
case AV_LOG_INFO:
android_log_level = ANDROID_LOG_INFO;
break;
case AV_LOG_VERBOSE:
android_log_level = ANDROID_LOG_VERBOSE;
break;
case AV_LOG_DEBUG:
case AV_LOG_TRACE:
android_log_level = ANDROID_LOG_DEBUG;
break;
default:
android_log_level = ANDROID_LOG_INFO;
break;
}
// Format the log message
char log_message[1024];
vsnprintf(log_message, sizeof(log_message), fmt, vl);
// Send the log message to logcat
__android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) {
// Copy streams and fix time_base
for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
AVStream *in_stream = input_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(output_ctx, NULL);
if (!out_stream) {
return AVERROR_UNKNOWN;
}
// Copy codec parameters
int ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
if (ret < 0) {
return ret;
}
// Fix time base
out_stream->time_base = in_stream->time_base;
// Let the muxer choose an appropriate codec tag for the MP4 container
out_stream->codecpar->codec_tag = 0;
}
return 0;
}
int write_mp4_header(AVFormatContext *output_ctx) {
AVDictionary *opts = NULL;
// MP4 specific options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write header
int ret = avformat_write_header(output_ctx, &opts);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Header write failed: %s (code: %d)\n", errbuf, ret);
}
av_dict_free(&opts);
return ret;
}
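// Sketch (not part of this commit) showing how the two helpers above are meant
// to compose into a minimal MP4 remux setup; in_ctx is assumed to be an
// already-opened input context and open_mp4_output is an illustrative name.
static int open_mp4_output(AVFormatContext *in_ctx, const char *outputPath, AVFormatContext **out_ctx) {
    int ret = avformat_alloc_output_context2(out_ctx, NULL, "mp4", outputPath);
    if (ret < 0)
        return ret;
    ret = setup_output_streams(in_ctx, *out_ctx);
    if (ret < 0)
        return ret;
    // Open the output file unless the muxer writes through a custom AVIOContext
    if (!((*out_ctx)->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&(*out_ctx)->pb, outputPath, AVIO_FLAG_WRITE);
        if (ret < 0)
            return ret;
    }
    return write_mp4_header(*out_ctx);
}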
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
AVDictionary *options = NULL;
av_register_all();
avformat_network_init();
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
@@ -129,29 +212,45 @@ void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duratio
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
AVDictionary *options = NULL;
int res = 0;
av_register_all();
avformat_network_init();
// Set RTSP transport protocol option before opening
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_TRACE);
#endif
AVDictionary* options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "5000000", 0);
if (!userName.empty())
{
// Credentials are not passed through AVOptions here; see the sketch just
// below for embedding them in the RTSP URL instead.
// av_dict_set(&options, "rtsp_user", userName.c_str(), 0);
// av_dict_set(&options, "rtsp_password", password.c_str(), 0);
}
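// Sketch (not part of this commit): FFmpeg's RTSP demuxer takes credentials from
// the URL itself (rtsp://user:password@host/...), so they can alternatively be
// spliced into the URL. Assumes userName/password need no percent-encoding.
std::string authUrl(rtspUrl);
static const char kRtspScheme[] = "rtsp://";
if (!userName.empty() && authUrl.compare(0, sizeof(kRtspScheme) - 1, kRtspScheme) == 0
    && authUrl.find('@') == std::string::npos)
{
    authUrl.insert(sizeof(kRtspScheme) - 1, userName + ":" + password + "@");
    // avformat_open_input below would then be given authUrl.c_str() instead of rtspUrl
}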
// Restrict the allowed protocols via an AVOption; the input context has not
// been allocated yet at this point, so it cannot be configured directly.
av_dict_set(&options, "protocol_whitelist", "file,udp,rtp,tcp,rtsp", 0);
// Open input RTSP stream
if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
int res = avformat_open_input(&inputFormatContext, rtspUrl, nullptr, &options);
av_dict_free(&options);
if (res != 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open input: %s (error code: %d)\n", errbuf, res);
// fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
// fprintf(stderr, "Could not find stream information\n");
@@ -159,31 +258,6 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
return;
}
// Get socket file descriptor
if (NETWORK_UNSPECIFIED != netHandle)
{
int fd = -1;
if (inputFormatContext->pb) {
AVIOContext *io_ctx = inputFormatContext->pb;
// const char *url = io_ctx->filename;
// You can access socket options using av_opt API
res = av_opt_get_int(io_ctx, "fd", AV_OPT_SEARCH_CHILDREN, (int64_t*)&fd);
if (res >= 0 && fd >= 0) {
// printf("Socket file descriptor: %d\n", fd);
int res = android_setsocknetwork(netHandle, fd);
if (res == -1)
{
int errcode = errno;
// printf("android_setsocknetwork errno=%d", errcode);
// XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
}
}
}
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
@@ -194,21 +268,44 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
const AVCodecParameters *in_codecpar = inStream->codecpar;
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
// Copy video stream as-is
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *out_stream = avformat_new_stream(outputFormatContext, codec);
if (!out_stream) {
return;
}
avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
out_stream->codecpar->codec_tag = 0;
out_stream->time_base = (AVRational){1, 90000};
out_stream->avg_frame_rate = inStream->avg_frame_rate;
}
else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
// Setup AAC audio stream
const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_encoder) {
fprintf(stderr, "AAC encoder not found\n");
return;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder);
if (!out_stream) {
return;
}
// Set AAC parameters
out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
out_stream->codecpar->codec_id = AV_CODEC_ID_AAC;
out_stream->codecpar->sample_rate = in_codecpar->sample_rate;
out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP;
out_stream->codecpar->channels = in_codecpar->channels;
out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels);
out_stream->codecpar->bit_rate = 128000;
out_stream->codecpar->frame_size = 1024; // AAC frame size
out_stream->time_base = (AVRational){1, in_codecpar->sample_rate};
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
@@ -221,22 +318,58 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
}
}
AVDictionary *opts = NULL;
// Set output format options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
res = avformat_write_header(outputFormatContext, &opts);
av_dict_free(&opts);
if (res < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
#if 0
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
#endif
uint32_t framesToSkip = 16;
uint32_t framesSkipped = 0;
// Skip initial frames
while (framesSkipped < framesToSkip) {
if (av_read_frame(inputFormatContext, &packet) < 0)
break;
if (packet.stream_index == 0) { // Video stream
framesSkipped++;
}
av_packet_unref(&packet);
}
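// Sketch (not part of this commit): the skip loop above assumes stream 0 is the
// video stream; av_find_best_stream() avoids that assumption.
int videoStreamIndex = av_find_best_stream(inputFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
if (videoStreamIndex < 0)
    videoStreamIndex = 0; // fall back to the original assumption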
auto startTime = av_gettime();
// av_gettime() returns microseconds; duration is given in milliseconds
int64_t durationUs = (int64_t)duration * 1000;
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
while (1) {
if ((av_gettime() - startTime) >= durationUs) {
// printf("Duration limit reached (%d seconds)\n", ctx->duration_secs);
break;
}
#if 0
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
@@ -249,11 +382,14 @@ void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duratio
fprintf(stderr, "Error muxing packet\n");
break;
}
#endif
if (av_read_frame(inputFormatContext, &packet) < 0) break;
av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
}
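// Sketch (not part of this commit): a conventional remux loop would rescale each
// packet's timestamps from the input stream's time_base to the output stream's
// and use interleaved writes, essentially what the block disabled with #if 0
// above did, e.g.:
//
//   AVStream* inStream = inputFormatContext->streams[packet.stream_index];
//   AVStream* outStream = outputFormatContext->streams[packet.stream_index];
//   av_packet_rescale_ts(&packet, inStream->time_base, outStream->time_base);
//   packet.pos = -1;
//   if (av_interleaved_write_frame(outputFormatContext, &packet) < 0)
//       break;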
stop_thread.join();
// stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);

@@ -10,7 +10,7 @@
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle);
class RTSPRecorder {

@@ -72,7 +72,7 @@ bool HangYuCtrl::UpdateTime(time_t ts)
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + " /System/Time";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
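// With FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") the reqData body sent above
// ends up shaped like (illustrative timestamp):
// <?xml version="1.0" encoding="utf-8"?><Time><SystemTime>20250301T153000+08</SystemTime></Time>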

@@ -3,7 +3,7 @@
//
#include "VendorCtrl.h"
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle) :
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) :
m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle)
{
}

@@ -11,7 +11,7 @@
class VendorCtrl {
public:
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle);
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true);
virtual ~VendorCtrl() {}
virtual bool SetOsd() = 0;

@@ -4,6 +4,9 @@
#include "YuShiCtrl.h"
#include "httpclient.h"
#include "netcamera.h"
#include <json/json.h>
YuShiCtrl::~YuShiCtrl()
{
@@ -24,6 +27,38 @@ std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<TransType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
Json::CharReaderBuilder builder;
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
Json::Value json;
const char* doc = (const char*)&(resData[0]);
if (reader->parse(doc, doc + resData.size() - 1, &json, NULL))
{
if (json.isMember("Response"))
{
Json::Value& jsonRes = json["Response"];
if (jsonRes.isMember("Data"))
{
Json::Value& jsonData = jsonRes["Data"];
if (jsonData.isMember("URL"))
{
return std::string(jsonData["URL"].asCString());
}
}
}
}
return "";
}
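// The parser above expects a LAPI response shaped roughly like this
// (illustrative values only):
// {
//   "Response": {
//     "Data": { "URL": "rtsp://192.168.1.100:554/media/video1" }
//   }
// }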
@@ -31,14 +66,27 @@ bool YuShiCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
#if 0
Json::Value jsonData(Json::objectValue);
jsonData["TimeZone"] = "GMT+08:00";
jsonData["DeviceTime"] = (int64_t)ts;
jsonData["DateFormat"] = 0; // YYYY-MM-DD
jsonData["HourFormat"] = 1; // 24H
#endif
return false;
std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}";
std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
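// Sketch (not part of this commit): an equivalent request body built with jsoncpp
// (already included above) instead of string concatenation; buildTimeBody is an
// illustrative helper name.
static std::string buildTimeBody(time_t ts)
{
    Json::Value jsonData(Json::objectValue);
    jsonData["TimeZone"] = "GMT+08:00";
    jsonData["DateFormat"] = 0; // YYYY-MM-DD
    jsonData["HourFormat"] = 1; // 24H
    jsonData["DeviceTime"] = (Json::Int64)ts;
    Json::StreamWriterBuilder writer;
    writer["indentation"] = "";
    return Json::writeString(writer, jsonData);
}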
bool YuShiCtrl::TakePhoto(std::vector<uint8_t>& img)

@@ -21,6 +21,7 @@ static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid)
static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype purpose)
{
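// Returning immediately keeps curl's default socket handling; the
// android_setsocknetwork() binding below is left in place but is no longer reached.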
return CURL_SOCKOPT_OK;
net_handle_t netHandle = *((net_handle_t *)clientp);
int res = android_setsocknetwork(netHandle, curlfd);

@@ -38,14 +38,18 @@ public class FileDownloader {
connection.setDoInput(true);
final File temp = new File(filePath);
if (temp.exists())
temp.delete();
temp.createNewFile();
if (temp.exists()) {
long fileSize = temp.length();
// The Range header must be set before connect(); HttpURLConnection rejects
// request properties once the connection is open.
connection.setRequestProperty("Range", "bytes=" + Long.toString(fileSize) + "-");
}
connection.connect();
// if (temp.exists())
// temp.delete();
// temp.createNewFile();
temp.setReadable(true, false);
temp.setWritable(true, false);
downloadFile = temp;
Log.d("download", "url " + urlString + "\n save to " + temp);
os = new FileOutputStream(temp);
os = new FileOutputStream(temp, true);
String encoding = connection.getContentEncoding();
is = connection.getInputStream();
