Implement video streaming

PtzNew
Matthew 3 months ago
parent 120d7fdde7
commit 5d312ed1f0

@@ -4,12 +4,27 @@
#include "Streaming.h"
#include <iostream>
#include <string>
#include <thread>
#include <atomic>
#include <android/api-level.h>
#include <android/log.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
#if 0
StreamForwarder::~StreamForwarder() {
stop();
@@ -159,94 +174,213 @@ void StreamForwarder::processFrame(AVFrame* frame) {
#endif
bool StreamForwarder::initialize(const std::string& input, const std::string& output) {
inputUrl = input;
outputUrl = output;
if (avformat_open_input(&inputFormatContext, inputUrl.c_str(), nullptr, nullptr) < 0) {
return false;
RtspForwarder::RtspForwarder(const std::string& input, const std::string& output)
: inputUrl(input), outputUrl(output), isRunning(false)
{
}
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
cleanup();
return false;
bool RtspForwarder::isStreaming() const
{
return isRunning;
}
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mpegts",
outputUrl.c_str()) < 0) {
cleanup();
return false;
bool RtspForwarder::start()
{
run();
return true;
}
bool RtspForwarder::stop()
{
isRunning = false;
return true;
}
bool StreamForwarder::start()
int RtspForwarder::run()
{
if (!inputFormatContext || !outputFormatContext) {
return false;
isRunning = true;
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
int ret;
int videoStreamIndex = -1;
int64_t startTime = AV_NOPTS_VALUE;
std::string url = inputUrl;
if (!m_userName.empty())
{
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Input options
AVDictionary* inputOptions = nullptr;
av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout
av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer
// Output options
AVDictionary* outputOptions = nullptr;
av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&outputOptions, "f", "rtsp", 0);
std::cout << "Opening input: " << url << std::endl;
// Open input
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions);
if (ret < 0) {
std::cerr << "Could not open input: " << av_err2str(ret) << std::endl;
return ret;
}
// Get stream info
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) {
// std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
return ret;
}
// Find video stream
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
if (videoStreamIndex == -1) {
// std::cerr << "No video stream found" << std::endl;
avformat_close_input(&inputFormatContext);
return -1;
}
// Allocate output context
avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str());
if (!outputFormatContext) {
std::cerr << "Could not create output context" << std::endl;
avformat_close_input(&inputFormatContext);
return AVERROR_UNKNOWN;
}
// Create output streams by copying from input
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext,
inStream->codec->codec);
AVCodecParameters* inCodecpar = inStream->codecpar;
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
cleanup();
return false;
std::cerr << "Failed to allocate output stream" << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return AVERROR_UNKNOWN;
}
if (avcodec_parameters_copy(outStream->codecpar,
inStream->codecpar) < 0) {
cleanup();
return false;
ret = avcodec_parameters_copy(outStream->codecpar, inCodecpar);
if (ret < 0) {
std::cerr << "Failed to copy codec parameters" << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return ret;
}
// Fix codec tag
outStream->codecpar->codec_tag = 0;
// Copy time base
outStream->time_base = inStream->time_base;
}
// Open output
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputUrl.c_str(),
AVIO_FLAG_WRITE) < 0) {
cleanup();
return false;
ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
std::cerr << "Could not open output URL: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return ret;
}
}
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
cleanup();
return false;
// Write header
ret = avformat_write_header(outputFormatContext, &outputOptions);
if (ret < 0) {
std::cerr << "Error writing header: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}
// Main loop - read and write packets
AVPacket packet;
while (!stopRequested && av_read_frame(inputFormatContext, &packet) >= 0) {
av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
while (isRunning) {
ret = av_read_frame(inputFormatContext, &packet);
if (ret < 0) {
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
std::cerr << "End of stream or timeout, reconnecting in "
<< reconnectDelayMs << "ms" << std::endl;
std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs));
avformat_close_input(&inputFormatContext);
ret = avformat_open_input(&inputFormatContext, inputUrl.c_str(), nullptr, &inputOptions);
if (ret < 0) continue;
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) continue;
continue;
}
av_write_trailer(outputFormatContext);
return true;
break;
}
bool StreamForwarder::stop()
{
stopRequested = true;
return true;
// Fix timestamps if enabled
if (fixTimestamps) {
// Handle timestamp issues similar to FFmpeg warning
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
if (packet.pts == AV_NOPTS_VALUE) {
// Generate PTS if missing
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
inStream->time_base);
packet.dts = packet.pts;
}
bool StreamForwarder::isStreaming() const
{
return !stopRequested;
// Rescale timestamps to output timebase
packet.pts = av_rescale_q_rnd(packet.pts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration,
inStream->time_base,
outStream->time_base);
}
void StreamForwarder::cleanup() {
if (inputFormatContext) {
avformat_close_input(&inputFormatContext);
// Write packet to output
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl;
break;
}
}
if (outputFormatContext) {
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
// Write trailer
av_write_trailer(outputFormatContext);
// Cleanup
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}
return 0;
}
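The timestamp handling in run() above follows the usual FFmpeg remuxing pattern: a packet's pts, dts and duration are expressed in the input stream's time base and have to be rescaled into the output stream's time base before the packet is written. A condensed sketch of just that step, pulled out into a standalone helper (rescalePacket is a hypothetical name, not part of this commit; inStream and outStream are assumed to be the matching input/output AVStream pair):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Sketch only: rescale a packet's timing fields from the input stream's
// time base to the output stream's, rounding to the nearest value and
// clamping to the representable range.
static void rescalePacket(AVPacket& packet, const AVStream* inStream, const AVStream* outStream)
{
    packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base,
                                  static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base,
                                  static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
    packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
}

As in run(), a packet whose pts is AV_NOPTS_VALUE would need one generated (for example from av_gettime()) before this rescaling step is meaningful.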

@@ -8,6 +8,10 @@
#include <string>
#include <memory>
#include <functional>
#include <iostream>
#include <thread>
#include <atomic>
#include <android/multinetwork.h>
@@ -25,6 +29,15 @@ public:
virtual bool start() { return false; }
virtual bool stop() { return false; }
virtual bool isStreaming() const { return false; }
void setAuth(const std::string& userName, const std::string& password)
{
m_userName = userName;
m_password = password;
}
protected:
std::string m_userName;
std::string m_password;
};
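setAuth() stores the camera credentials; run() splices them into the input URL immediately after the scheme by inserting "user:password@" at byte offset 7, i.e. right behind "rtsp://". A minimal sketch of that transformation as a standalone helper which checks the prefix instead of assuming it (buildAuthUrl is a hypothetical name, not part of this commit):

#include <string>

// Sketch only: "rtsp://host/path" -> "rtsp://user:pass@host/path".
// Returns the URL unchanged if there are no credentials or the scheme differs.
static std::string buildAuthUrl(const std::string& url,
                                const std::string& user,
                                const std::string& pass)
{
    const std::string scheme = "rtsp://";
    if (user.empty() || url.compare(0, scheme.size(), scheme) != 0)
        return url;
    return scheme + user + ":" + pass + "@" + url.substr(scheme.size());
}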
@@ -53,30 +66,25 @@ private:
#endif
class StreamForwarder : public Streaming {
class RtspForwarder : public Streaming {
private:
AVFormatContext* inputFormatContext;
AVFormatContext* outputFormatContext;
std::string inputUrl;
std::string outputUrl;
std::atomic<bool> isRunning;
public:
StreamForwarder() : inputFormatContext(nullptr), outputFormatContext(nullptr) {}
// Options
int reconnectDelayMs = 5000;
bool fixTimestamps = true;
~StreamForwarder() {
cleanup();
}
public:
RtspForwarder(const std::string& input, const std::string& output);
bool initialize(const std::string& input, const std::string& output);
virtual bool start();
virtual bool stop();
virtual bool isStreaming() const;
private:
void cleanup();
bool stopRequested;
int run();
};
#endif //MICROPHOTO_STREAMING_H
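Because start() simply calls the blocking run() loop, a caller would normally drive RtspForwarder from a worker thread and end forwarding with stop(), which clears isRunning. A minimal usage sketch under that assumption (the URLs and credentials below are placeholders, not values from this project):

#include <chrono>
#include <thread>
#include "Streaming.h"

int main()
{
    RtspForwarder forwarder("rtsp://192.168.1.10:554/stream1",  // placeholder camera URL
                            "rtsp://127.0.0.1:8554/relay");     // placeholder output URL
    forwarder.setAuth("admin", "12345");                        // placeholder credentials

    std::thread worker([&forwarder] { forwarder.start(); });    // start() blocks inside run()
    std::this_thread::sleep_for(std::chrono::minutes(1));       // forward for a while
    forwarder.stop();                                           // clearing isRunning ends the read loop
    worker.join();
    return 0;
}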
