Adjust implementation

The video is still a black screen
main
Matthew 2 months ago
parent cff42bc5c3
commit 9b59e0f2cf

@@ -63,9 +63,16 @@ bool RtspStreamer::initialize(const std::string& url, int width, int height, int
     codecpar->format = AV_PIX_FMT_YUV420P;
     codecpar->bit_rate = 2000000; // 2 Mbps
-    // Stream timebase (90kHz is standard for H.264 in RTSP)
+    // Set stream timebase (90kHz is standard for H.264 in RTSP)
     mVideoStream->time_base = (AVRational){1, 90000};
+    // Set additional RTSP parameters
+    av_dict_set(&mFormatCtx->metadata, "rtsp_transport", "tcp", 0);
+    // Configure FFmpeg's timestamp handling:
+    // AVFMT_FLAG_GENPTS asks FFmpeg to generate missing PTS values
+    mFormatCtx->flags |= AVFMT_FLAG_GENPTS;
     // Open output URL
     if (!(mFormatCtx->oformat->flags & AVFMT_NOFILE)) {
         ret = avio_open(&mFormatCtx->pb, url.c_str(), AVIO_FLAG_WRITE);
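
Note on the transport option above: `mFormatCtx->metadata` holds muxer metadata (title, author, and the like), and the RTSP muxer does not read `rtsp_transport` from it, so this call has no effect on the transport. The option is normally passed in an AVDictionary when the header is written. A minimal sketch, assuming the same `mFormatCtx` and that it is applied at the `avformat_write_header()` call site:

    // Pass rtsp_transport as a muxer option rather than as metadata.
    AVDictionary* opts = nullptr;
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);
    int ret = avformat_write_header(mFormatCtx, &opts);
    av_dict_free(&opts); // entries the muxer did not consume remain here
    if (ret < 0) {
        // header write failed; handle as the surrounding code does
    }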
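Separately, given the commit message (the video is still black), it is worth verifying that the stream's codec parameters carry the H.264 SPS/PPS: FFmpeg's RTSP muxer builds the SDP `sprop-parameter-sets` from `extradata`, and without it many players render nothing. A hedged sketch, where `csd`/`csdSize` stand for the codec-config bytes the encoder emits (for example MediaCodec's BUFFER_FLAG_CODEC_CONFIG buffer; neither name appears in this commit):

    // Hand the SPS/PPS to the muxer; FFmpeg requires zeroed padding bytes.
    mVideoStream->codecpar->extradata =
            (uint8_t*)av_mallocz(csdSize + AV_INPUT_BUFFER_PADDING_SIZE);
    if (mVideoStream->codecpar->extradata) {
        memcpy(mVideoStream->codecpar->extradata, csd, csdSize);
        mVideoStream->codecpar->extradata_size = csdSize;
    }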
@@ -178,9 +185,16 @@ void RtspStreamer::stop() {
     }
 }
+// streamLoop now smooths timestamps instead of passing raw PTS through:
 void RtspStreamer::streamLoop() {
+    bool firstFrame = true;
+    int64_t firstPts = 0;
+    int64_t lastPts = 0;
+    uint32_t rtpTimestamp = 0;
+    // Frame duration in RTP clock units (90kHz for H.264).
+    // For example, at 30 fps: 90000 / 30 = 3000 units per frame.
+    const int frameRateRtp = 90000 / 30; // Adjust 30 to match your actual fps
     while (mRunning) {
         EncodedFrame frame;
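
The hard-coded `90000 / 30` only works for integer frame rates; `av_rescale_q()` expresses the same conversion for any rate and is overflow-safe. A small sketch (the helper names are illustrative, not part of the commit):

    #include <libavutil/mathematics.h>
    #include <libavutil/rational.h>

    // Microsecond delta -> 90kHz RTP ticks; same math as (diffUs * 90) / 1000.
    static int64_t usToRtpTicks(int64_t diffUs) {
        return av_rescale_q(diffUs, (AVRational){1, 1000000}, (AVRational){1, 90000});
    }

    // Per-frame duration in ticks for a possibly fractional rate, e.g. 30000/1001.
    static int64_t frameDurationTicks(AVRational fps) {
        return av_rescale_q(1, av_inv_q(fps), (AVRational){1, 90000});
    }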
@@ -204,47 +218,68 @@ void RtspStreamer::streamLoop() {
         // Reset the packet
         av_packet_unref(mPacket);
-        // Save first timestamp for offset calculation
+        // Initialize timestamp tracking on the first frame
         if (firstFrame) {
             firstPts = frame.presentationTimeUs;
+            lastPts = firstPts;
+            rtpTimestamp = 0; // Start at 0
             firstFrame = false;
         }
-        // Create a copy of the frame data that FFmpeg will manage
+        // Advance the RTP timestamp by smoothed deltas rather than raw
+        // capture timestamps, to ensure smooth, consistent timing
+        if (frame.presentationTimeUs - lastPts > 1000000) { // Gap larger than 1 second
+            // There has been a significant gap - adjust smoothly
+            __android_log_print(ANDROID_LOG_WARN, "RtspStreamer",
+                                "Large timestamp gap detected: %lld -> %lld, smoothing",
+                                (long long)lastPts, (long long)frame.presentationTimeUs);
+            // Increment by one normal frame duration instead of the actual gap
+            rtpTimestamp += frameRateRtp;
+        } else {
+            // Compute the increment from the time diff, but cap large jumps
+            int64_t diffUs = frame.presentationTimeUs - lastPts;
+            int32_t rtpIncrement = (diffUs * 90) / 1000; // Convert us to 90kHz units
+            if (rtpIncrement > frameRateRtp * 3) {
+                __android_log_print(ANDROID_LOG_WARN, "RtspStreamer",
+                                    "Limiting large timestamp increment: %d", rtpIncrement);
+                rtpIncrement = frameRateRtp;
+            }
+            rtpTimestamp += rtpIncrement;
+        }
+        lastPts = frame.presentationTimeUs;
+        // Create packet from frame data
         uint8_t* buffer = (uint8_t*)av_malloc(frame.size);
         if (!buffer) {
-            LOGE("Failed to allocate buffer for frame");
-            delete[] frame.data; // Free our copy
+            __android_log_print(ANDROID_LOG_ERROR, "RtspStreamer", "Failed to allocate buffer");
+            delete[] frame.data;
             continue;
         }
-        // Copy frame data to the FFmpeg-managed buffer
+        // Copy data to the FFmpeg-managed buffer
         memcpy(buffer, frame.data, frame.size);
-        delete[] frame.data; // Free our copy
-        frame.data = nullptr;
+        // We can now free our copy of the data
+        delete[] frame.data;
+        frame.data = nullptr; // Avoid accidental double-delete
         // Let FFmpeg manage the buffer
         int ret = av_packet_from_data(mPacket, buffer, frame.size);
         if (ret < 0) {
-            LOGE("Failed to create packet from data: %d", ret);
-            av_free(buffer); // Free FFmpeg buffer on error
+            __android_log_print(ANDROID_LOG_ERROR, "RtspStreamer",
+                                "Failed to create packet: %d", ret);
+            av_free(buffer);
             continue;
         }
         // Now mPacket owns the buffer, we don't need to free it manually
-        // Offset timestamp by first frame for proper timing
-        int64_t pts = frame.presentationTimeUs - firstPts;
-        // Convert to stream timebase (90kHz)
-        pts = av_rescale_q(pts, (AVRational){1, 1000000}, mVideoStream->time_base);
-        // Set packet properties
-        mPacket->pts = pts;
-        mPacket->dts = pts;
-        mPacket->duration = 0;
+        // Set packet properties with the smoothed timestamp
+        mPacket->pts = rtpTimestamp;
+        mPacket->dts = rtpTimestamp;
+        mPacket->duration = frameRateRtp; // Set a proper per-frame duration
         mPacket->flags = frame.isKeyFrame ? AV_PKT_FLAG_KEY : 0;
         mPacket->stream_index = mVideoStream->index;
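
The gap smoothing in the hunk above boils down to a small pure rule: advance by the real-time delta, but collapse both long stalls (over 1 s) and jumps larger than three frames to a single nominal frame step. Restated as a standalone function (names are illustrative):

    // Mirrors the loop's policy; frameTicks is e.g. 3000 at 30 fps.
    static uint32_t nextRtpTimestamp(uint32_t current, int64_t diffUs, int32_t frameTicks) {
        if (diffUs > 1000000)               // gap larger than 1 second
            return current + frameTicks;    // smooth over the discontinuity
        int64_t inc = (diffUs * 90) / 1000; // convert us to 90kHz ticks
        if (inc > (int64_t)frameTicks * 3)  // cap large jumps
            inc = frameTicks;
        return current + (uint32_t)inc;
    }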
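One subtlety in the buffer hand-off: FFmpeg documents that the buffer passed to `av_packet_from_data()` must be allocated with `AV_INPUT_BUFFER_PADDING_SIZE` extra zeroed bytes beyond `size`. The allocation above uses exactly `frame.size`, which risks out-of-bounds reads downstream. A sketch of the allocation with padding, keeping the same ownership contract:

    uint8_t* buffer = (uint8_t*)av_malloc(frame.size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (buffer) {
        memcpy(buffer, frame.data, frame.size);
        memset(buffer + frame.size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
        int ret = av_packet_from_data(mPacket, buffer, frame.size);
        if (ret < 0)
            av_free(buffer);  // on failure, ownership was not transferred
        // on success mPacket->buf owns the memory; av_packet_unref() frees it
    }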
@@ -253,13 +288,11 @@ void RtspStreamer::streamLoop() {
         if (ret < 0) {
             char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
             av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
-            LOGE("Error writing frame: %d (%s)", ret, errbuf);
+            __android_log_print(ANDROID_LOG_ERROR, "RtspStreamer",
+                                "Error writing frame: %d (%s)", ret, errbuf);
-            // Handle reconnection logic as before...
+            // Handle reconnection logic...
         }
+        // frame.data was already freed above and ownership of the buffer was
+        // transferred to FFmpeg, so there is nothing to clean up here
     }
 }
