Add cached video frame counting

Check the network condition according to the number of cached video frames: when too many frames are still queued for sending, new camera frames are dropped instead of being encoded.
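
Roughly, the mechanism looks like the sketch below (illustrative only: the class
and method names here are placeholders; the AtomicInteger counter and the
128-frame threshold are the ones actually used in the diff):

    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch of the back-pressure scheme: the connection increments a shared
    // counter for every video frame handed to the RTMP write thread, the write
    // thread decrements it once the frame has actually gone out on the socket,
    // and the encoder drops camera frames while too many are still in flight.
    class VideoFrameCacheThrottle {
        private static final int MAX_CACHED_FRAMES = 128;  // threshold used in SrsEncoder.onGetYuvFrame()
        private final AtomicInteger videoFrameCacheNumber = new AtomicInteger(0);

        // Producer side: a video packet was queued for sending.
        void onVideoFrameQueued() {
            videoFrameCacheNumber.getAndIncrement();
        }

        // Write thread: a video packet was written to the socket.
        void onVideoFrameSent() {
            videoFrameCacheNumber.getAndDecrement();
        }

        // Encoder side: false means the uplink is falling behind and the
        // current camera frame should be skipped rather than encoded.
        boolean shouldEncodeFrame() {
            return videoFrameCacheNumber.get() < MAX_CACHED_FRAMES;
        }
    }

A congested uplink makes the write thread fall behind, the counter climbs past
the threshold, and the encoder skips frames instead of letting the send queue
grow without bound.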

Signed-off-by: Leo Ma <begeekmyfriend@gmail.com>
Branch: camera2
Leo Ma committed 9 years ago
parent 236636ca74
commit 73c6c8e451

@@ -35,6 +35,7 @@ public class SrsEncoder {
     public static final int ABITRATE = 128 * 1000;  // 128kbps

     private SrsRtmp muxer;
+    private SrsRtmpPublisher publisher;
     private MediaCodec vencoder;
     private MediaCodecInfo vmci;
@@ -47,15 +48,15 @@ public class SrsEncoder {
     private byte[] mCroppedFrameBuffer;
     private boolean mCameraFaceFront = true;
     private long mPresentTimeUs;
+    private int vfmt_color;
     private int vtrack;
-    private int vcolor;
     private int atrack;

     public SrsEncoder() {
-        vcolor = chooseVideoEncoder();
-        if (vcolor == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
+        vfmt_color = chooseVideoEncoder();
+        if (vfmt_color == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
             VFORMAT = ImageFormat.YV12;
-        } else if (vcolor == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
+        } else if (vfmt_color == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
             VFORMAT = ImageFormat.NV21;
         } else {
             throw new IllegalStateException("Unsupported color format!");
@@ -66,7 +67,8 @@ public class SrsEncoder {
     }

     public int start() {
-        muxer = new SrsRtmp(rtmpUrl);
+        publisher = new SrsRtmpPublisher(rtmpUrl);
+        muxer = new SrsRtmp(publisher);
         try {
             muxer.start();
         } catch (IOException e) {
@@ -113,7 +115,7 @@ public class SrsEncoder {
         // setup the vencoder.
         // Note: landscape to portrait, 90 degree rotation, so we need to switch VWIDTH and VHEIGHT in configuration
         MediaFormat videoFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, VENC_WIDTH, VENC_HEIGHT);
-        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
+        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vfmt_color);
         videoFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
         videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, vbitrate);
         videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
@@ -171,7 +173,7 @@ public class SrsEncoder {
     private int preProcessYuvFrame(byte[] data) {
         if (mCameraFaceFront) {
-            switch (vcolor) {
+            switch (vfmt_color) {
                 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                     flipYUV420PlannerFrame(data, mFlippedFrameBuffer, VWIDTH, VHEIGHT);
                     rotateYUV420PlannerFrame(mFlippedFrameBuffer, mRotatedFrameBuffer, VWIDTH, VHEIGHT);
@@ -188,7 +190,7 @@ public class SrsEncoder {
                     throw new IllegalStateException("Unsupported color format!");
             }
         } else {
-            switch (vcolor) {
+            switch (vfmt_color) {
                 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                     rotateYUV420PlannerFrame(data, mRotatedFrameBuffer, VWIDTH, VHEIGHT);
                     cropYUV420PlannerFrame(mRotatedFrameBuffer, VHEIGHT, VWIDTH,
@@ -208,27 +210,30 @@ public class SrsEncoder {
     }

     public void onGetYuvFrame(byte[] data) {
-        preProcessYuvFrame(data);
-        ByteBuffer[] inBuffers = vencoder.getInputBuffers();
-        ByteBuffer[] outBuffers = vencoder.getOutputBuffers();
-
-        int inBufferIndex = vencoder.dequeueInputBuffer(-1);
-        if (inBufferIndex >= 0) {
-            ByteBuffer bb = inBuffers[inBufferIndex];
-            bb.clear();
-            bb.put(mCroppedFrameBuffer, 0, mCroppedFrameBuffer.length);
-            long pts = System.nanoTime() / 1000 - mPresentTimeUs;
-            vencoder.queueInputBuffer(inBufferIndex, 0, mCroppedFrameBuffer.length, pts, 0);
-        }
-
-        for (; ; ) {
-            int outBufferIndex = vencoder.dequeueOutputBuffer(vebi, 0);
-            if (outBufferIndex >= 0) {
-                ByteBuffer bb = outBuffers[outBufferIndex];
-                onEncodedAnnexbFrame(bb, vebi);
-                vencoder.releaseOutputBuffer(outBufferIndex, false);
-            } else {
-                break;
+        // Check if the networking is good enough.
+        if (publisher.getVideoFrameCacheNumber() < 128) {
+            preProcessYuvFrame(data);
+            ByteBuffer[] inBuffers = vencoder.getInputBuffers();
+            ByteBuffer[] outBuffers = vencoder.getOutputBuffers();
+
+            int inBufferIndex = vencoder.dequeueInputBuffer(-1);
+            if (inBufferIndex >= 0) {
+                ByteBuffer bb = inBuffers[inBufferIndex];
+                bb.clear();
+                bb.put(mCroppedFrameBuffer, 0, mCroppedFrameBuffer.length);
+                long pts = System.nanoTime() / 1000 - mPresentTimeUs;
+                vencoder.queueInputBuffer(inBufferIndex, 0, mCroppedFrameBuffer.length, pts, 0);
+            }
+
+            for (; ; ) {
+                int outBufferIndex = vencoder.dequeueOutputBuffer(vebi, 0);
+                if (outBufferIndex >= 0) {
+                    ByteBuffer bb = outBuffers[outBufferIndex];
+                    onEncodedAnnexbFrame(bb, vebi);
+                    vencoder.releaseOutputBuffer(outBufferIndex, false);
+                } else {
+                    break;
+                }
             }
         }
     }
@@ -278,12 +283,15 @@ public class SrsEncoder {
     // NV12 -> YUV420SP yyyy*2 uv uv
     // NV16 -> YUV422SP yyyy uv uv
     // YUY2 -> YUV422SP yuyv yuyv
     private byte[] cropYUV420SemiPlannerFrame(byte[] input, int iw, int ih, byte[] output, int ow, int oh) {
-        assert(iw >= ow && ih >= oh);
-        // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
-        // Since Y component is quadruple size as U and V component, the stride must be set as 32x
-        assert(ow % 32 == 0 && oh % 32 == 0);
+        if (iw < ow || ih < oh) {
+            throw new AssertionError();
+        }
+        if (ow % 32 != 0 || oh % 32 != 0) {
+            // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
+            // Since Y component is quadruple size as U and V component, the stride must be set as 32x
+            throw new AssertionError();
+        }

         int iFrameSize = iw * ih;
         int oFrameSize = ow * oh;
@@ -308,10 +316,14 @@ public class SrsEncoder {
     }

     private byte[] cropYUV420PlannerFrame(byte[] input, int iw, int ih, byte[] output, int ow, int oh) {
-        assert(iw >= ow && ih >= oh);
-        // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
-        // Since Y component is quadruple size as U and V component, the stride must be set as 32x
-        assert(ow % 32 == 0 && oh % 32 == 0);
+        if (iw < ow || ih < oh) {
+            throw new AssertionError();
+        }
+        if (ow % 32 != 0 || oh % 32 != 0) {
+            // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
+            // Since Y component is quadruple size as U and V component, the stride must be set as 32x
+            throw new AssertionError();
+        }

         int iFrameSize = iw * ih;
         int iQFrameSize = iFrameSize / 4;

@@ -46,7 +46,6 @@ import java.util.Comparator;
  * muxer.release();
  */
 public class SrsRtmp {
-    private String url;
     private boolean connected;
     private SrsRtmpPublisher publisher;
@@ -72,14 +71,14 @@ public class SrsRtmp {
      * constructor.
      * @param path the rtmp url to post to.
      */
-    public SrsRtmp(String path) {
+    public SrsRtmp(SrsRtmpPublisher p) {
         nb_videos = 0;
         nb_audios = 0;
         sequenceHeaderOk = false;
         connected = false;
-        url = path;
         flv = new SrsFlv();
         cache = new ArrayList<SrsFlvFrame>();
+        publisher = p;
     }

     /**
@@ -162,7 +161,6 @@ public class SrsRtmp {
                 stop();
             } catch (IllegalStateException e) {
                 // Ignore illegal state.
-                //Log.e(TAG, String.format("worker: stop failed. e=%s", e.getMessage()));
             }
         }
@@ -198,7 +196,7 @@ public class SrsRtmp {
             publisher = null;
         }

-        Log.i(TAG, String.format("worker: muxer closed, url=%s", url));
+        Log.i(TAG, String.format("worker: muxer closed, url=%s", publisher.getRtmpUrl()));
     }

     /**
@@ -245,11 +243,10 @@ public class SrsRtmp {
     private void connect() throws IllegalStateException, IOException {
         if (!connected) {
-            Log.i(TAG, String.format("worker: connecting to RTMP server by url=%s\n", url));
-            publisher = new SrsRtmpPublisher(url);
+            Log.i(TAG, String.format("worker: connecting to RTMP server by url=%s\n", publisher.getRtmpUrl()));
             publisher.connect();
             publisher.publish("live");
-            Log.i(TAG, String.format("worker: connect to RTMP server by url=%s\n", url));
+            Log.i(TAG, String.format("worker: connect to RTMP server by url=%s\n", publisher.getRtmpUrl()));
             connected = true;
         }

@@ -66,4 +66,12 @@ public class SrsRtmpPublisher implements RtmpPublisher {
         }
         rtmpConnection.publishAudioData(data, dts);
     }
+
+    public final int getVideoFrameCacheNumber() {
+        return ((RtmpConnection) rtmpConnection).getVideoFrameCacheNumber();
+    }
+
+    public final String getRtmpUrl() {
+        return ((RtmpConnection) rtmpConnection).getRtmpUrl();
+    }
 }

@@ -13,6 +13,7 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.concurrent.atomic.AtomicInteger;
 import android.util.Log;
 import net.ossrs.sea.rtmp.RtmpPublisher;
 import net.ossrs.sea.rtmp.amf.AmfNull;
@@ -42,6 +43,7 @@ public class RtmpConnection implements RtmpPublisher, PacketRxHandler, ThreadController {
     private String host;
     private String streamName;
     private String publishType;
+    private String rtmpUrl = "";
     private String swfUrl = "";
     private String tcUrl = "";
     private String pageUrl = "";
@@ -58,10 +60,12 @@ public class RtmpConnection implements RtmpPublisher, PacketRxHandler, ThreadController {
     private final Object connectingLock = new Object();
     private final Object publishLock = new Object();
     private volatile boolean connecting = false;
+    private AtomicInteger videoFrameCacheNumber = new AtomicInteger(0);
     private int currentStreamId = -1;

     public RtmpConnection(String url) {
-        this.tcUrl = url.substring(0, url.lastIndexOf('/'));
+        rtmpUrl = url;
+        this.tcUrl = rtmpUrl.substring(0, url.lastIndexOf('/'));
         Matcher matcher = rtmpUrlPattern.matcher(url);
         if (matcher.matches()) {
             this.host = matcher.group(1);
@@ -89,7 +93,7 @@ public class RtmpConnection implements RtmpPublisher, PacketRxHandler, ThreadController {
         active = true;
         Log.d(TAG, "connect(): handshake done");
         ReadThread readThread = new ReadThread(rtmpSessionInfo, in, this, this);
-        writeThread = new WriteThread(rtmpSessionInfo, out, this);
+        writeThread = new WriteThread(rtmpSessionInfo, out, videoFrameCacheNumber, this);
         readThread.start();
         writeThread.start();
@@ -427,6 +431,7 @@ public class RtmpConnection implements RtmpPublisher, PacketRxHandler, ThreadController {
         video.getHeader().setMessageStreamId(currentStreamId);
         video.getHeader().setAbsoluteTimestamp(dts);
         writeThread.send(video);
+        videoFrameCacheNumber.getAndIncrement();
     }

     @Override
@@ -443,4 +448,12 @@ public class RtmpConnection implements RtmpPublisher, PacketRxHandler, ThreadController {
         audio.getHeader().setAbsoluteTimestamp(dts);
         writeThread.send(audio);
     }
+
+    public final int getVideoFrameCacheNumber() {
+        return videoFrameCacheNumber.get();
+    }
+
+    public final String getRtmpUrl() {
+        return rtmpUrl;
+    }
 }

@@ -5,9 +5,12 @@ import java.io.OutputStream;
 import java.net.SocketException;
 import java.util.Arrays;
 import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.atomic.AtomicInteger;
 import android.util.Log;
 import net.ossrs.sea.rtmp.packets.Command;
 import net.ossrs.sea.rtmp.packets.RtmpPacket;
+import net.ossrs.sea.rtmp.packets.Video;

 /**
  * RTMPConnection's write thread
@@ -23,12 +26,14 @@ public class WriteThread extends Thread {
     private ConcurrentLinkedQueue<RtmpPacket> writeQueue = new ConcurrentLinkedQueue<RtmpPacket>();
     private final Object txPacketLock = new Object();
     private volatile boolean active = true;
+    private AtomicInteger videoFrameCacheNumber;
     private ThreadController threadController;

-    public WriteThread(RtmpSessionInfo rtmpSessionInfo, OutputStream out, ThreadController threadController) {
+    public WriteThread(RtmpSessionInfo rtmpSessionInfo, OutputStream out, AtomicInteger i, ThreadController threadController) {
         super("RtmpWriteThread");
         this.rtmpSessionInfo = rtmpSessionInfo;
         this.out = out;
+        this.videoFrameCacheNumber = i;
         this.threadController = threadController;
     }
@@ -39,13 +44,16 @@ public class WriteThread extends Thread {
             try {
                 while (!writeQueue.isEmpty()) {
                     RtmpPacket rtmpPacket = writeQueue.poll();
-                    final ChunkStreamInfo chunkStreamInfo = rtmpSessionInfo.getChunkStreamInfo(rtmpPacket.getHeader().getChunkStreamId());
+                    ChunkStreamInfo chunkStreamInfo = rtmpSessionInfo.getChunkStreamInfo(rtmpPacket.getHeader().getChunkStreamId());
                     chunkStreamInfo.setPrevHeaderTx(rtmpPacket.getHeader());
                     rtmpPacket.writeTo(out, rtmpSessionInfo.getTxChunkSize(), chunkStreamInfo);
                     Log.d(TAG, "WriteThread: wrote packet: " + rtmpPacket + ", size: " + rtmpPacket.getHeader().getPacketLength());
                     if (rtmpPacket instanceof Command) {
                         rtmpSessionInfo.addInvokedCommand(((Command) rtmpPacket).getTransactionId(), ((Command) rtmpPacket).getCommandName());
                     }
+                    if (rtmpPacket instanceof Video) {
+                        videoFrameCacheNumber.getAndDecrement();
+                    }
                 }
                 out.flush();
             } catch (SocketException se) {
