Merge branch 'master' into master

camera2
Leo Ma authored 8 years ago, committed by GitHub
commit 77c692ae39

@@ -9,7 +9,7 @@ android {
         minSdkVersion 16
         targetSdkVersion 22
         versionCode 1
-        versionName "2.5"
+        versionName "2.6"
         ndk {
             abiFilters "armeabi-v7a", "x86"
         }

@@ -72,7 +72,7 @@ public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpL
         mPublisher.setRtmpHandler(new RtmpHandler(this));
         mPublisher.setRecordHandler(new SrsRecordHandler(this));
         mPublisher.setPreviewResolution(640, 360);
-        mPublisher.setOutputResolution(720, 1280);
+        mPublisher.setOutputResolution(360, 640);
         mPublisher.setVideoHDMode();
         mPublisher.startCamera();

@@ -5,7 +5,7 @@ buildscript {
         jcenter()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:2.3.1'
+        classpath 'com.android.tools.build:gradle:2.3.3'
         // NOTE: Do not place your application dependencies here; they belong
         // in the individual module build.gradle files

@@ -8,7 +8,7 @@ android {
         minSdkVersion 16
         targetSdkVersion 22
         versionCode 1
-        versionName "2.5"
+        versionName "2.6"
         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
         ndk {

libenc.cc
@@ -1,9 +1,10 @@
 #include <jni.h>
+#include <android/log.h>
 #include <string.h>
+#include <libyuv.h>
 #include <x264.h>
-#include <android/log.h>
 #define LIBENC_LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "libenc", __VA_ARGS__))
 #define LIBENC_LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO , "libenc", __VA_ARGS__))
 #define LIBENC_LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN , "libenc", __VA_ARGS__))
@@ -515,6 +516,8 @@ static jboolean libenc_openSoftEncoder(JNIEnv *env, jobject thiz) {
     x264_param_default_preset(&x264_ctx.params, x264_ctx.preset, "zerolatency");
     x264_ctx.params.b_repeat_headers = 0;
+    // for iOS HW decoding
+    x264_ctx.params.b_sliced_threads = 0;
     x264_ctx.global_nal_header = true;
     // resolution

@@ -191,6 +191,7 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
     }

     public void setCameraId(int id) {
+        stopTorch();
         mCamId = id;
         setPreviewOrientation(mPreviewOrientation);
     }
@@ -320,6 +321,7 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
     public void stopCamera() {
         disableEncoding();
+        stopTorch();

         if (mCamera != null) {
             mCamera.stopPreview();
             mCamera.release();
@@ -388,6 +390,29 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
         return closestRange;
     }

+    public boolean startTorch() {
+        if (mCamera != null) {
+            Camera.Parameters params = mCamera.getParameters();
+            List<String> supportedFlashModes = params.getSupportedFlashModes();
+            if (supportedFlashModes != null && !supportedFlashModes.isEmpty()) {
+                if (supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
+                    params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
+                    mCamera.setParameters(params);
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public void stopTorch() {
+        if (mCamera != null) {
+            Camera.Parameters params = mCamera.getParameters();
+            params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
+            mCamera.setParameters(params);
+        }
+    }
+
     public interface PreviewCallback {

         void onGetRgbaFrame(byte[] data, int width, int height);

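The torch pair above is the commit's main API addition: startTorch() reports whether FLASH_MODE_TORCH was actually engaged, and stopTorch() is now also invoked from setCameraId() and stopCamera() so the lamp never outlives the session. A minimal usage sketch (the wrapper class and its names are hypothetical, not part of yasea):

```java
import net.ossrs.yasea.SrsCameraView;

// Hypothetical toggle built on the new API: track torch state locally,
// since startTorch() returns false on devices without FLASH_MODE_TORCH.
public class TorchToggle {
    private boolean torchOn = false;

    public boolean toggle(SrsCameraView cameraView) {
        if (torchOn) {
            cameraView.stopTorch();
            torchOn = false;
        } else {
            torchOn = cameraView.startTorch();
        }
        return torchOn;
    }
}
```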
@@ -26,18 +26,18 @@ public class SrsEncoder {
     public static String x264Preset = "veryfast";
     public static int vPrevWidth = 640;
     public static int vPrevHeight = 360;
-    public static int vPortraitWidth = 720;
-    public static int vPortraitHeight = 1280;
-    public static int vLandscapeWidth = 1280;
-    public static int vLandscapeHeight = 720;
-    public static int vOutWidth = 720;   // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
-    public static int vOutHeight = 1280; // Since Y component is quadruple size as U and V component, the stride must be set as 32x
+    public static int vPortraitWidth = 360;
+    public static int vPortraitHeight = 640;
+    public static int vLandscapeWidth = 640;
+    public static int vLandscapeHeight = 360;
+    public static int vOutWidth = 360;  // Note: the stride of resolution must be set as 16x for hard encoding with some chip like MTK
+    public static int vOutHeight = 640; // Since Y component is quadruple size as U and V component, the stride must be set as 32x
     public static int vBitrate = 1200 * 1024;  // 1200 kbps
     public static final int VFPS = 24;
     public static final int VGOP = 48;
     public static final int ASAMPLERATE = 44100;
     public static int aChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
-    public static final int ABITRATE = 128 * 1024;  // 128 kbps
+    public static final int ABITRATE = 64 * 1024;  // 64 kbps

     private SrsEncodeHandler mHandler;
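The stride notes above are the constraint to keep in mind when choosing other output sizes: some hardware encoders (the comment names MTK chips) want the luma stride to be a multiple of 16, or 32 where the 4:1 Y-to-chroma size ratio applies. A sketch of the rounding that satisfies such a requirement (the helper is hypothetical):

```java
// Hypothetical alignment helper for picking encoder-safe dimensions:
// rounds a requested size up to the next multiple of the required stride.
public final class StrideAlign {
    public static int align(int size, int stride) {
        return (size + stride - 1) / stride * stride;
    }

    public static void main(String[] args) {
        System.out.println(align(360, 16)); // 368: 360 itself is not 16-aligned
        System.out.println(align(640, 32)); // 640: already aligned
    }
}
```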
@@ -340,26 +340,31 @@ public class SrsEncoder {
     }

     public void onGetPcmFrame(byte[] data, int size) {
-        ByteBuffer[] inBuffers = aencoder.getInputBuffers();
-        ByteBuffer[] outBuffers = aencoder.getOutputBuffers();
-        int inBufferIndex = aencoder.dequeueInputBuffer(-1);
-        if (inBufferIndex >= 0) {
-            ByteBuffer bb = inBuffers[inBufferIndex];
-            bb.clear();
-            bb.put(data, 0, size);
-            long pts = System.nanoTime() / 1000 - mPresentTimeUs;
-            aencoder.queueInputBuffer(inBufferIndex, 0, size, pts, 0);
-        }
+        // Check video frame cache number to judge the networking situation.
+        // Just cache GOP / FPS seconds data according to latency.
+        AtomicInteger videoFrameCacheNumber = flvMuxer.getVideoFrameCacheNumber();
+        if (videoFrameCacheNumber != null && videoFrameCacheNumber.get() < VGOP) {
+            ByteBuffer[] inBuffers = aencoder.getInputBuffers();
+            ByteBuffer[] outBuffers = aencoder.getOutputBuffers();
+            int inBufferIndex = aencoder.dequeueInputBuffer(-1);
+            if (inBufferIndex >= 0) {
+                ByteBuffer bb = inBuffers[inBufferIndex];
+                bb.clear();
+                bb.put(data, 0, size);
+                long pts = System.nanoTime() / 1000 - mPresentTimeUs;
+                aencoder.queueInputBuffer(inBufferIndex, 0, size, pts, 0);
+            }

-        for (; ; ) {
-            int outBufferIndex = aencoder.dequeueOutputBuffer(aebi, 0);
-            if (outBufferIndex >= 0) {
-                ByteBuffer bb = outBuffers[outBufferIndex];
-                onEncodedAacFrame(bb, aebi);
-                aencoder.releaseOutputBuffer(outBufferIndex, false);
-            } else {
-                break;
+            for (; ; ) {
+                int outBufferIndex = aencoder.dequeueOutputBuffer(aebi, 0);
+                if (outBufferIndex >= 0) {
+                    ByteBuffer bb = outBuffers[outBufferIndex];
+                    onEncodedAacFrame(bb, aebi);
+                    aencoder.releaseOutputBuffer(outBufferIndex, false);
+                } else {
+                    break;
+                }
             }
         }
     }

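The new guard is a simple back-pressure rule: PCM input is only fed to the AAC encoder while fewer than VGOP video frames sit unsent in the muxer. With VGOP = 48 and VFPS = 24 that is VGOP / VFPS = 2 seconds of backlog, after which audio is dropped until the network catches up. The same predicate in isolation (a sketch; the class is hypothetical):

```java
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical restatement of the gate used in onGetPcmFrame(): encode
// audio only while the unsent video backlog is under one latency budget.
public class AudioGate {
    static final int VFPS = 24;
    static final int VGOP = 48; // 48 frames / 24 fps = 2 seconds of backlog

    private final AtomicInteger videoFrameCacheNumber = new AtomicInteger();

    public boolean shouldEncodeAudio() {
        return videoFrameCacheNumber.get() < VGOP;
    }
}
```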
@@ -343,13 +343,12 @@ public class SrsFlvMuxer {
      */
     private class SrsCodecAudioSampleRate
     {
-        // set to the max value to reserved, for array map.
-        public final static int Reserved = 4;
-
-        public final static int R5512 = 0;
-        public final static int R11025 = 1;
-        public final static int R22050 = 2;
-        public final static int R44100 = 3;
+        public final static int R5512 = 5512;
+        public final static int R11025 = 11025;
+        public final static int R22050 = 22050;
+        public final static int R44100 = 44100;
+        public final static int R32000 = 32000;
+        public final static int R16000 = 16000;
     }
/**
@@ -457,6 +456,11 @@ public class SrsFlvMuxer {
         private SrsFlvFrameBytes sps_bb = new SrsFlvFrameBytes();
         private SrsFlvFrameBytes pps_hdr = new SrsFlvFrameBytes();
         private SrsFlvFrameBytes pps_bb = new SrsFlvFrameBytes();
+        private boolean sps_pps_found = false;
+
+        public void reset() {
+            sps_pps_found = false;
+        }

         public boolean isSps(SrsFlvFrameBytes frame) {
             return frame.size >= 1 && (frame.data.get(0) & 0x1f) == SrsAvcNaluType.SPS;
@@ -570,6 +574,8 @@ public class SrsFlvMuxer {
             pps_bb.size = pps.array().length;
             pps_bb.data = pps.duplicate();
             frames.add(pps_bb);
+
+            sps_pps_found = true;
         }

         public SrsAllocator.Allocation muxFlvTag(ArrayList<SrsFlvFrameBytes> frames, int frame_type,
@@ -646,28 +652,20 @@ public class SrsFlvMuxer {
         public SrsFlvFrameBytes demuxAnnexb(ByteBuffer bb, MediaCodec.BufferInfo bi, boolean isOnlyChkHeader) {
             SrsFlvFrameBytes tbb = new SrsFlvFrameBytes();
             if (bb.position() < bi.size) {
                 // each frame must prefixed by annexb format.
                 // about annexb, @see H.264-AVC-ISO_IEC_14496-10.pdf, page 211.
-                SrsAnnexbSearch tbbsc = isOnlyChkHeader?searchStartcode(bb, bi):searchAnnexb(bb, bi);
+                SrsAnnexbSearch tbbsc = isOnlyChkHeader ? searchStartcode(bb, bi) : searchAnnexb(bb, bi);
+                // tbbsc.nb_start_code always 4 , after 00 00 00 01
                 if (!tbbsc.match || tbbsc.nb_start_code < 3) {
                     Log.e(TAG, "annexb not match.");
//                    mHandler.notifyRtmpIllegalArgumentException(new IllegalArgumentException(
//                        String.format("annexb not match for %dB, pos=%d", bi.size, bb.position())));
                 }

                 // the start codes.
                 for (int i = 0; i < tbbsc.nb_start_code; i++) {
                     bb.get();
                 }

                 // find out the frame size.
                 tbb.data = bb.slice();
                 tbb.size = bi.size - bb.position();
             }
             return tbb;
         }
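demuxAnnexb() walks past the Annex-B start code (00 00 00 01, or the 3-byte 00 00 01 form) that MediaCodec prefixes to each NALU, then slices off the payload. A standalone sketch of that start-code check (hypothetical helper, not the library's SrsAnnexbSearch):

```java
import java.nio.ByteBuffer;

// Hypothetical scanner: length of the Annex-B start code at the current
// position (4 or 3), or 0 if the buffer is not Annex-B framed.
public final class StartCode {
    public static int lengthAt(ByteBuffer bb, int size) {
        int p = bb.position();
        if (size - p >= 4 && bb.get(p) == 0 && bb.get(p + 1) == 0
                && bb.get(p + 2) == 0 && bb.get(p + 3) == 1) {
            return 4; // 00 00 00 01
        }
        if (size - p >= 3 && bb.get(p) == 0 && bb.get(p + 1) == 0
                && bb.get(p + 2) == 1) {
            return 3; // 00 00 01
        }
        return 0;
    }
}
```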
@@ -718,6 +716,7 @@ public class SrsFlvMuxer {
             h264_pps_changed = false;
             h264_sps_pps_sent = false;
             aac_specific_config_got = false;
+            avc.reset();
         }

         public void setVideoTrack(MediaFormat format) {
@@ -750,6 +749,10 @@ public class SrsFlvMuxer {
                 samplingFrequencyIndex = 0x07;
             } else if (asample_rate == SrsCodecAudioSampleRate.R11025) {
                 samplingFrequencyIndex = 0x0a;
+            } else if (asample_rate == SrsCodecAudioSampleRate.R32000) {
+                samplingFrequencyIndex = 0x05;
+            } else if (asample_rate == SrsCodecAudioSampleRate.R16000) {
+                samplingFrequencyIndex = 0x08;
             }
             ch |= (samplingFrequencyIndex >> 1) & 0x07;
             audio_tag.put(ch, 2);
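The two new branches extend the AudioSpecificConfig written into the FLV sequence header; the magic numbers are positions in the AAC samplingFrequencyIndex table of ISO/IEC 14496-3 (44100 -> 4, 32000 -> 5, 22050 -> 7, 16000 -> 8, 11025 -> 10). A table-driven sketch of the same mapping (hypothetical helper):

```java
// Hypothetical lookup for the AAC samplingFrequencyIndex (ISO/IEC 14496-3).
// Returns 0x0f, the escape value meaning the exact rate is written out
// explicitly, when the rate is not in the table.
public final class AacSamplingIndex {
    private static final int[] RATES = {
            96000, 88200, 64000, 48000, 44100, 32000,
            24000, 22050, 16000, 12000, 11025, 8000
    };

    public static int of(int sampleRate) {
        for (int i = 0; i < RATES.length; i++) {
            if (RATES[i] == sampleRate) return i;
        }
        return 0x0f;
    }
}
```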
@@ -788,11 +791,13 @@ public class SrsFlvMuxer {
                 sound_type = 1;  // 1 = Stereo sound
             }
             byte sound_size = 1;  // 1 = 16-bit samples
-            byte sound_rate = 3;  // 44100, 22050, 11025
+            byte sound_rate = 3;  // 44100, 22050, 11025, 5512
             if (asample_rate == 22050) {
                 sound_rate = 2;
             } else if (asample_rate == 11025) {
                 sound_rate = 1;
+            } else if (asample_rate == 5512) {
+                sound_rate = 0;
             }

             // for audio frame, there is 1 or 2 bytes header:
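sound_rate is the two-bit rate field of the FLV audio tag header (0 = 5.5 kHz, 1 = 11 kHz, 2 = 22 kHz, 3 = 44 kHz), so the new branch completes the table. How that first header byte is packed per the FLV spec (a sketch; the helper name is hypothetical):

```java
// Hypothetical packer for byte 0 of an FLV audio tag:
// soundFormat (4 bits) | soundRate (2) | soundSize (1) | soundType (1).
public final class FlvAudioTagHeader {
    public static byte pack(int soundFormat, int soundRate, int soundSize, int soundType) {
        return (byte) ((soundFormat << 4) | (soundRate << 2) | (soundSize << 1) | soundType);
    }

    public static void main(String[] args) {
        // AAC (format 10) is always flagged 44 kHz / 16-bit / stereo -> 0xAF
        System.out.printf("0x%02X%n", pack(10, 3, 1, 1));
    }
}
```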
@@ -846,22 +851,19 @@ public class SrsFlvMuxer {
         }

         public void writeVideoSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) {
-            if (bi.size < 4) {
-                return;
-            }
-
             int pts = (int) (bi.presentationTimeUs / 1000);
             int dts = pts;

             int type = SrsCodecVideoAVCFrame.InterFrame;
+            if (bi.size < 4) return;
+
             SrsFlvFrameBytes frame = avc.demuxAnnexb(bb, bi, true);
-            int nal_unit_type = (int)(frame.data.get(0) & 0x1f);
-            if (nal_unit_type == SrsAvcNaluType.NonIDR)
-            {
-            } else if (nal_unit_type == SrsAvcNaluType.IDR)
-            {
+            int nal_unit_type = frame.data.get(0) & 0x1f;
+            if (nal_unit_type == SrsAvcNaluType.IDR) {
                 type = SrsCodecVideoAVCFrame.KeyFrame;
-            } else if (nal_unit_type == SrsAvcNaluType.SPS || nal_unit_type == SrsAvcNaluType.PPS)
-            {
+            } else if (nal_unit_type == SrsAvcNaluType.SPS || nal_unit_type == SrsAvcNaluType.PPS) {
                 if (!frame.data.equals(h264_sps)) {
                     byte[] sps = new byte[frame.size];
                     frame.data.get(sps);
@@ -878,13 +880,12 @@ public class SrsFlvMuxer {
                     writeH264SpsPps(dts, pts);
                 }
                 return;
-            } else
+            } else if (nal_unit_type != SrsAvcNaluType.NonIDR) {
                 return;
+            }

             ipbs.add(avc.muxNaluHeader(frame));
             ipbs.add(frame);
-            //writeH264SpsPps(dts, pts);
             writeH264IpbFrame(ipbs, type, dts, pts);
             ipbs.clear();
         }

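The rewritten branch chain keys off nal_unit_type, the low five bits of the first byte after the start code (H.264 Table 7-1): 1 = non-IDR slice, 5 = IDR, 7 = SPS, 8 = PPS. The new final branch means anything other than a slice or parameter set (SEI, access-unit delimiters, and so on) is now skipped rather than muxed. A minimal sketch of that classification (hypothetical helper):

```java
// Hypothetical classifier for H.264 NAL units as writeVideoSample() sees
// them: nal_unit_type lives in the low five bits of the first payload byte.
public final class Nalu {
    public static final int NON_IDR = 1, IDR = 5, SPS = 7, PPS = 8;

    public static int typeOf(byte firstPayloadByte) {
        return firstPayloadByte & 0x1f;
    }

    public static boolean isSlice(int type) {
        return type == NON_IDR || type == IDR; // only slices become FLV body tags
    }
}
```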
proguard-rules.pro
@@ -0,0 +1,2 @@
+-keep class net.ossrs.yasea.**{*;}
+-keep class net.ossrs.yasea.SrsEncoder{*;}