package net.ossrs.yasea;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.support.v4.app.ActivityCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Range;
import android.util.Size;
import android.view.Surface;

import com.seu.magicfilter.base.gpuimage.GPUImageFilter;
import com.seu.magicfilter.utils.MagicFilterFactory;
import com.seu.magicfilter.utils.MagicFilterType;
import com.seu.magicfilter.utils.OpenGLUtils;

import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * Created by Leo Ma on 2016/2/25.
 */
public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Renderer {

    private final static String TAG = "SrsCameraView";

    private GPUImageFilter magicFilter;
    private SurfaceTexture surfaceTexture;
    private int mOESTextureId = OpenGLUtils.NO_TEXTURE;
    private int mSurfaceWidth;
    private int mSurfaceHeight;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private volatile boolean mIsEncoding;
    private boolean mIsTorchOn = false;
    private float mInputAspectRatio;
    private float mOutputAspectRatio;
    private float[] mProjectionMatrix = new float[16];
    private float[] mSurfaceMatrix = new float[16];
    private float[] mTransformMatrix = new float[16];
    private ByteBuffer mGLPreviewBuffer;
    private int mCamId = -1;
    private int mPreviewRotation = 90;
    private int mPreviewOrientation = Configuration.ORIENTATION_PORTRAIT;

    private Thread worker;
    private final Object writeLock = new Object();
    private ConcurrentLinkedQueue<IntBuffer> mGLIntBufferCache = new ConcurrentLinkedQueue<>();
    private PreviewCallback mPrevCb;
    private CameraCallbacksHandler cameraCallbacksHandler = new CameraCallbacksHandler();
    private CameraDevice mCameraDevice; // the opened camera2 device
    private Size imageDimension;
    private ImageReader imageReader;
    private CaptureRequest.Builder captureRequestBuilder;
    private CameraCaptureSession cameraCaptureSession;
    private CameraManager cameraManager; // system camera manager
    private String[] cameraNames; // IDs of the available cameras
    private CameraItemData curItem; // parameters of the currently selected camera
    private ArrayList<CameraItemData> itemlist = new ArrayList<>();

    public SrsCameraView(Context context) {
        this(context, null);
    }

    public SrsCameraView(Context context, AttributeSet attrs) {
        super(context, attrs);

        setEGLContextClientVersion(2);
        setRenderer(this);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        initCameraData();
    }

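    /*
     * Typical lifecycle (a minimal sketch, not taken from this repository): the names
     * CameraActivity-style IDs and MyEncoder below are hypothetical; only the SrsCameraView
     * methods shown actually exist in this class. Assumes the CAMERA permission has already
     * been granted before startCamera() is called.
     *
     *     SrsCameraView cameraView = findViewById(R.id.glsurfaceview_camera);
     *     cameraView.setPreviewCallback(new SrsCameraView.PreviewCallback() {
     *         @Override
     *         public void onGetRgbaFrame(byte[] data, int width, int height) {
     *             // hand the RGBA frame to an encoder, e.g. MyEncoder.push(data, width, height)
     *         }
     *     });
     *     cameraView.setPreviewResolution(1280, 720);
     *     cameraView.startCamera();      // opens the camera2 device and starts the preview session
     *     cameraView.enableEncoding();   // starts the worker thread that drains GL frames
     *
     *     // ...and on teardown:
     *     cameraView.stopCamera();       // also calls disableEncoding() and stopTorch()
     */
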
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        int cameraid = 0;
        GLES20.glDisable(GL10.GL_DITHER);
        GLES20.glClearColor(0, 0, 0, 0);
        String id = curItem.getId();
        magicFilter = new GPUImageFilter(MagicFilterType.NONE);
        try {
            cameraid = Integer.parseInt(id);
        } catch (Exception e) {
            cameraid = 0;
        }
        magicFilter.setOrtation(cameraid);
        magicFilter.init(getContext().getApplicationContext());
        magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);

        mOESTextureId = OpenGLUtils.getExternalOESTextureID();
        surfaceTexture = new SurfaceTexture(mOESTextureId);
        surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                requestRender();
            }
        });
        // For camera preview on activity creation
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        magicFilter.onDisplaySizeChanged(width, height);
        magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);

        mOutputAspectRatio = width > height ? (float) width / height : (float) height / width;
        float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
        if (width > height) {
            Matrix.orthoM(mProjectionMatrix, 0, -1.0f, 1.0f, -aspectRatio, aspectRatio, -1.0f, 1.0f);
        } else {
            Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1.0f, 1.0f, -1.0f, 1.0f);
        }
    }

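    /*
     * Worked example for the projection math above (values are illustrative, not from the
     * repository): a 1080x1920 portrait surface gives mOutputAspectRatio = 1920/1080 ≈ 1.778,
     * and a 720x1280 preview gives mInputAspectRatio = 1280/720 ≈ 1.778, so aspectRatio ≈ 1.0
     * and the ortho projection is the unit square. If the preview were 480x640 instead
     * (ratio ≈ 1.333), aspectRatio ≈ 1.333 and the left/right clipping planes widen to ±1.333,
     * scaling the preview horizontally relative to the surface.
     */
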
    @Override
    public void onDrawFrame(GL10 gl) {
        if (mSurfaceWidth != mPreviewWidth || mSurfaceHeight != mPreviewHeight) {
            Log.e(TAG, String.format("Surface dimensions differ from Preview. May be a buffer overflow. Surface: %dx%d, Preview: %dx%d ", mSurfaceWidth, mSurfaceHeight, mPreviewWidth, mPreviewHeight));
            return;
        }
        // Clear the color buffer
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        surfaceTexture.updateTexImage();

        surfaceTexture.getTransformMatrix(mSurfaceMatrix);
        Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0, mProjectionMatrix, 0);
        magicFilter.setTextureTransformMatrix(mTransformMatrix);
        magicFilter.onDrawFrame(mOESTextureId);

        if (mIsEncoding) {
            // Queue the filter's FBO pixels and wake the encoding worker thread
            mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
            synchronized (writeLock) {
                writeLock.notifyAll();
            }
        }
    }

    public void setPreviewCallback(PreviewCallback cb) {
        mPrevCb = cb;
    }

    // Pick the output size that is larger than, and closest to, the requested width and height
    private Size getOptimalSize(Size[] outputSizes, int width, int height) {
        Size tempSize = new Size(width, height);
        List<Size> sizes = new ArrayList<>();
        for (Size outputSize : outputSizes) {
            if (width > height) {
                // Landscape request
                if (outputSize.getHeight() > height && outputSize.getWidth() > width) {
                    sizes.add(outputSize);
                }
            } else {
                // Portrait request: camera sizes are landscape, so compare against swapped dimensions
                if (outputSize.getWidth() > height && outputSize.getHeight() > width) {
                    sizes.add(outputSize);
                }
            }
        }
        if (sizes.size() > 0) {
            // If several sizes qualify, take the one whose pixel count is closest to the requested resolution
            tempSize = sizes.get(0);
            int minnum = Integer.MAX_VALUE;
            for (Size size : sizes) {
                int num = size.getWidth() * size.getHeight() - width * height;
                if (num < minnum) {
                    minnum = num;
                    tempSize = size;
                }
            }
        }
        return tempSize;
        /*if (sizes.size() > 0) {
            return Collections.min(sizes, new Comparator<Size>() {
                @Override
                public int compare(Size size, Size t1) {
                    return Long.signum(size.getWidth() * size.getHeight() - t1.getWidth() * t1.getHeight());
                }
            });
        }
        return outputSizes[0];*/
    }

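    /*
     * Worked example for getOptimalSize (illustrative values, not from the repository): for a
     * portrait request of 720x1280 and camera output sizes {1280x720, 1920x1080, 3840x2160},
     * both 1920x1080 and 3840x2160 have width > 1280 and height > 720, so they qualify;
     * 1920x1080 has the smaller pixel count (2,073,600 vs 8,294,400) and is returned.
     */
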
    public int[] setPreviewResolution(int width, int height) {
        mPreviewWidth = width;
        mPreviewHeight = height;
        getHolder().setFixedSize(mPreviewWidth, mPreviewHeight);
        mGLPreviewBuffer = ByteBuffer.allocate(mPreviewWidth * mPreviewHeight * 4);
        mInputAspectRatio = mPreviewWidth > mPreviewHeight ? (float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight / mPreviewWidth;
        return new int[]{mPreviewWidth, mPreviewHeight};
    }

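    /*
     * The buffer above holds one RGBA frame (4 bytes per pixel). As an illustrative figure, a
     * 1280x720 preview allocates 1280 * 720 * 4 = 3,686,400 bytes (about 3.5 MiB), and that
     * single buffer is reused for every frame handed to PreviewCallback.onGetRgbaFrame().
     */
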
    public boolean setFilter(final MagicFilterType type) {

        queueEvent(new Runnable() {
            @Override
            public void run() {
                if (magicFilter != null) {
                    magicFilter.destroy();
                }
                magicFilter = MagicFilterFactory.initFilters(type);
                if (magicFilter != null) {
                    magicFilter.init(getContext().getApplicationContext());
                    magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
                    magicFilter.onDisplaySizeChanged(mSurfaceWidth, mSurfaceHeight);
                }
            }
        });
        requestRender();
        return true;
    }

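    /*
     * Usage sketch for setFilter (MagicFilterType.NONE is the only constant used elsewhere in
     * this file; other filter types depend on what MagicFilterFactory.initFilters() supports):
     *
     *     cameraView.setFilter(MagicFilterType.NONE);   // swap the GPU filter
     *
     * The old filter is destroyed and the new one initialized inside queueEvent(), so the
     * change happens on the renderer thread rather than the caller's thread.
     */
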
    private void deleteTextures() {
        if (mOESTextureId != OpenGLUtils.NO_TEXTURE) {
            queueEvent(new Runnable() {
                @Override
                public void run() {
                    GLES20.glDeleteTextures(1, new int[]{mOESTextureId}, 0);
                    mOESTextureId = OpenGLUtils.NO_TEXTURE;
                }
            });
        }
    }

    public void setCameraId(CameraItemData item) {
        stopTorch();
        curItem = item;
        setPreviewOrientation(mPreviewOrientation);
    }

    protected int getRotateDeg() {
        try {
            int rotate = ((Activity) getContext()).getWindowManager().getDefaultDisplay().getRotation();
            switch (rotate) {
                case Surface.ROTATION_0:
                    return 0;
                case Surface.ROTATION_90:
                    return 90;
                case Surface.ROTATION_180:
                    return 180;
                case Surface.ROTATION_270:
                    return 270;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        return -1;
    }

    public void setPreviewOrientation(int orientation) {
        mPreviewOrientation = orientation;
    }

    public int getCameraId() {
        return mCamId;
    }

    public void enableEncoding() {
        worker = new Thread(new Runnable() {
            @Override
            public void run() {
                while (!Thread.interrupted()) {
                    while (!mGLIntBufferCache.isEmpty()) {
                        try {
                            IntBuffer picture = mGLIntBufferCache.poll();
                            mGLPreviewBuffer.asIntBuffer().put(picture.array());
                            mPrevCb.onGetRgbaFrame(mGLPreviewBuffer.array(), mPreviewWidth, mPreviewHeight);
                        } catch (Exception e) {
                            cameraCallbacksHandler.onError(e);
                            e.printStackTrace();
                            worker.interrupt();
                            break;
                        }
                    }
                    // Waiting for next frame
                    synchronized (writeLock) {
                        try {
                            // isEmpty() may take some time, so we set timeout to detect next frame
                            writeLock.wait(500);
                        } catch (InterruptedException ie) {
                            worker.interrupt();
                        }
                    }
                }
            }
        });
        worker.start();
        mIsEncoding = true;
    }

    public void disableEncoding() {
        mIsEncoding = false;
        mGLIntBufferCache.clear();
        if (mGLPreviewBuffer != null) {
            mGLPreviewBuffer.clear();
        }

        if (worker != null) {
            worker.interrupt();
            try {
                worker.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
                worker.interrupt();
            }
            worker = null;
        }
    }

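    /*
     * Producer/consumer handoff: onDrawFrame() enqueues the filter's FBO pixels into
     * mGLIntBufferCache and calls writeLock.notifyAll(); the worker thread above drains the
     * queue, copies each frame into mGLPreviewBuffer, and delivers it through
     * PreviewCallback.onGetRgbaFrame(). A minimal consumer (illustrative only; the log tag is
     * hypothetical) could look like:
     *
     *     cameraView.setPreviewCallback(new SrsCameraView.PreviewCallback() {
     *         @Override
     *         public void onGetRgbaFrame(byte[] data, int width, int height) {
     *             Log.d("FrameConsumer", "got RGBA frame " + width + "x" + height
     *                     + " (" + data.length + " bytes)");
     *         }
     *     });
     */
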
    // Callbacks invoked when the camera device's state changes
    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            // Called once the camera has been opened
            Log.e(TAG, "onOpened");
            mCameraDevice = camera;
            // Create the camera preview session
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            // The camera is no longer available; release it
            Log.e(TAG, "onDisconnected");
            camera.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            Log.e(TAG, "onError: " + error);
            camera.close();
            mCameraDevice = null;
        }
    };

    private void createCameraPreviewSession() {
        surfaceTexture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
        // Output surface for the preview frames
        Surface surface = new Surface(surfaceTexture);
        try {
            // Preview capture request
            captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(surface);
            mCameraDevice.createCaptureSession(Arrays.asList(surface, imageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    if (mCameraDevice == null) {
                        return;
                    }
                    cameraCaptureSession = session;
                    updatePreview();
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    Log.e(TAG, "onConfigureFailed");
                }
            }, null);
        } catch (CameraAccessException e) {
            throw new RuntimeException(e);
        }
    }

    private void updatePreview() {
        captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        CaptureRequest captureRequest = captureRequestBuilder.build();

        try {
            cameraCaptureSession.setRepeatingRequest(captureRequest, null, null);
        } catch (CameraAccessException e) {
            throw new RuntimeException(e);
        }
    }

    public void startCamera() {
        try {
            if (curItem == null) {
                curItem = itemlist.get(0);
            }
            // Look up the characteristics of the selected camera by its ID
            String cameraId = curItem.getId();
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
            imageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 10);
            // imageReader.setOnImageAvailableListener(onImageAvailableListener,null);
            if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            cameraManager.openCamera(cameraId, stateCallback, null);

        } catch (CameraAccessException e) {
            throw new RuntimeException(e);
        }
    }

    public void stopCamera() {
        disableEncoding();
        stopTorch();
        if (cameraCaptureSession != null) {
            cameraCaptureSession.close();
            cameraCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
    }

    public List<CameraItemData> initCameraData() {
        itemlist = new ArrayList<>();
        cameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE); // system camera manager
        try {
            cameraNames = cameraManager.getCameraIdList(); // IDs of the available cameras
        } catch (CameraAccessException e) {
            throw new RuntimeException(e);
        }

        for (String cameraName : cameraNames) {
            CameraItemData itemData = new CameraItemData();
            itemData.setId(cameraName); // camera ID

            CameraCharacteristics characteristics = null;
            try {
                characteristics = cameraManager.getCameraCharacteristics(cameraName); // all characteristics of this camera
            } catch (CameraAccessException e) {
                throw new RuntimeException(e);
            }
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            itemData.setSensorOrientation(sensorOrientation); // sensor orientation

            Range<Integer>[] ranges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
            itemData.setFPS_RANGES(ranges); // supported AE target FPS ranges

            Long aLong = characteristics.get(CameraCharacteristics.SENSOR_INFO_MAX_FRAME_DURATION);
            itemData.setMAX_FRAME_DURATION(aLong);

            int lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
            itemData.setLENS_FACING(lensFacing);

            switch (lensFacing) {
                case CameraMetadata.LENS_FACING_FRONT:
                    Log.e("lensFacing", "front");
                    break;
                case CameraMetadata.LENS_FACING_BACK:
                    Log.e("lensFacing", "back");
                    break;
                case CameraMetadata.LENS_FACING_EXTERNAL:
                    Log.e("lensFacing", "external");
                    break;
            }

            List<int[]> list = new ArrayList<>();
            Size[] supportedSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.YUV_420_888);
            for (Size size : supportedSizes) {
                int width = size.getWidth();
                int height = size.getHeight();
                // Collect the supported output resolutions
                int[] temp = {width, height};
                list.add(temp);
            }
            itemData.setSupportedSizes(list); // supported output sizes
            itemlist.add(itemData);
        }
        return itemlist;
    }

    public List<CameraItemData> getCameraData() {
        return itemlist;
    }

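    /*
     * Enumeration/selection sketch (illustrative only): CameraItemData's setters appear in
     * initCameraData() above, but the matching getter getLENS_FACING() is an assumption about
     * that class. Picking the front camera could look roughly like:
     *
     *     for (CameraItemData item : cameraView.getCameraData()) {
     *         if (item.getLENS_FACING() == CameraMetadata.LENS_FACING_FRONT) {
     *             cameraView.setCameraId(item);   // becomes curItem for the next startCamera()
     *             break;
     *         }
     *     }
     */
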
    private int[] adaptFpsRange(int expectedFps, List<int[]> fpsRanges) {
        expectedFps *= 1000;
        int[] closestRange = fpsRanges.get(0);
        int measure = Math.abs(closestRange[0] - expectedFps) + Math.abs(closestRange[1] - expectedFps);
        for (int[] range : fpsRanges) {
            if (range[0] <= expectedFps && range[1] >= expectedFps) {
                int curMeasure = Math.abs(range[0] - expectedFps) + Math.abs(range[1] - expectedFps);
                if (curMeasure < measure) {
                    closestRange = range;
                    measure = curMeasure;
                }
            }
        }
        return closestRange;
    }

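    /*
     * Worked example for adaptFpsRange (illustrative values): the method uses the Camera1
     * convention of fps scaled by 1000, so expectedFps = 30 becomes 30000. Given the ranges
     * {15000, 30000} and {30000, 30000}, both contain 30000; their measures are
     * |15000 - 30000| + |30000 - 30000| = 15000 and 0 + 0 = 0, so {30000, 30000} is returned.
     */
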
    // Delegates to the filter; the spelling "Ortation" follows GPUImageFilter's method name
    public void setOrtation(int cameraid) {
        magicFilter.setOrtation(cameraid);
    }

    public void stopTorch() {
        // No-op: the commented Camera1 torch code below references an mCamera field that no
        // longer exists in this camera2-based class.
        // if (mCamera != null) {
        //     try {
        //         Camera.Parameters params = mCamera.getParameters();
        //         params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        //         mCamera.setParameters(params);
        //     } catch (Exception e) {
        //         e.printStackTrace();
        //     }
        // }
    }

    public interface PreviewCallback {

        void onGetRgbaFrame(byte[] data, int width, int height);
    }

    static public class CameraCallbacksHandler implements CameraCallbacks {

        @Override
        public void onCameraParameters(Camera.Parameters params) {

        }

        @Override
        public void onError(Exception e) {
            //stop publish
        }

    }

    public interface CameraCallbacks {
        void onCameraParameters(Camera.Parameters params);

        void onError(Exception e);
    }

    public void setCameraCallbacksHandler(CameraCallbacksHandler cameraCallbacksHandler) {
        this.cameraCallbacksHandler = cameraCallbacksHandler;
    }
}