Camera2 修改屏幕角度

master
liuguijing 8 months ago
parent 53d1f21764
commit a76456bb64

@ -1,14 +1,14 @@
package com.seu.magicfilter.base;
import java.nio.FloatBuffer;
import java.util.List;
import android.content.Context;
import android.opengl.GLES20;
import com.seu.magicfilter.base.gpuimage.GPUImageFilter;
import com.seu.magicfilter.utils.OpenGLUtils;
import android.content.Context;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.List;
public class MagicBaseGroupFilter extends GPUImageFilter {

@ -61,6 +61,8 @@ public class GPUImageFilter {
private int[] mGLFboId;
private int[] mGLFboTexId;
private IntBuffer mGLFboBuffer;
private float[] TEX_COORD;
private int curCameraid;
public GPUImageFilter() {
this(MagicFilterType.NONE);
@ -83,6 +85,7 @@ public class GPUImageFilter {
/**
 * Initializes the filter with the given context and runs the GL setup hooks.
 * NOTE(review): the camera id is no longer passed in here; the caller appears
 * to invoke setOrtation(int) before init() instead — confirm that ordering.
 */
public void init(Context context) {
mContext = context;
// Legacy: the camera id used to be captured at init time.
// curCameraid = cameraid;
onInit();
onInitialized();
}
@ -119,36 +122,25 @@ public class GPUImageFilter {
}
/**
 * Compiles and links the vertex/fragment shader pair for this filter and
 * caches the attribute/uniform handles used while drawing.
 *
 * Fix: the merged diff left TWO loadProgram calls assigning mGLProgId (the
 * first linked program was leaked) and a duplicated glGetAttribLocation call;
 * each statement now appears exactly once.
 */
private void loadSamplerShader() {
    mGLProgId = OpenGLUtils.loadProgram(
            OpenGLUtils.readShaderFromRawResource(getContext(), mVertexShaderId),
            OpenGLUtils.readShaderFromRawResource(getContext(), mFragmentShaderId));
    mGLPositionIndex = GLES20.glGetAttribLocation(mGLProgId, "position");
    mGLTextureCoordinateIndex = GLES20.glGetAttribLocation(mGLProgId, "inputTextureCoordinate");
    mGLTextureTransformIndex = GLES20.glGetUniformLocation(mGLProgId, "textureTransform");
    mGLInputImageTextureIndex = GLES20.glGetUniformLocation(mGLProgId, "inputImageTexture");
}
private void initVbo() {
final float VEX_CUBE[] = {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
// Initialize the full-screen quad vertices fed to the vertex shader.
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
};
final float TEX_COORD[] = {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
};
mGLCubeBuffer = ByteBuffer.allocateDirect(VEX_CUBE.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
setOrtation(curCameraid);
mGLCubeBuffer = ByteBuffer.allocateDirect(VEX_CUBE.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLCubeBuffer.put(VEX_CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEX_COORD.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLTextureBuffer.put(TEX_COORD).position(0);
mGLCubeId = new int[1];
mGLTextureCoordinateId = new int[1];
@ -157,9 +149,7 @@ public class GPUImageFilter {
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLCubeId[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mGLCubeBuffer.capacity() * 4, mGLCubeBuffer, GLES20.GL_STATIC_DRAW);
GLES20.glGenBuffers(1, mGLTextureCoordinateId, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLTextureCoordinateId[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mGLTextureBuffer.capacity() * 4, mGLTextureBuffer, GLES20.GL_STATIC_DRAW);
initTextures();
}
private void destoryVbo() {
@ -255,6 +245,8 @@ public class GPUImageFilter {
GLES20.glEnableVertexAttribArray(mGLPositionIndex);
GLES20.glVertexAttribPointer(mGLPositionIndex, 2, GLES20.GL_FLOAT, false, 4 * 2, 0);
initTextures();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLTextureCoordinateId[0]);
GLES20.glEnableVertexAttribArray(mGLTextureCoordinateIndex);
GLES20.glVertexAttribPointer(mGLTextureCoordinateIndex, 2, GLES20.GL_FLOAT, false, 4 * 2, 0);
@ -288,16 +280,28 @@ public class GPUImageFilter {
return mGLFboTexId[0];
}
// Hook for subclasses: invoked just before glDrawArrays; default is a no-op.
protected void onDrawArraysPre() {}
// Initializes the texture coordinates: uploads the current TEX_COORD
// orientation mapping into a freshly generated GL vertex buffer object and
// leaves it bound to GL_ARRAY_BUFFER.
// NOTE(review): glGenBuffers runs on EVERY call; this is also invoked from
// the draw path, so previously generated buffer names are never deleted —
// confirm whether the old buffer should be freed (glDeleteBuffers) or reused.
private void initTextures() {
mGLTextureBuffer = ByteBuffer.allocateDirect(TEX_COORD.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLTextureBuffer.put(TEX_COORD).position(0);
GLES20.glGenBuffers(1, mGLTextureCoordinateId, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLTextureCoordinateId[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mGLTextureBuffer.capacity() * 4, mGLTextureBuffer, GLES20.GL_STATIC_DRAW);
}
// Hook for subclasses: invoked immediately after glDrawArrays; default no-op.
// Fix: the merged diff left duplicate definitions of onDrawArraysPre and
// onDrawArraysAfter here (a Java compile error); onDrawArraysPre is already
// declared above, so only the single onDrawArraysAfter remains.
protected void onDrawArraysAfter() {
}
/**
 * Drains the pending-task queue, running each queued Runnable on the
 * current (GL) thread in FIFO order until the queue is empty.
 */
private void runPendingOnDrawTasks() {
    while (!mRunOnDraw.isEmpty()) {
        final Runnable pending = mRunOnDraw.removeFirst();
        pending.run();
    }
}
// Returns the handle of the linked GL shader program for this filter.
public int getProgram() {
return mGLProgId;
}
@ -313,8 +317,8 @@ public class GPUImageFilter {
// Returns the MagicFilterType this filter instance was constructed with.
protected MagicFilterType getFilterType() {
return mType;
}
public void setTextureTransformMatrix(float[] mtx){
public void setTextureTransformMatrix(float[] mtx) {
mGLTextureTransformMatrix = mtx;
}
@ -410,5 +414,52 @@ public class GPUImageFilter {
mRunOnDraw.addLast(runnable);
}
}
/**
 * Selects the texture-coordinate mapping used to compensate for the mounting
 * rotation of the selected camera.
 *
 * @param i camera index: 0 selects the 0-degree table, 1 the 90-degree table,
 *          2 the 180-degree table, 3 the 270-degree table; any other value
 *          falls back to the identity mapping (which is byte-for-byte the
 *          same as the 270-degree table).
 */
public void setOrtation(int i) {
    // Identity mapping — also the 270-degree table and the fallback.
    final float[] coordIdentity = {
            0.0f, 0.0f, // Bottom left.
            1.0f, 0.0f, // Bottom right.
            0.0f, 1.0f, // Top left.
            1.0f, 1.0f  // Top right.
    };
    // Camera 1 upright (0 degrees).
    final float[] coord0 = {
            0.0f, 1.0f, // Bottom left.
            0.0f, 0.0f, // Bottom right.
            1.0f, 1.0f, // Top left.
            1.0f, 0.0f  // Top right.
    };
    // Camera 2 upright (90 degrees).
    final float[] coord90 = {
            1.0f, 1.0f, // Bottom left.
            0.0f, 1.0f, // Bottom right.
            1.0f, 0.0f, // Top left.
            0.0f, 0.0f  // Top right.
    };
    // Camera 3 upright (180 degrees).
    final float[] coord180 = {
            1.0f, 0.0f, // Bottom left.
            1.0f, 1.0f, // Bottom right.
            0.0f, 0.0f, // Top left.
            0.0f, 1.0f  // Top right.
    };
    switch (i) {
        case 0:
            TEX_COORD = coord0;
            break;
        case 1:
            TEX_COORD = coord90;
            break;
        case 2:
            TEX_COORD = coord180;
            break;
        default:
            // Covers i == 3 (270 degrees) and any unexpected index.
            TEX_COORD = coordIdentity;
            break;
    }
}
}

@ -1,5 +1,6 @@
package net.ossrs.yasea;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
@ -19,6 +20,7 @@ import android.media.ImageReader;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.support.v4.app.ActivityCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Range;
@ -95,10 +97,17 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
int cameraid = 0;
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
String id = curItem.getId();
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
try {
cameraid = Integer.parseInt(id);
} catch (Exception e) {
cameraid = 0;
}
magicFilter.setOrtation(cameraid);
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
@ -132,17 +141,11 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
@Override
public void onDrawFrame(GL10 gl) {
if (mSurfaceWidth != mPreviewWidth || mSurfaceHeight != mPreviewHeight) {
//May be a buffer overflow in enableEncoding()
//mPreviewWidth changed but onSurfaceCreated fired after enable encoding (mIsEncoding == true)
//could be calling magicFilter.onInputSizeChanged(width, height) in setPreviewResolution() after changing mGLPreviewBuffer?
//or start the encoder only after onSurfaceCreated ...
Log.e(TAG, String.format("Surface dimensions differ from Preview. May be a buffer overflow. Surface: %dx%d, Preview: %dx%d ", mSurfaceWidth, mSurfaceHeight, mPreviewWidth, mPreviewHeight));
return;
}
// Clear the screen color buffer before drawing.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
@ -210,24 +213,11 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
/**
 * Records the requested preview size, resizes the surface holder to match,
 * and (re)allocates the RGBA read-back buffer for preview frames.
 *
 * @param width  requested preview width in pixels
 * @param height requested preview height in pixels
 * @return the accepted {width, height} pair
 */
public int[] setPreviewResolution(int width, int height) {
    mPreviewWidth = width;
    mPreviewHeight = height;
    getHolder().setFixedSize(mPreviewWidth, mPreviewHeight);
    // 4 bytes per pixel (RGBA).
    mGLPreviewBuffer = ByteBuffer.allocate(mPreviewWidth * mPreviewHeight * 4);
    // Aspect ratio is always expressed as long side over short side (>= 1).
    final float longSide = Math.max(mPreviewWidth, mPreviewHeight);
    final float shortSide = Math.min(mPreviewWidth, mPreviewHeight);
    mInputAspectRatio = longSide / shortSide;
    return new int[]{mPreviewWidth, mPreviewHeight};
}
@ -291,7 +281,6 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
// Stores the requested preview orientation.
// NOTE(review): the view rotation is hardcoded to 180 degrees regardless of
// the requested orientation — confirm this is intentional for the target
// device rather than a leftover debugging value.
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
setRotation(180);
}
public int getCameraId() {
@ -431,8 +420,9 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
imageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 10);
// imageReader.setOnImageAvailableListener(onImageAvailableListener,null);
// if (Activity.checkSelfPermission(getContext(), android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// }
if (ActivityCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
cameraManager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
@ -533,6 +523,10 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
return closestRange;
}
// Forwards the camera index to the active filter so it can select the
// matching texture-coordinate orientation (see GPUImageFilter.setOrtation).
// NOTE(review): magicFilter is created in onSurfaceCreated; calling this
// before the surface exists would throw an NPE — confirm the call ordering.
public void setOrtation(int cameraid) {
magicFilter.setOrtation(cameraid);
}
public void stopTorch() {
// if (mCamera != null) {
// try {

@ -312,6 +312,13 @@ public class SrsPublisher {
public void switchCameraFace(int id) {
List<CameraItemData> cameraData = mCameraView.getCameraData();
CameraItemData item = cameraData.get(id);
int cameraid = 0;
try {
cameraid = Integer.parseInt( item.getId());
} catch (Exception e) {
cameraid = 0;
}
mCameraView.setOrtation(cameraid);
if (mEncoder != null && mEncoder.isEnabled()) {
mEncoder.pause();

Loading…
Cancel
Save