Camera2 修改屏幕角度

camera2
liuguijing 8 months ago
parent eaeab28829
commit 11f8cfdbdd

@ -17,7 +17,6 @@ import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
@ -26,7 +25,6 @@ import android.widget.Toast;
import com.dev.devapi.api.SysApi;
import com.github.faucamp.simplertmp.RtmpHandler;
import com.seu.magicfilter.utils.MagicFilterType;
import net.ossrs.yasea.CameraItemData;
import net.ossrs.yasea.SrsCameraView;
@ -143,12 +141,35 @@ public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpL
}
mCameraView = (SrsCameraView) findViewById(R.id.glsurfaceview_camera);
// mCameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
// @Override
// public void surfaceCreated(SurfaceHolder holder) {
// Canvas canvas = holder.lockCanvas();
// if (canvas != null) {
// canvas.drawColor(Color.WHITE);
// canvas.rotate(45, canvas.getWidth() / 2, canvas.getHeight() / 2);
// // 在旋转后的Canvas上绘制内容
// holder.unlockCanvasAndPost(canvas);
// }
// }
//
// @Override
// public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
//
// }
//
// @Override
// public void surfaceDestroyed(SurfaceHolder holder) {
//
// }
// });
int rotation = intent.getIntExtra("rotation", -1);
if (rotation != -1) {
//设置图像显示方向
mCameraView.setPreviewOrientation(rotation);
}
cameraData = mCameraView.getCameraData();
int size = cameraData.size();
if (size == 0) {
Toast.makeText(getApplicationContext(), "没有查询到摄像头", Toast.LENGTH_SHORT).show();
@ -252,6 +273,9 @@ public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpL
if (size > 0) {
int i = (++cameraId) % size;
mPublisher.switchCameraFace(i);
}
}
});
@ -294,71 +318,6 @@ public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpL
return true;
}
/**
 * Handles action-bar item selection: the settings item is consumed as-is;
 * every other id switches the live camera filter, after which the activity
 * title is set to the selected item's title.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == R.id.action_settings) {
        // Settings is consumed without touching the title (original behavior).
        return true;
    }
    mPublisher.switchCameraFilter(filterTypeForMenuId(id));
    setTitle(item.getTitle());
    return super.onOptionsItemSelected(item);
}

/**
 * Maps a filter menu-item id to its MagicFilterType.
 * Unknown ids (and original_filter) map to NONE, matching the old switch.
 */
private static MagicFilterType filterTypeForMenuId(int id) {
    switch (id) {
        case R.id.cool_filter:       return MagicFilterType.COOL;
        case R.id.beauty_filter:     return MagicFilterType.BEAUTY;
        case R.id.early_bird_filter: return MagicFilterType.EARLYBIRD;
        case R.id.evergreen_filter:  return MagicFilterType.EVERGREEN;
        case R.id.n1977_filter:      return MagicFilterType.N1977;
        case R.id.nostalgia_filter:  return MagicFilterType.NOSTALGIA;
        case R.id.romance_filter:    return MagicFilterType.ROMANCE;
        case R.id.sunrise_filter:    return MagicFilterType.SUNRISE;
        case R.id.sunset_filter:     return MagicFilterType.SUNSET;
        case R.id.tender_filter:     return MagicFilterType.TENDER;
        case R.id.toast_filter:      return MagicFilterType.TOASTER2;
        case R.id.valencia_filter:   return MagicFilterType.VALENCIA;
        case R.id.walden_filter:     return MagicFilterType.WALDEN;
        case R.id.warm_filter:       return MagicFilterType.WARM;
        case R.id.original_filter:
        default:                     return MagicFilterType.NONE;
    }
}
@Override
protected void onStart() {
super.onStart();

@ -14,10 +14,10 @@ buildscript {
}
allprojects {
    repositories {
        // NOTE(review): jcenter() has been read-only since 2021 and may stop
        // serving artifacts — plan a migration to mavenCentral() once every
        // dependency is available there. Kept for now to preserve resolution.
        jcenter()
        google()
    }
}
task clean(type: Delete) {

@ -2,12 +2,12 @@ package com.seu.magicfilter.advanced;
import android.opengl.GLES20;
import com.seu.magicfilter.utils.MagicFilterType;
import net.ossrs.yasea.R;
import com.seu.magicfilter.base.gpuimage.GPUImageFilter;
import com.seu.magicfilter.utils.MagicFilterType;
import com.seu.magicfilter.utils.OpenGLUtils;
import net.ossrs.yasea.R;
public class MagicAmaroFilter extends GPUImageFilter{
private int[] inputTextureHandles = {-1,-1,-1};
private int[] inputTextureUniformLocations = {-1,-1,-1};

@ -1,14 +1,14 @@
package com.seu.magicfilter.base;
import java.nio.FloatBuffer;
import java.util.List;
import android.content.Context;
import android.opengl.GLES20;
import com.seu.magicfilter.base.gpuimage.GPUImageFilter;
import com.seu.magicfilter.utils.OpenGLUtils;
import android.content.Context;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.List;
public class MagicBaseGroupFilter extends GPUImageFilter {
@ -31,9 +31,9 @@ public class MagicBaseGroupFilter extends GPUImageFilter {
}
@Override
public void init(Context context) {
public void init(Context context, int cameraid) {
for (GPUImageFilter filter : filters) {
filter.init(context);
filter.init(context, cameraid);
}
}

@ -61,6 +61,8 @@ public class GPUImageFilter {
private int[] mGLFboId;
private int[] mGLFboTexId;
private IntBuffer mGLFboBuffer;
private float[] TEX_COORD;
private int curCameraid;
public GPUImageFilter() {
this(MagicFilterType.NONE);
@ -81,8 +83,9 @@ public class GPUImageFilter {
mFragmentShaderId = fragmentShaderId;
}
public void init(Context context) {
public void init(Context context, int cameraid) {
mContext = context;
curCameraid = cameraid;
onInit();
onInitialized();
}
@ -127,21 +130,15 @@ public class GPUImageFilter {
mGLInputImageTextureIndex = GLES20.glGetUniformLocation(mGLProgId, "inputImageTexture");
}
private void initVbo() {
public void initVbo() {
final float VEX_CUBE[] = {
//初始化 顶点着色器
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
};
final float TEX_COORD[] = {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
};
setOrtation(curCameraid);
mGLCubeBuffer = ByteBuffer.allocateDirect(VEX_CUBE.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLCubeBuffer.put(VEX_CUBE).position(0);
@ -162,6 +159,12 @@ public class GPUImageFilter {
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mGLTextureBuffer.capacity() * 4, mGLTextureBuffer, GLES20.GL_STATIC_DRAW);
}
/**
 * Re-selects the texture-coordinate quad for the given camera index.
 * The name keeps the original spelling ("TExt") because external callers
 * (SrsCameraView.setOrtation) depend on it.
 *
 * NOTE(review): no buffer re-upload is needed here — onDrawFrame re-uploads
 * TEX_COORD every frame — which is presumably why the GL buffer calls were
 * commented out; confirm before adding an upload back.
 *
 * @param i camera index passed through to setOrtation
 */
public void initTExt(int i) {
    setOrtation(i);
}
private void destoryVbo() {
if (mGLCubeId != null) {
GLES20.glDeleteBuffers(1, mGLCubeId, 0);
@ -251,10 +254,19 @@ public class GPUImageFilter {
GLES20.glUseProgram(mGLProgId);
runPendingOnDrawTasks();
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLCubeId[0]);
GLES20.glEnableVertexAttribArray(mGLPositionIndex);
GLES20.glVertexAttribPointer(mGLPositionIndex, 2, GLES20.GL_FLOAT, false, 4 * 2, 0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEX_COORD.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLTextureBuffer.put(TEX_COORD).position(0);
GLES20.glGenBuffers(1, mGLTextureCoordinateId, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLTextureCoordinateId[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mGLTextureBuffer.capacity() * 4, mGLTextureBuffer, GLES20.GL_STATIC_DRAW);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLTextureCoordinateId[0]);
GLES20.glEnableVertexAttribArray(mGLTextureCoordinateIndex);
GLES20.glVertexAttribPointer(mGLTextureCoordinateIndex, 2, GLES20.GL_FLOAT, false, 4 * 2, 0);
@ -288,9 +300,11 @@ public class GPUImageFilter {
return mGLFboTexId[0];
}
/** Hook invoked just before glDrawArrays; subclasses may bind extra GL state. */
protected void onDrawArraysPre() {
}

/** Hook invoked right after glDrawArrays; subclasses may restore GL state. */
protected void onDrawArraysAfter() {
}
private void runPendingOnDrawTasks() {
while (!mRunOnDraw.isEmpty()) {
@ -410,5 +424,54 @@ public class GPUImageFilter {
mRunOnDraw.addLast(runnable);
}
}
/**
 * Selects the texture-coordinate quad used to sample the camera frame so the
 * preview renders upright for the given camera index. Vertex order matches
 * VEX_CUBE in initVbo: bottom-left, bottom-right, top-left, top-right.
 *
 * Mapping (per the original per-camera comments):
 *   0 -> 90°, 1 -> 180°, 2 -> 270°, 3 and any other id -> identity.
 * NOTE(review): the parameter is a camera id, not a rotation in degrees, and
 * the original TEX_COORD_270 array was byte-for-byte identical to the
 * identity mapping — confirm against the actual sensor orientations.
 *
 * @param i camera index (0-3); other values keep the identity mapping
 */
public void setOrtation(int i) {
    // Identity: no rotation applied.
    final float[] identity = {
            0.0f, 0.0f, // Bottom left.
            1.0f, 0.0f, // Bottom right.
            0.0f, 1.0f, // Top left.
            1.0f, 1.0f  // Top right.
    };
    // Upright for camera 0 (90° rotation of the quad).
    final float[] rotate90 = {
            0.0f, 1.0f, // Bottom left.
            0.0f, 0.0f, // Bottom right.
            1.0f, 1.0f, // Top left.
            1.0f, 0.0f  // Top right.
    };
    // Upright for camera 1 (180° rotation).
    final float[] rotate180 = {
            1.0f, 1.0f, // Bottom left.
            0.0f, 1.0f, // Bottom right.
            1.0f, 0.0f, // Top left.
            0.0f, 0.0f  // Top right.
    };
    // Upright for camera 2 (270° rotation).
    final float[] rotate270 = {
            1.0f, 0.0f, // Bottom left.
            1.0f, 1.0f, // Bottom right.
            0.0f, 0.0f, // Top left.
            0.0f, 1.0f  // Top right.
    };
    switch (i) {
        case 0:
            TEX_COORD = rotate90;
            break;
        case 1:
            TEX_COORD = rotate180;
            break;
        case 2:
            TEX_COORD = rotate270;
            break;
        default:
            // Camera 3 and unknown ids: identity (the original assigned an
            // array identical to the default for i == 3).
            TEX_COORD = identity;
            break;
    }
}
}

@ -0,0 +1,19 @@
package net.ossrs.yasea;
import java.util.LinkedList;
/**
 * FIFO queue of tasks posted from arbitrary threads and drained on a single
 * consumer thread (e.g. the GL thread).
 *
 * Fix: the original runPendings() read and removed from the LinkedList
 * without holding the monitor that addToPending() synchronizes on, so a
 * concurrent add could corrupt the (non-thread-safe) list. Both paths now
 * lock on the queue; tasks run outside the lock so a task may safely enqueue
 * follow-up work.
 */
public class PendingThreadAider {
    // Guard every access with this list's own monitor.
    private final LinkedList<Runnable> mRunOnDraw = new LinkedList<Runnable>();

    /** Runs and removes every queued task in FIFO order. */
    public void runPendings() {
        while (true) {
            Runnable task;
            synchronized (mRunOnDraw) {
                if (mRunOnDraw.isEmpty()) {
                    return;
                }
                task = mRunOnDraw.removeFirst();
            }
            // Executed outside the lock to avoid deadlock if the task enqueues.
            task.run();
        }
    }

    /** Queues a task to be executed by the next runPendings() call. */
    public void addToPending(final Runnable runnable) {
        synchronized (mRunOnDraw) {
            mRunOnDraw.addLast(runnable);
        }
    }
}

@ -62,6 +62,7 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
private float[] mProjectionMatrix = new float[16];
private float[] mSurfaceMatrix = new float[16];
private float[] mTransformMatrix = new float[16];
private ByteBuffer mGLPreviewBuffer;
private int mCamId = -1;
private int mPreviewRotation = 90;
@ -81,6 +82,9 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
private String[] cameraNames;//摄像头名称列表
private CameraItemData curItem;//当前选中的摄像头参数
private ArrayList<CameraItemData> itemlist = new ArrayList<>();
private float[] rotationMatrix = new float[16];
private float[] vPMatrix = new float[16];
;
public SrsCameraView(Context context) {
this(context, null);
@ -91,17 +95,24 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
initCameraData();
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
int cameraid = 0;
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
String id = curItem.getId();
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
magicFilter.init(getContext().getApplicationContext());
try {
cameraid = Integer.parseInt(id);
} catch (Exception e) {
cameraid = 0;
}
// magicFilter.setOrtation(cameraid);
magicFilter.init(getContext().getApplicationContext(),cameraid);
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOESTextureId = OpenGLUtils.getExternalOESTextureID();
@ -122,7 +133,6 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
mSurfaceHeight = height;
magicFilter.onDisplaySizeChanged(width, height);
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOutputAspectRatio = width > height ? (float) width / height : (float) height / width;
float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
if (width > height) {
@ -134,33 +144,38 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
/**
 * GLSurfaceView.Renderer callback: renders one camera frame through the
 * active filter and, while encoding, hands the filter's FBO pixels to the
 * encoder thread.
 */
@Override
public void onDrawFrame(GL10 gl) {
// Guard: surface and preview sizes must agree, otherwise the RGBA read-back
// buffer sized in setPreviewResolution would overflow (see notes below).
if (mSurfaceWidth != mPreviewWidth || mSurfaceHeight != mPreviewHeight) {
//May be a buffer overflow in enableEncoding()
//mPreviewWidth changed but onSurfaceCreated fired after enable encoding (mIsEncoding == true)
//could be calling magicFilter.onInputSizeChanged(width, height) in setPreviewResolution() after changing mGLPreviewBuffer?
//or start the encoder only after onSurfaceCreated ...
Log.e(TAG, String.format("Surface dimensions differ from Preview. May be a buffer overflow. Surface: %dx%d, Preview: %dx%d ", mSurfaceWidth, mSurfaceHeight, mPreviewWidth, mPreviewHeight));
return;
}
// Clear the screen color buffer.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Latch the newest camera frame into the OES texture, then combine the
// SurfaceTexture transform with the projection matrix before filtering.
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSurfaceMatrix);
Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0, mProjectionMatrix, 0);
magicFilter.setTextureTransformMatrix(mTransformMatrix);
magicFilter.onDrawFrame(mOESTextureId);
// While encoding, queue this frame's FBO buffer and wake the writer thread.
if (mIsEncoding) {
mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
synchronized (writeLock) {
writeLock.notifyAll();
}
}
}
/**
 * Computes the clockwise rotation (0/90/180/270) to apply to the camera image
 * so it renders upright for the current display rotation, following the
 * standard camera2 orientation formula.
 *
 * Fix: the original unconditionally unboxed the nullable results of
 * CameraCharacteristics.get(), which NPEs on devices that omit a key; missing
 * values now fall back to sensible defaults.
 *
 * @param characteristics        characteristics of the camera in use
 * @param surfaceRotationDegrees current display/surface rotation in degrees
 * @return relative rotation in degrees, in [0, 360)
 */
public int computeRelativeRotation(CameraCharacteristics characteristics, int surfaceRotationDegrees) {
    Integer sensorOrientationDegrees = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (sensorOrientationDegrees == null) {
        sensorOrientationDegrees = 0; // orientation unreported — assume upright sensor
    }
    Integer lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
    // Front-facing cameras are mirrored, so the device rotation is applied in
    // the opposite direction relative to back-facing cameras.
    int sign = (lensFacing != null && lensFacing == CameraCharacteristics.LENS_FACING_FRONT) ? 1 : -1;
    // Desired orientation relative to the sensor orientation, normalized.
    return (sensorOrientationDegrees - surfaceRotationDegrees * sign + 360) % 360;
}
public void setPreviewCallback(PreviewCallback cb) {
@ -212,9 +227,6 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
public int[] setPreviewResolution(int width, int height) {
// mCamera = openCamera();
mPreviewWidth = width;
mPreviewHeight = height;
// Camera.Size rs = adaptPreviewResolution(mCamera.new Size(width, height));
@ -222,18 +234,14 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
// mPreviewWidth = rs.width;
// mPreviewHeight = rs.height;
// }
getHolder().setFixedSize(mPreviewWidth, mPreviewHeight);
// mCamera.getParameters().setPreviewSize(mPreviewWidth, mPreviewHeight);
mGLPreviewBuffer = ByteBuffer.allocate(mPreviewWidth * mPreviewHeight * 4);
mInputAspectRatio = mPreviewWidth > mPreviewHeight ? (float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight / mPreviewWidth;
return new int[]{mPreviewWidth, mPreviewHeight};
}
public boolean setFilter(final MagicFilterType type) {
public boolean setFilter(final MagicFilterType type, final int cameraid) {
queueEvent(new Runnable() {
@Override
@ -243,7 +251,7 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
}
magicFilter = MagicFilterFactory.initFilters(type);
if (magicFilter != null) {
magicFilter.init(getContext().getApplicationContext());
magicFilter.init(getContext().getApplicationContext(), cameraid);
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
magicFilter.onDisplaySizeChanged(mSurfaceWidth, mSurfaceHeight);
}
@ -293,7 +301,6 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
// Records the requested preview orientation for later camera configuration.
// NOTE(review): the view rotation is hard-coded to 180° regardless of the
// requested orientation — confirm this is intentional for this device/panel
// rather than setRotation(orientation).
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
setRotation(180);
}
public int getCameraId() {
@ -431,7 +438,7 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
imageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 10);
imageReader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 10);
// imageReader.setOnImageAvailableListener(onImageAvailableListener,null);
if (ActivityCompat.checkSelfPermission(getContext(), android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
}
@ -534,7 +541,13 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
}
return closestRange;
}
/**
 * Re-orients the preview texture for the given camera id by rebuilding the
 * active filter's texture-coordinate quad (delegates to
 * GPUImageFilter.initTExt / setOrtation). Called when switching cameras.
 *
 * @param cameraid numeric camera id parsed from the camera2 id string
 */
public void setOrtation(int cameraid) {
    magicFilter.initTExt(cameraid);
}
public void stopTorch() {
// if (mCamera != null) {
// try {
@ -547,6 +560,8 @@ public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Render
// }
}
public interface PreviewCallback {
void onGetRgbaFrame(byte[] data, int width, int height);

@ -5,7 +5,6 @@ import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AutomaticGainControl;
import com.github.faucamp.simplertmp.RtmpHandler;
import com.seu.magicfilter.utils.MagicFilterType;
import java.io.File;
import java.util.List;
@ -305,13 +304,17 @@ public class SrsPublisher {
sendAudioOnly = flag;
}
/**
 * Applies the given magic filter to the camera preview.
 *
 * Fix: this commit changed SrsCameraView.setFilter to require the active
 * camera id (it drives the texture orientation), but this caller was left on
 * the old one-argument form and would no longer compile. Forward the view's
 * current camera id so orientation survives a filter switch.
 * NOTE(review): confirm getCameraId() matches the id parsed from the
 * camera2 id string used elsewhere (switchCameraFace parses item.getId()).
 */
public boolean switchCameraFilter(MagicFilterType type) {
    return mCameraView.setFilter(type, mCameraView.getCameraId());
}
public void switchCameraFace(int id) {
List<CameraItemData> cameraData = mCameraView.getCameraData();
CameraItemData item = cameraData.get(id);
int cameraid = 0;
try {
cameraid = Integer.parseInt( item.getId());
} catch (Exception e) {
cameraid = 0;
}
mCameraView.setOrtation(cameraid);
if (mEncoder != null && mEncoder.isEnabled()) {
mEncoder.pause();

Loading…
Cancel
Save