New implementation of short-video recording, with OSD support
parent
fe4a2ae765
commit
f21bcab4a5
@ -0,0 +1,78 @@
|
||||
package com.xypower.gpuvideoandroid.widget;
|
||||
|
||||
import android.content.Context;
|
||||
import android.opengl.GLSurfaceView;
|
||||
import android.util.AttributeSet;
|
||||
import android.view.MotionEvent;
|
||||
import android.view.View;
|
||||
|
||||
public class AutoFitGLView extends GLSurfaceView implements View.OnTouchListener {
|
||||
|
||||
private float mAspectRatio;
|
||||
|
||||
public AutoFitGLView(Context context) {
|
||||
this(context, null);
|
||||
}
|
||||
|
||||
public AutoFitGLView(Context context, AttributeSet attrs) {
|
||||
super(context, attrs);
|
||||
setOnTouchListener(this);
|
||||
}
|
||||
|
||||
private TouchListener touchListener;
|
||||
|
||||
public void setAspectRatio(int width, int height){
|
||||
mAspectRatio = (float)width / height;
|
||||
getHolder().setFixedSize(width, height);
|
||||
requestLayout();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onTouch(View v, MotionEvent event) {
|
||||
final int actionMasked = event.getActionMasked();
|
||||
if (actionMasked != MotionEvent.ACTION_DOWN) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (touchListener != null) {
|
||||
touchListener.onTouch(event, v.getWidth(), v.getHeight());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public interface TouchListener {
|
||||
void onTouch(MotionEvent event, int width, int height);
|
||||
}
|
||||
|
||||
public void setTouchListener(TouchListener touchListener) {
|
||||
this.touchListener = touchListener;
|
||||
}
|
||||
|
||||
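    // Adjusts the measured size to match the requested aspect ratio, inverting the ratio
    // when the view is taller than it is wide.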
@Override
|
||||
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
|
||||
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
|
||||
int width = MeasureSpec.getSize(widthMeasureSpec);
|
||||
int height = MeasureSpec.getSize(heightMeasureSpec);
|
||||
if (mAspectRatio == 0) {
|
||||
setMeasuredDimension(width, height);
|
||||
        } else {
            int newW, newH;
            float actualRatio;
            if (width > height) {
                actualRatio = mAspectRatio;
            } else {
                actualRatio = 1 / mAspectRatio;
            }
|
||||
|
||||
            if (width < height * actualRatio) {
                newH = height;
                newW = (int) (height * actualRatio);
            } else {
|
||||
newW = width;
|
||||
newH = (int) (width / actualRatio);
|
||||
}
|
||||
setMeasuredDimension(newW, newH);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,653 @@
|
||||
package com.xypower.mpapp.v2;
|
||||
|
||||
import android.content.ContentValues;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.Color;
|
||||
import android.graphics.Insets;
|
||||
import android.graphics.Paint;
|
||||
import android.graphics.PorterDuff;
|
||||
import android.graphics.Rect;
|
||||
import android.net.Uri;
|
||||
import android.opengl.GLException;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.os.Environment;
|
||||
import android.os.Handler;
|
||||
import android.provider.MediaStore;
|
||||
import android.text.TextUtils;
|
||||
import android.util.Log;
|
||||
import android.view.Window;
|
||||
import android.view.WindowInsets;
|
||||
import android.view.WindowInsetsController;
|
||||
import android.view.WindowManager;
|
||||
import android.view.WindowMetrics;
|
||||
import android.widget.FrameLayout;
|
||||
|
||||
import androidx.annotation.RequiresApi;
|
||||
import androidx.appcompat.app.AppCompatActivity;
|
||||
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
|
||||
|
||||
import com.xypower.gpuv.camerarecorder.CameraRecordListener;
|
||||
import com.xypower.gpuv.camerarecorder.GPUCameraRecorder;
|
||||
import com.xypower.gpuv.camerarecorder.GPUCameraRecorderBuilder;
|
||||
import com.xypower.gpuv.egl.filter.GlWatermarkFilter;
|
||||
import com.xypower.gpuvideoandroid.widget.AutoFitGLView;
|
||||
import com.xypower.mpapp.MicroPhotoService;
|
||||
import com.xypower.mpapp.R;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.opengles.GL10;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.IntBuffer;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
|
||||
public class Camera2VideoActivity extends AppCompatActivity {
|
||||
|
||||
public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH";
|
||||
public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED";
|
||||
|
||||
private static final int DEFAULT_FONT_SIZE = 20;
|
||||
private AutoFitGLView mPreviewView;
|
||||
protected GPUCameraRecorder mGPUCameraRecorder;
|
||||
// protected LensFacing lensFacing = LensFacing.BACK;
|
||||
protected int mCameraWidth = 1280;
|
||||
protected int mCameraHeight = 720;
|
||||
protected int mVideoWidth = 1280;
|
||||
protected int mVideoHeight = 720;
|
||||
|
||||
private int mCameraId;
|
||||
private long mVideoId = 0;
|
||||
private int mDuration = 0;
|
||||
|
||||
private int mOrientation = -1;
|
||||
|
||||
private String mNextVideoAbsolutePath;
|
||||
|
||||
private String mOSDLeftTop = null;
|
||||
private String mOSDRightTop = null;
|
||||
private String mOSDRightBottom = null;
|
||||
private String mOSDLeftBottom = null;
|
||||
|
||||
private int mOSDMargin = 0;
|
||||
private Paint mPaint;
|
||||
private Paint mPaintStroker;
|
||||
private Bitmap mBitmap;
|
||||
GlWatermarkFilter mOSDFilter = null;
|
||||
|
||||
private SimpleDateFormat mDateFormater;
|
||||
// SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss a");
|
||||
|
||||
private final String TIME_MICRO_TS = "$$TS$$";
|
||||
private final String TIME_MICRO_DT = "$$DATETIME$$";
|
||||
|
||||
private int mTimeMask = 0;
|
||||
private int mStatusBarHeight = -1;
|
||||
|
||||
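    // OSD time-macro bookkeeping: each corner (left/right top/bottom) gets three mask bits
    // recording whether its text contains the $$TS$$ macro, the $$DATETIME$$ macro, or
    // spans multiple lines, so time substitution is only applied where it is needed.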
private final static int TIME_MASK_LT_TS = 1;
|
||||
private final static int TIME_MASK_LT_DT = 2;
|
||||
private final static int TIME_MASK_LT_ML = 4;
|
||||
private final static int TIME_MASK_LT = TIME_MASK_LT_TS | TIME_MASK_LT_DT | TIME_MASK_LT_ML;
|
||||
private final static int TIME_MASK_RT_TS = 8;
|
||||
private final static int TIME_MASK_RT_DT = 16;
|
||||
private final static int TIME_MASK_RT_ML = 32;
|
||||
private final static int TIME_MASK_RT = TIME_MASK_RT_TS | TIME_MASK_RT_DT | TIME_MASK_RT_ML;
|
||||
private final static int TIME_MASK_RB_TS = 64;
|
||||
private final static int TIME_MASK_RB_DT = 128;
|
||||
private final static int TIME_MASK_RB_ML = 256;
|
||||
private final static int TIME_MASK_RB = TIME_MASK_RB_TS | TIME_MASK_RB_DT | TIME_MASK_RB_ML;
|
||||
private final static int TIME_MASK_LB_TS = 512;
|
||||
private final static int TIME_MASK_LB_DT = 1024;
|
||||
private final static int TIME_MASK_LB_ML = 2048;
|
||||
private final static int TIME_MASK_LB = TIME_MASK_LB_TS | TIME_MASK_LB_DT | TIME_MASK_LB_ML;
|
||||
|
||||
private Handler mHandler = null;
|
||||
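    // Per-second OSD refresh: rounds the timestamp up when within 100 ms of the next
    // second, redraws the OSD, and re-arms itself for the remainder of the current second.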
private Runnable mTimerRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
long ts = System.currentTimeMillis();
|
||||
long ms = ts % 1000;
|
||||
if (ms > 900) {
|
||||
ts += 1000 - ms;
|
||||
ms = 0;
|
||||
}
|
||||
|
||||
updateOSD(ts);
|
||||
|
||||
mHandler.postDelayed(this, 1000 - ms);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@Override
|
||||
protected void onCreate(Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
requestWindowFeature(Window.FEATURE_NO_TITLE);
|
||||
|
||||
Window win = getWindow();
|
||||
// win.setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
|
||||
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
|
||||
/*
|
||||
win.setDecorFitsSystemWindows(false);
|
||||
WindowInsetsController controller = win.getInsetsController();
|
||||
if (controller != null) {
|
||||
controller.hide(WindowInsets.Type.statusBars() | WindowInsets.Type.navigationBars());
|
||||
controller.setSystemBarsBehavior(WindowInsetsController.BEHAVIOR_SHOW_TRANSIENT_BARS_BY_SWIPE);
|
||||
}
|
||||
|
||||
*/
|
||||
}
|
||||
|
||||
setContentView(R.layout.activity_camera2_video);
|
||||
|
||||
        if (getSupportActionBar() != null) {
            getSupportActionBar().hide();
        }
|
||||
|
||||
// mStatusBarHeight = getStatusBarHeight(this);
|
||||
onCreateActivity();
|
||||
|
||||
getWindow().getDecorView().setOnApplyWindowInsetsListener((v, insets) -> {
|
||||
mStatusBarHeight = px2dip(Camera2VideoActivity.this, insets.getStableInsetTop());
|
||||
|
||||
return insets;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
public static int px2dip(Context context, float pxValue) {
|
||||
final float scale = context.getResources().getDisplayMetrics().density;
|
||||
return (int) (pxValue / scale + 0.5f);
|
||||
}
|
||||
|
||||
|
||||
public int getStatusBarHeight(Context context) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
|
||||
|
||||
|
||||
|
||||
|
||||
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
|
||||
WindowMetrics windowMetrics = wm.getCurrentWindowMetrics();
|
||||
WindowInsets windowInsets = windowMetrics.getWindowInsets();
|
||||
Insets insets = windowInsets.getInsetsIgnoringVisibility(WindowInsets.Type.navigationBars() | WindowInsets.Type.displayCutout());
|
||||
return insets.top;
|
||||
}
|
||||
|
||||
Rect frame = new Rect();
|
||||
getWindow().getDecorView().getWindowVisibleDisplayFrame(frame);
|
||||
int statusBarHeight = frame.top;
|
||||
|
||||
return statusBarHeight;
|
||||
}
|
||||
|
||||
|
||||
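    /*
     * Reads the recording parameters from the launching Intent. The exact values come from
     * the caller; an illustrative launch might look roughly like this:
     *
     *   Intent intent = new Intent(context, Camera2VideoActivity.class);
     *   intent.putExtra("cameraId", 0);
     *   intent.putExtra("videoId", 12345L);
     *   intent.putExtra("duration", 15);        // recording length in seconds
     *   intent.putExtra("width", 1280);
     *   intent.putExtra("height", 720);
     *   intent.putExtra("leftTopOsd", "$$DATETIME$$");
     *   intent.putExtra("margin", 16);          // OSD margin in pixels
     *   context.startActivity(intent);
     *
     * The other corners ("rightTopOsd", "rightBottomOsd", "leftBottomOsd") and "orientation"
     * are optional extras read the same way.
     */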
protected void onCreateActivity() {
|
||||
//
|
||||
|
||||
// SysApi.setCam3V3Enable(true);
|
||||
|
||||
Intent intent = getIntent();
|
||||
|
||||
mCameraId = intent.getIntExtra("cameraId", 0);
|
||||
mVideoId = intent.getLongExtra("videoId", 0);
|
||||
mDuration = intent.getIntExtra("duration", 0);
|
||||
mVideoWidth = intent.getIntExtra("width", 0);
|
||||
mVideoHeight = intent.getIntExtra("height", 0);
|
||||
mOrientation = intent.getIntExtra("orientation", -1);
|
||||
mOSDLeftTop = intent.getStringExtra("leftTopOsd");
|
||||
mOSDLeftBottom = intent.getStringExtra("leftBottomOsd");
|
||||
mOSDRightBottom = intent.getStringExtra("rightBottomOsd");
|
||||
mOSDRightTop = intent.getStringExtra("rightTopOsd");
|
||||
mOSDMargin = intent.getIntExtra("margin", 0);
|
||||
|
||||
mCameraWidth = mVideoWidth;
|
||||
mCameraHeight = mVideoHeight;
|
||||
|
||||
mTimeMask = 0;
|
||||
        if (!TextUtils.isEmpty(mOSDLeftTop)) {
            mOSDLeftTop = mOSDLeftTop.replace("\r\n", "\n");
            mOSDLeftTop = mOSDLeftTop.replace("\n\r", "\n");
            mOSDLeftTop = mOSDLeftTop.replace("\r", "\n");
            // Record which time macros this corner contains (and whether it is multi-line).
            if (mOSDLeftTop.indexOf(TIME_MICRO_TS) != -1) {
                mTimeMask |= TIME_MASK_LT_TS;
            }
            if (mOSDLeftTop.indexOf(TIME_MICRO_DT) != -1) {
                mTimeMask |= TIME_MASK_LT_DT;
            }
            if (mOSDLeftTop.indexOf("\n") != -1) {
                mTimeMask |= TIME_MASK_LT_ML;
            }
        }
|
||||
        if (!TextUtils.isEmpty(mOSDRightTop)) {
            mOSDRightTop = mOSDRightTop.replace("\r\n", "\n");
            mOSDRightTop = mOSDRightTop.replace("\n\r", "\n");
            mOSDRightTop = mOSDRightTop.replace("\r", "\n");
            if (mOSDRightTop.indexOf(TIME_MICRO_TS) != -1) {
                mTimeMask |= TIME_MASK_RT_TS;
            }
            if (mOSDRightTop.indexOf(TIME_MICRO_DT) != -1) {
                mTimeMask |= TIME_MASK_RT_DT;
            }
            if (mOSDRightTop.indexOf("\n") != -1) {
                mTimeMask |= TIME_MASK_RT_ML;
            }
        }
|
||||
        if (!TextUtils.isEmpty(mOSDRightBottom)) {
            mOSDRightBottom = mOSDRightBottom.replace("\r\n", "\n");
            mOSDRightBottom = mOSDRightBottom.replace("\n\r", "\n");
            mOSDRightBottom = mOSDRightBottom.replace("\r", "\n");
            if (mOSDRightBottom.indexOf(TIME_MICRO_TS) != -1) {
                mTimeMask |= TIME_MASK_RB_TS;
            }
            if (mOSDRightBottom.indexOf(TIME_MICRO_DT) != -1) {
                mTimeMask |= TIME_MASK_RB_DT;
            }
            if (mOSDRightBottom.indexOf("\n") != -1) {
                mTimeMask |= TIME_MASK_RB_ML;
            }
        }
|
||||
        if (!TextUtils.isEmpty(mOSDLeftBottom)) {
            mOSDLeftBottom = mOSDLeftBottom.replace("\r\n", "\n");
            mOSDLeftBottom = mOSDLeftBottom.replace("\n\r", "\n");
            mOSDLeftBottom = mOSDLeftBottom.replace("\r", "\n");
            if (mOSDLeftBottom.indexOf(TIME_MICRO_TS) != -1) {
                mTimeMask |= TIME_MASK_LB_TS;
            }
            if (mOSDLeftBottom.indexOf(TIME_MICRO_DT) != -1) {
                mTimeMask |= TIME_MASK_LB_DT;
            }
            if (mOSDLeftBottom.indexOf("\n") != -1) {
                mTimeMask |= TIME_MASK_LB_ML;
            }
        }
|
||||
|
||||
mHandler = new Handler();
|
||||
|
||||
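        // Start recording after a short delay (by which time the camera preview should be
        // running) and schedule an automatic stop once the requested duration has elapsed.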
mHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
mNextVideoAbsolutePath = getVideoFilePath();
|
||||
mGPUCameraRecorder.start(mNextVideoAbsolutePath);
|
||||
}
|
||||
}, 200);
|
||||
|
||||
mHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
mGPUCameraRecorder.stop();
|
||||
}
|
||||
}, 200 + mDuration * 1000);
|
||||
|
||||
// getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onResume() {
|
||||
super.onResume();
|
||||
setUpCamera();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onStop() {
|
||||
super.onStop();
|
||||
releaseCamera();
|
||||
}
|
||||
|
||||
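    // Redraws the four OSD corners into mBitmap (the same bitmap wrapped by the
    // GlWatermarkFilter), substituting the time macros for the given timestamp.
    // Invoked once per second while recording when mTimeMask is non-zero.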
private void updateOSD(long ts) {
|
||||
|
||||
if (mStatusBarHeight == -1) {
|
||||
mStatusBarHeight = getStatusBarHeight(this);
|
||||
}
|
||||
int statusHeight = mStatusBarHeight;
|
||||
synchronized (mBitmap) {
|
||||
int bmWidth = mBitmap.getWidth();
|
||||
int bmHeight = mBitmap.getHeight();
|
||||
int margin = mOSDMargin;
|
||||
// mOSDFilter.
|
||||
|
||||
Canvas canvas = new Canvas(mBitmap);
|
||||
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
|
||||
|
||||
if (!TextUtils.isEmpty(mOSDLeftTop)) {
|
||||
                String osd = ((mTimeMask & TIME_MASK_LT) == 0) ? mOSDLeftTop : updateOSDTime(mOSDLeftTop, ts);
|
||||
// mPaint.setTextAlign(Paint.Align.LEFT);
|
||||
int x = margin;
|
||||
int y = margin + statusHeight;
|
||||
canvas.drawText(osd, x, y, mPaint);
|
||||
canvas.drawText(osd, x, y, mPaintStroker);
|
||||
}
|
||||
|
||||
if (!TextUtils.isEmpty(mOSDLeftBottom)) {
|
||||
                String osd = ((mTimeMask & TIME_MASK_LB) == 0) ? mOSDLeftBottom : updateOSDTime(mOSDLeftBottom, ts);
|
||||
// mPaint.setTextAlign(Paint.Align.LEFT);
|
||||
Rect textBounds = new Rect();
|
||||
mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
|
||||
float y = bmHeight - margin - textBounds.height();
|
||||
canvas.drawText(osd, margin, y, mPaint);
|
||||
canvas.drawText(osd, margin, y, mPaintStroker);
|
||||
}
|
||||
|
||||
if (!TextUtils.isEmpty(mOSDRightTop)) {
|
||||
                String osd = ((mTimeMask & TIME_MASK_RT) == 0) ? mOSDRightTop : updateOSDTime(mOSDRightTop, ts);
|
||||
// mPaint.setTextAlign(Paint.Align.RIGHT);
|
||||
Rect textBounds = new Rect();
|
||||
mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
|
||||
float x = bmWidth - margin - textBounds.width();
|
||||
int y = margin + statusHeight;
|
||||
canvas.drawText(osd, x, y, mPaint);
|
||||
canvas.drawText(osd, x, y, mPaintStroker);
|
||||
}
|
||||
|
||||
if (!TextUtils.isEmpty(mOSDRightBottom)) {
|
||||
                String osd = ((mTimeMask & TIME_MASK_RB) == 0) ? mOSDRightBottom : updateOSDTime(mOSDRightBottom, ts);
|
||||
// mPaint.setTextAlign(Paint.Align.RIGHT);
|
||||
Rect textBounds = new Rect();
|
||||
mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
|
||||
float x = bmWidth - margin - textBounds.width();
|
||||
float y = bmHeight - margin - textBounds.height();
|
||||
canvas.drawText(osd, x, y, mPaint);
|
||||
canvas.drawText(osd, x, y, mPaintStroker);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
SurfaceHolder surfaceHolder = sampleGLView.getHolder();
|
||||
Canvas surfaceCanvas = surfaceHolder.lockCanvas();
|
||||
if (surfaceCanvas != null) {
|
||||
surfaceCanvas.drawBitmap(mBitmap, 0, 0, null);
|
||||
surfaceHolder.unlockCanvasAndPost(surfaceCanvas);
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
private String updateOSDTime(String osd, long ts) {
|
||||
String newOSD = osd;
|
||||
if (newOSD.indexOf(TIME_MICRO_TS) != -1) {
|
||||
newOSD = newOSD.replace(TIME_MICRO_TS, Long.toString(ts / 1000));
|
||||
}
|
||||
if (newOSD.indexOf(TIME_MICRO_DT) != -1) {
|
||||
if (mDateFormater == null) {
|
||||
mDateFormater = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
|
||||
}
|
||||
Date dt = new Date(ts);
|
||||
newOSD = newOSD.replace(TIME_MICRO_DT, mDateFormater.format(dt));
|
||||
}
|
||||
|
||||
return newOSD;
|
||||
}
|
||||
|
||||
|
||||
private void releaseCamera() {
|
||||
if (mPreviewView != null) {
|
||||
mPreviewView.onPause();
|
||||
}
|
||||
|
||||
if (mGPUCameraRecorder != null) {
|
||||
mGPUCameraRecorder.stop();
|
||||
mGPUCameraRecorder.release();
|
||||
mGPUCameraRecorder = null;
|
||||
}
|
||||
|
||||
if (mPreviewView != null) {
|
||||
((FrameLayout) findViewById(R.id.wrap_view)).removeView(mPreviewView);
|
||||
mPreviewView = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void setUpCameraView() {
|
||||
|
||||
runOnUiThread(() -> {
|
||||
|
||||
            if (!TextUtils.isEmpty(mOSDLeftTop) || !TextUtils.isEmpty(mOSDRightTop) || !TextUtils.isEmpty(mOSDRightBottom) || !TextUtils.isEmpty(mOSDLeftBottom)) {
|
||||
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
|
||||
mPaint.setStyle(Paint.Style.FILL);
|
||||
mPaint.setColor(Color.WHITE);
|
||||
int fontSize = DEFAULT_FONT_SIZE;
|
||||
mPaint.setTextSize(fontSize);
|
||||
|
||||
mPaintStroker = new Paint(Paint.ANTI_ALIAS_FLAG);
|
||||
mPaintStroker.setStyle(Paint.Style.STROKE);
|
||||
mPaintStroker.setColor(Color.BLACK);
|
||||
mPaintStroker.setTextSize(fontSize);
|
||||
mPaintStroker.setStrokeWidth(1);
|
||||
|
||||
mBitmap = Bitmap.createBitmap(mVideoWidth, mVideoHeight, Bitmap.Config.ARGB_8888);
|
||||
|
||||
Canvas canvas = new Canvas(mBitmap);
|
||||
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
|
||||
|
||||
mOSDFilter = new GlWatermarkFilter(mBitmap);
|
||||
}
|
||||
|
||||
FrameLayout frameLayout = findViewById(R.id.wrap_view);
|
||||
frameLayout.removeAllViews();
|
||||
mPreviewView = null;
|
||||
mPreviewView = new AutoFitGLView(getApplicationContext());
|
||||
mPreviewView.setTouchListener((event, width, height) -> {
|
||||
if (mGPUCameraRecorder == null) return;
|
||||
mGPUCameraRecorder.changeManualFocusPoint(event.getX(), event.getY(), width, height);
|
||||
});
|
||||
frameLayout.addView(mPreviewView);
|
||||
|
||||
if (mGPUCameraRecorder != null) {
|
||||
mGPUCameraRecorder.setFilter(mOSDFilter);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void setUpCamera() {
|
||||
setUpCameraView();
|
||||
|
||||
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
|
||||
mNextVideoAbsolutePath = getVideoFilePath(this);
|
||||
}
|
||||
|
||||
mGPUCameraRecorder = new GPUCameraRecorderBuilder(this, mPreviewView)
|
||||
//.recordNoFilter(true)
|
||||
.cameraRecordListener(new CameraRecordListener() {
|
||||
@Override
|
||||
public void onGetFlashSupport(boolean flashSupport) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRecordComplete() {
|
||||
mHandler.removeCallbacks(mTimerRunnable);
|
||||
exportMp4ToGallery(getApplicationContext(), mNextVideoAbsolutePath);
|
||||
broadcastVideoFile(true, mNextVideoAbsolutePath);
|
||||
mHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Camera2VideoActivity.this.finish();
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRecordStart() {
|
||||
|
||||
if (mTimeMask != 0) {
|
||||
long ts = System.currentTimeMillis();
|
||||
long ms = ts % 1000;
|
||||
updateOSD(ts - ms);
|
||||
mHandler.postDelayed(mTimerRunnable, 1000 - ms);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Exception exception) {
|
||||
Log.e("GPUCameraRecorder", exception.toString());
|
||||
broadcastVideoFile(false, mNextVideoAbsolutePath);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraThreadFinish() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onVideoFileReady() {
|
||||
|
||||
}
|
||||
})
|
||||
.videoSize(mVideoWidth, mVideoHeight)
|
||||
.cameraSize(mCameraWidth, mCameraHeight)
|
||||
.cameraId(Integer.toString(mCameraId))
|
||||
.build();
|
||||
|
||||
if (mOSDFilter != null) {
|
||||
mGPUCameraRecorder.setFilter(mOSDFilter);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// private void changeFilter(Filters filters) {
|
||||
// GPUCameraRecorder.setFilter(Filters.getFilterInstance(filters, getApplicationContext()));
|
||||
// }
|
||||
|
||||
|
||||
private interface BitmapReadyCallbacks {
|
||||
void onBitmapReady(Bitmap bitmap);
|
||||
}
|
||||
|
||||
private void captureBitmap(final BitmapReadyCallbacks bitmapReadyCallbacks) {
|
||||
mPreviewView.queueEvent(() -> {
|
||||
EGL10 egl = (EGL10) EGLContext.getEGL();
|
||||
GL10 gl = (GL10) egl.eglGetCurrentContext().getGL();
|
||||
Bitmap snapshotBitmap = createBitmapFromGLSurface(mPreviewView.getMeasuredWidth(), mPreviewView.getMeasuredHeight(), gl);
|
||||
|
||||
runOnUiThread(() -> {
|
||||
bitmapReadyCallbacks.onBitmapReady(snapshotBitmap);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private void broadcastVideoFile(boolean result, String path) {
|
||||
if (mDuration <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
Context context = getApplicationContext();
|
||||
String receiverName = MicroPhotoService.AlarmReceiver.class.getName();
|
||||
String packageName = context.getPackageName();
|
||||
|
||||
Intent intent = new Intent(ACTION_MP_VIDEO_FINISHED);
|
||||
// intent.setPackage(packageName);
|
||||
intent.putExtra("result", result);
|
||||
intent.putExtra("path", path);
|
||||
intent.putExtra("videoId", mVideoId);
|
||||
|
||||
// intent.setComponent(new ComponentName(packageName, receiverName));
|
||||
|
||||
// Log.i(TAG, "Notify recording videoId=" + Long.toString(mVideoId) + " " + path);
|
||||
LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(getApplicationContext());
|
||||
localBroadcastManager.sendBroadcast(intent);
|
||||
|
||||
context.sendBroadcast(intent);
|
||||
}
|
||||
|
||||
private String getVideoFilePath(Context context) {
|
||||
// final File dir = context.getExternalFilesDir(null);
|
||||
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
|
||||
if (!path.endsWith(File.separator)) {
|
||||
path += File.separator;
|
||||
}
|
||||
path += context.getPackageName() + File.separator;
|
||||
File file = new File(path);
|
||||
if (!file.exists()) {
|
||||
file.mkdirs();
|
||||
}
|
||||
path += System.currentTimeMillis() + ".mp4";
|
||||
return path;
|
||||
}
|
||||
|
||||
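    // Reads the current GL framebuffer via glReadPixels (bottom-up RGBA) and converts it to
    // a top-down ARGB_8888 Bitmap by flipping rows and swapping the red/blue channels.
    // Returns null if the read fails.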
private Bitmap createBitmapFromGLSurface(int w, int h, GL10 gl) {
|
||||
|
||||
        int[] bitmapBuffer = new int[w * h];
        int[] bitmapSource = new int[w * h];
|
||||
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
|
||||
intBuffer.position(0);
|
||||
|
||||
try {
|
||||
gl.glReadPixels(0, 0, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, intBuffer);
|
||||
int offset1, offset2, texturePixel, blue, red, pixel;
|
||||
for (int i = 0; i < h; i++) {
|
||||
offset1 = i * w;
|
||||
offset2 = (h - i - 1) * w;
|
||||
for (int j = 0; j < w; j++) {
|
||||
texturePixel = bitmapBuffer[offset1 + j];
|
||||
blue = (texturePixel >> 16) & 0xff;
|
||||
red = (texturePixel << 16) & 0x00ff0000;
|
||||
pixel = (texturePixel & 0xff00ff00) | red | blue;
|
||||
bitmapSource[offset2 + j] = pixel;
|
||||
}
|
||||
}
|
||||
} catch (GLException e) {
|
||||
Log.e("CreateBitmap", "createBitmapFromGLSurface: " + e.getMessage(), e);
|
||||
return null;
|
||||
}
|
||||
|
||||
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
|
||||
}
|
||||
|
||||
public void saveAsPngImage(Bitmap bitmap, String filePath) {
|
||||
try {
|
||||
File file = new File(filePath);
|
||||
FileOutputStream outStream = new FileOutputStream(file);
|
||||
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outStream);
|
||||
outStream.close();
|
||||
|
||||
} catch (FileNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void exportMp4ToGallery(Context context, String filePath) {
|
||||
final ContentValues values = new ContentValues(2);
|
||||
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
|
||||
values.put(MediaStore.Video.Media.DATA, filePath);
|
||||
context.getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
|
||||
values);
|
||||
context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
|
||||
Uri.parse("file://" + filePath)));
|
||||
}
|
||||
|
||||
public static String getVideoFilePath() {
|
||||
return getAndroidMoviesFolder().getAbsolutePath() + "/" + new SimpleDateFormat("yyyyMM_dd-HHmmss").format(new Date()) + "GPUCameraRecorder.mp4";
|
||||
}
|
||||
|
||||
public static File getAndroidMoviesFolder() {
|
||||
return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES);
|
||||
}
|
||||
|
||||
private static void exportPngToGallery(Context context, String filePath) {
|
||||
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
|
||||
File f = new File(filePath);
|
||||
Uri contentUri = Uri.fromFile(f);
|
||||
mediaScanIntent.setData(contentUri);
|
||||
context.sendBroadcast(mediaScanIntent);
|
||||
}
|
||||
|
||||
public static String getImageFilePath() {
|
||||
return getAndroidImageFolder().getAbsolutePath() + "/" + new SimpleDateFormat("yyyyMM_dd-HHmmss").format(new Date()) + "GPUCameraRecorder.png";
|
||||
}
|
||||
|
||||
public static File getAndroidImageFolder() {
|
||||
return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
|
||||
>
|
||||
|
||||
<FrameLayout
|
||||
android:id="@+id/wrap_view"
|
||||
android:layout_width="0dp"
|
||||
android:layout_height="0dp"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
app:layout_constraintLeft_toLeftOf="parent"
|
||||
app:layout_constraintRight_toRightOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent" />
|
||||
|
||||
</androidx.constraintlayout.widget.ConstraintLayout>
|
@ -0,0 +1 @@
|
||||
/build
|
@ -0,0 +1,27 @@
|
||||
apply plugin: 'com.android.library'
|
||||
|
||||
android {
|
||||
compileSdkVersion COMPILE_SDK_VERSION as int
|
||||
|
||||
defaultConfig {
|
||||
minSdkVersion COMPILE_MIN_SDK_VERSION as int
|
||||
targetSdkVersion TARGET_SDK_VERSION as int
|
||||
|
||||
|
||||
}
|
||||
|
||||
buildTypes {
|
||||
release {
|
||||
minifyEnabled false
|
||||
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation fileTree(dir: 'libs', include: ['*.jar'])
|
||||
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
# Add project specific ProGuard rules here.
|
||||
# You can control the set of applied configuration files using the
|
||||
# proguardFiles setting in build.gradle.
|
||||
#
|
||||
# For more details, see
|
||||
# http://developer.android.com/guide/developing/tools/proguard.html
|
||||
|
||||
# If your project uses WebView with JS, uncomment the following
|
||||
# and specify the fully qualified class name to the JavaScript interface
|
||||
# class:
|
||||
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
|
||||
# public *;
|
||||
#}
|
||||
|
||||
# Uncomment this to preserve the line number information for
|
||||
# debugging stack traces.
|
||||
#-keepattributes SourceFile,LineNumberTable
|
||||
|
||||
# If you keep the line number information, uncomment this to
|
||||
# hide the original source file name.
|
||||
#-renamesourcefileattribute SourceFile
|
@ -0,0 +1,2 @@
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="com.xypower.gpuv"/>
|
@ -0,0 +1,125 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.os.Message;
|
||||
import android.util.Log;
|
||||
|
||||
/**
|
||||
* Created by sudamasayuki on 2018/03/13.
|
||||
*/
|
||||
|
||||
public class CameraHandler extends Handler {
|
||||
private static final boolean DEBUG = false;
|
||||
private static final String TAG = "CameraHandler";
|
||||
|
||||
private static final int MSG_PREVIEW_START = 1;
|
||||
private static final int MSG_PREVIEW_STOP = 2;
|
||||
private static final int MSG_MANUAL_FOCUS = 3;
|
||||
private static final int MSG_SWITCH_FLASH = 4;
|
||||
private static final int MSG_AUTO_FOCUS = 5;
|
||||
|
||||
|
||||
private int viewWidth = 0;
|
||||
private int viewHeight = 0;
|
||||
private float eventX = 0;
|
||||
private float eventY = 0;
|
||||
|
||||
private CameraThread thread;
|
||||
|
||||
CameraHandler(final CameraThread thread) {
|
||||
this.thread = thread;
|
||||
}
|
||||
|
||||
void startPreview(final int width, final int height) {
|
||||
sendMessage(obtainMessage(MSG_PREVIEW_START, width, height));
|
||||
}
|
||||
|
||||
/**
|
||||
* request to stop camera preview
|
||||
*
|
||||
* @param needWait need to wait for stopping camera preview
|
||||
*/
|
||||
void stopPreview(final boolean needWait) {
|
||||
synchronized (this) {
|
||||
sendEmptyMessage(MSG_PREVIEW_STOP);
|
||||
if (thread == null) return;
|
||||
if (needWait && thread.isRunning) {
|
||||
try {
|
||||
if (DEBUG) Log.d(TAG, "wait for terminating of camera thread");
|
||||
wait();
|
||||
} catch (final InterruptedException e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void changeManualFocusPoint(float eventX, float eventY, int viewWidth, int viewHeight) {
|
||||
this.viewWidth = viewWidth;
|
||||
this.viewHeight = viewHeight;
|
||||
this.eventX = eventX;
|
||||
this.eventY = eventY;
|
||||
sendMessage(obtainMessage(MSG_MANUAL_FOCUS));
|
||||
}
|
||||
|
||||
void changeAutoFocus() {
|
||||
sendMessage(obtainMessage(MSG_AUTO_FOCUS));
|
||||
}
|
||||
|
||||
void switchFlashMode() {
|
||||
sendMessage(obtainMessage(MSG_SWITCH_FLASH));
|
||||
}
|
||||
|
||||
/**
|
||||
* message handler for camera thread
|
||||
*/
|
||||
@Override
|
||||
public void handleMessage(final Message msg) {
|
||||
switch (msg.what) {
|
||||
case MSG_PREVIEW_START:
|
||||
if (thread != null) {
|
||||
thread.startPreview(msg.arg1, msg.arg2);
|
||||
}
|
||||
break;
|
||||
case MSG_PREVIEW_STOP:
|
||||
if (thread != null) {
|
||||
thread.stopPreview();
|
||||
}
|
||||
synchronized (this) {
|
||||
notifyAll();
|
||||
}
|
||||
try {
|
||||
Looper.myLooper().quit();
|
||||
removeCallbacks(thread);
|
||||
removeMessages(MSG_PREVIEW_START);
|
||||
removeMessages(MSG_PREVIEW_STOP);
|
||||
removeMessages(MSG_MANUAL_FOCUS);
|
||||
removeMessages(MSG_SWITCH_FLASH);
|
||||
removeMessages(MSG_AUTO_FOCUS);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
thread = null;
|
||||
break;
|
||||
case MSG_MANUAL_FOCUS:
|
||||
if (thread != null) {
|
||||
thread.changeManualFocusPoint(eventX, eventY, viewWidth, viewHeight);
|
||||
}
|
||||
break;
|
||||
case MSG_SWITCH_FLASH:
|
||||
if (thread != null) {
|
||||
thread.switchFlashMode();
|
||||
}
|
||||
break;
|
||||
case MSG_AUTO_FOCUS:
|
||||
if (thread != null) {
|
||||
thread.changeAutoFocus();
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new RuntimeException("unknown message:what=" + msg.what);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,23 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
/**
|
||||
* Created by sudamasayuki on 2018/03/13.
|
||||
*/
|
||||
|
||||
public interface CameraRecordListener {
|
||||
|
||||
void onGetFlashSupport(boolean flashSupport);
|
||||
|
||||
void onRecordComplete();
|
||||
|
||||
void onRecordStart();
|
||||
|
||||
void onError(Exception exception);
|
||||
|
||||
void onCameraThreadFinish();
|
||||
|
||||
/**
|
||||
     * Called when the native codecs have finished writing the file.
|
||||
*/
|
||||
void onVideoFileReady();
|
||||
}
|
@ -0,0 +1,325 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.graphics.Rect;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.camera2.*;
|
||||
import android.hardware.camera2.params.MeteringRectangle;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.Looper;
|
||||
import android.util.Log;
|
||||
import android.util.Size;
|
||||
import android.view.Surface;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Created by sudamasayuki on 2018/03/13.
|
||||
*/
|
||||
|
||||
public class CameraThread extends Thread {
|
||||
|
||||
|
||||
private static final String TAG = "CameraThread";
|
||||
|
||||
|
||||
private final Object readyFence = new Object();
|
||||
private CameraHandler handler;
|
||||
volatile boolean isRunning = false;
|
||||
|
||||
private CameraDevice cameraDevice;
|
||||
private CaptureRequest.Builder requestBuilder;
|
||||
private CameraCaptureSession cameraCaptureSession;
|
||||
private Rect sensorArraySize;
|
||||
|
||||
private SurfaceTexture surfaceTexture;
|
||||
|
||||
private final OnStartPreviewListener listener;
|
||||
private final CameraRecordListener cameraRecordListener;
|
||||
private final CameraManager cameraManager;
|
||||
|
||||
private Size cameraSize;
|
||||
private boolean isFlashTorch = false;
|
||||
private final String cameraId;
|
||||
|
||||
private boolean flashSupport = false;
|
||||
|
||||
|
||||
CameraThread(
|
||||
final CameraRecordListener cameraRecordListener,
|
||||
final OnStartPreviewListener listener,
|
||||
final SurfaceTexture surfaceTexture,
|
||||
final CameraManager cameraManager,
|
||||
final String cameraId
|
||||
) {
|
||||
super("Camera thread");
|
||||
this.listener = listener;
|
||||
this.cameraRecordListener = cameraRecordListener;
|
||||
this.surfaceTexture = surfaceTexture;
|
||||
this.cameraManager = cameraManager;
|
||||
this.cameraId = cameraId;
|
||||
|
||||
}
|
||||
|
||||
    public CameraHandler getHandler() {
        synchronized (readyFence) {
            // Block until run() has created the handler; looping guards against a missed
            // notification if the camera thread became ready before this call.
            while (handler == null) {
                try {
                    readyFence.wait();
                } catch (final InterruptedException e) {
                    e.printStackTrace();
                    break;
                }
            }
        }
        return handler;
    }
|
||||
|
||||
private CameraDevice.StateCallback cameraDeviceCallback = new CameraDevice.StateCallback() {
|
||||
@Override
|
||||
public void onOpened(CameraDevice camera) {
|
||||
Log.d(TAG, "cameraDeviceCallback onOpened");
|
||||
CameraThread.this.cameraDevice = camera;
|
||||
createCaptureSession();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(CameraDevice camera) {
|
||||
Log.d(TAG, "cameraDeviceCallback onDisconnected");
|
||||
camera.close();
|
||||
CameraThread.this.cameraDevice = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(CameraDevice camera, int error) {
|
||||
Log.d(TAG, "cameraDeviceCallback onError");
|
||||
camera.close();
|
||||
CameraThread.this.cameraDevice = null;
|
||||
}
|
||||
};
|
||||
|
||||
private CameraCaptureSession.StateCallback cameraCaptureSessionCallback = new CameraCaptureSession.StateCallback() {
|
||||
@Override
|
||||
public void onConfigured(CameraCaptureSession session) {
|
||||
cameraCaptureSession = session;
|
||||
updatePreview();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConfigureFailed(CameraCaptureSession session) {
|
||||
// Toast.makeText(activity, "onConfigureFailed", Toast.LENGTH_LONG).show();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
private void updatePreview() {
|
||||
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
|
||||
HandlerThread thread = new HandlerThread("CameraPreview");
|
||||
thread.start();
|
||||
Handler backgroundHandler = new Handler(thread.getLooper());
|
||||
|
||||
try {
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, backgroundHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* message loop
|
||||
* prepare Looper and create Handler for this thread
|
||||
*/
|
||||
@Override
|
||||
public void run() {
|
||||
Log.d(TAG, "Camera thread start");
|
||||
Looper.prepare();
|
||||
synchronized (readyFence) {
|
||||
handler = new CameraHandler(this);
|
||||
isRunning = true;
|
||||
readyFence.notify();
|
||||
}
|
||||
Looper.loop();
|
||||
Log.d(TAG, "Camera thread finish");
|
||||
if (cameraRecordListener != null) {
|
||||
cameraRecordListener.onCameraThreadFinish();
|
||||
}
|
||||
synchronized (readyFence) {
|
||||
handler = null;
|
||||
isRunning = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* start camera preview
|
||||
*
|
||||
* @param width
|
||||
* @param height
|
||||
*/
|
||||
@SuppressLint("MissingPermission")
|
||||
final void startPreview(final int width, final int height) {
|
||||
Log.v(TAG, "startPreview:");
|
||||
|
||||
try {
|
||||
|
||||
if (cameraManager == null) return;
|
||||
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
|
||||
|
||||
// if (characteristics.get(CameraCharacteristics.LENS_FACING) == null || characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) == null) {
|
||||
// continue;
|
||||
//}
|
||||
sensorArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
|
||||
|
||||
flashSupport = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
|
||||
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
|
||||
|
||||
if (width < 0 || height < 0) {
|
||||
cameraSize = map.getOutputSizes(SurfaceTexture.class)[0];
|
||||
} else {
|
||||
cameraSize = getClosestSupportedSize(Arrays.asList(map.getOutputSizes(SurfaceTexture.class)), width, height);
|
||||
}
|
||||
Log.v(TAG, "cameraSize =" + cameraSize);
|
||||
|
||||
HandlerThread thread = new HandlerThread("OpenCamera");
|
||||
thread.start();
|
||||
Handler backgroundHandler = new Handler(thread.getLooper());
|
||||
|
||||
cameraManager.openCamera(cameraId, cameraDeviceCallback, backgroundHandler);
|
||||
|
||||
return;
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void createCaptureSession() {
|
||||
surfaceTexture.setDefaultBufferSize(cameraSize.getWidth(), cameraSize.getHeight());
|
||||
Surface surface = new Surface(surfaceTexture);
|
||||
|
||||
try {
|
||||
requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
requestBuilder.addTarget(surface);
|
||||
try {
|
||||
cameraDevice.createCaptureSession(Collections.singletonList(surface), cameraCaptureSessionCallback, null);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
listener.onStart(cameraSize, flashSupport);
|
||||
|
||||
}
|
||||
|
||||
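    // Chooses the supported size whose width and height differ least (by absolute sum)
    // from the requested dimensions.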
private static Size getClosestSupportedSize(List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
|
||||
return Collections.min(supportedSizes, new Comparator<Size>() {
|
||||
|
||||
private int diff(final Size size) {
|
||||
return Math.abs(requestedWidth - size.getWidth()) + Math.abs(requestedHeight - size.getHeight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compare(final Size lhs, final Size rhs) {
|
||||
return diff(lhs) - diff(rhs);
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* stop camera preview
|
||||
*/
|
||||
void stopPreview() {
|
||||
Log.v(TAG, "stopPreview:");
|
||||
isFlashTorch = false;
|
||||
if (requestBuilder != null) {
|
||||
requestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
|
||||
try {
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
|
||||
cameraDevice.close();
|
||||
Log.v(TAG, "stopPreview: cameraDevice.close()");
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* change focus
|
||||
*/
|
||||
void changeManualFocusPoint(float eventX, float eventY, int viewWidth, int viewHeight) {
|
||||
|
||||
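        // Map the touch point from view coordinates into sensor-array coordinates; x and y
        // are swapped here, presumably because the sensor is mounted rotated 90 degrees
        // relative to the preview view.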
final int y = (int) ((eventX / (float) viewWidth) * (float) sensorArraySize.height());
|
||||
final int x = (int) ((eventY / (float) viewHeight) * (float) sensorArraySize.width());
|
||||
final int halfTouchWidth = 400;
|
||||
final int halfTouchHeight = 400;
|
||||
MeteringRectangle focusAreaTouch = new MeteringRectangle(Math.max(x - halfTouchWidth, 0),
|
||||
Math.max(y - halfTouchHeight, 0),
|
||||
halfTouchWidth * 2,
|
||||
halfTouchHeight * 2,
|
||||
MeteringRectangle.METERING_WEIGHT_MAX - 1);
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusAreaTouch});
|
||||
try {
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
requestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
|
||||
|
||||
//then we ask for a single request (not repeating!)
|
||||
try {
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
    // Toggle the flash (torch) mode.
|
||||
void switchFlashMode() {
|
||||
if (!flashSupport) return;
|
||||
|
||||
try {
|
||||
if (isFlashTorch) {
|
||||
isFlashTorch = false;
|
||||
requestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
|
||||
} else {
|
||||
isFlashTorch = true;
|
||||
requestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
|
||||
}
|
||||
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
void changeAutoFocus() {
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
requestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
|
||||
//then we ask for a single request (not repeating!)
|
||||
try {
|
||||
cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
|
||||
} catch (CameraAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
interface OnStartPreviewListener {
|
||||
void onStart(Size previewSize, boolean flashSupport);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,360 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.opengl.GLSurfaceView;
|
||||
import android.os.Handler;
|
||||
import android.util.Log;
|
||||
import android.util.Size;
|
||||
import com.xypower.gpuv.camerarecorder.capture.MediaAudioEncoder;
|
||||
import com.xypower.gpuv.camerarecorder.capture.MediaEncoder;
|
||||
import com.xypower.gpuv.camerarecorder.capture.MediaMuxerCaptureWrapper;
|
||||
import com.xypower.gpuv.camerarecorder.capture.MediaVideoEncoder;
|
||||
import com.xypower.gpuv.egl.GlPreviewRenderer;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
|
||||
public class GPUCameraRecorder {
|
||||
private GlPreviewRenderer glPreviewRenderer;
|
||||
|
||||
private final CameraRecordListener cameraRecordListener;
|
||||
private static final String TAG = "GPUCameraRecorder";
|
||||
|
||||
private boolean started = false;
|
||||
private CameraHandler cameraHandler = null;
|
||||
private GLSurfaceView glSurfaceView;
|
||||
|
||||
private boolean flashSupport = false;
|
||||
|
||||
private MediaMuxerCaptureWrapper muxer;
|
||||
private final int fileWidth;
|
||||
private final int fileHeight;
|
||||
|
||||
private final int cameraWidth;
|
||||
private final int cameraHeight;
|
||||
private final String cameraId;
|
||||
private final boolean flipHorizontal;
|
||||
private final boolean flipVertical;
|
||||
private final boolean mute;
|
||||
private final CameraManager cameraManager;
|
||||
private final boolean isLandscapeDevice;
|
||||
private final int degrees;
|
||||
private final boolean recordNoFilter;
|
||||
|
||||
GPUCameraRecorder(
|
||||
CameraRecordListener cameraRecordListener,
|
||||
final GLSurfaceView glSurfaceView,
|
||||
final int fileWidth,
|
||||
final int fileHeight,
|
||||
final int cameraWidth,
|
||||
final int cameraHeight,
|
||||
final String cameraId,
|
||||
final boolean flipHorizontal,
|
||||
final boolean flipVertical,
|
||||
final boolean mute,
|
||||
final CameraManager cameraManager,
|
||||
final boolean isLandscapeDevice,
|
||||
final int degrees,
|
||||
final boolean recordNoFilter
|
||||
) {
|
||||
|
||||
|
||||
this.cameraRecordListener = cameraRecordListener;
|
||||
|
||||
glSurfaceView.setDebugFlags(GLSurfaceView.DEBUG_CHECK_GL_ERROR);
|
||||
this.glSurfaceView = glSurfaceView;
|
||||
|
||||
this.fileWidth = fileWidth;
|
||||
this.fileHeight = fileHeight;
|
||||
this.cameraWidth = cameraWidth;
|
||||
this.cameraHeight = cameraHeight;
|
||||
this.cameraId = cameraId;
|
||||
this.flipHorizontal = flipHorizontal;
|
||||
this.flipVertical = flipVertical;
|
||||
this.mute = mute;
|
||||
this.cameraManager = cameraManager;
|
||||
this.isLandscapeDevice = isLandscapeDevice;
|
||||
this.degrees = degrees;
|
||||
this.recordNoFilter = recordNoFilter;
|
||||
|
||||
// create preview Renderer
|
||||
if (null == glPreviewRenderer) {
|
||||
glPreviewRenderer = new GlPreviewRenderer(glSurfaceView);
|
||||
}
|
||||
|
||||
glPreviewRenderer.setSurfaceCreateListener(new GlPreviewRenderer.SurfaceCreateListener() {
|
||||
@Override
|
||||
public void onCreated(SurfaceTexture surfaceTexture) {
|
||||
startPreview(surfaceTexture);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
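    // Lazily starts the camera thread the first time the preview SurfaceTexture is ready,
    // then forwards preview requests to it through its handler.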
private synchronized void startPreview(SurfaceTexture surfaceTexture) {
|
||||
if (cameraHandler == null) {
|
||||
final CameraThread thread = new CameraThread(cameraRecordListener, new CameraThread.OnStartPreviewListener() {
|
||||
@Override
|
||||
public void onStart(Size previewSize, boolean flash) {
|
||||
|
||||
Log.d(TAG, "previewSize : width " + previewSize.getWidth() + " height = " + previewSize.getHeight());
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.setCameraResolution(new Size(previewSize.getWidth(), previewSize.getHeight()));
|
||||
}
|
||||
|
||||
flashSupport = flash;
|
||||
if (cameraRecordListener != null) {
|
||||
cameraRecordListener.onGetFlashSupport(flashSupport);
|
||||
}
|
||||
|
||||
final float previewWidth = previewSize.getWidth();
|
||||
final float previewHeight = previewSize.getHeight();
|
||||
|
||||
glSurfaceView.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.setAngle(degrees);
|
||||
glPreviewRenderer.onStartPreview(previewWidth, previewHeight, isLandscapeDevice);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (glPreviewRenderer != null) {
|
||||
final SurfaceTexture st = glPreviewRenderer.getPreviewTexture().getSurfaceTexture();
|
||||
st.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
|
||||
}
|
||||
}
|
||||
}, surfaceTexture, cameraManager, cameraId);
|
||||
thread.start();
|
||||
cameraHandler = thread.getHandler();
|
||||
}
|
||||
cameraHandler.startPreview(cameraWidth, cameraHeight);
|
||||
}
|
||||
|
||||
|
||||
public void setFilter(final GlFilter filter) {
|
||||
if (filter == null) return;
|
||||
glPreviewRenderer.setGlFilter(filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* change focus
|
||||
*/
|
||||
public void changeManualFocusPoint(float eventX, float eventY, int viewWidth, int viewHeight) {
|
||||
if (cameraHandler != null) {
|
||||
cameraHandler.changeManualFocusPoint(eventX, eventY, viewWidth, viewHeight);
|
||||
}
|
||||
}
|
||||
|
||||
public void changeAutoFocus() {
|
||||
if (cameraHandler != null) {
|
||||
cameraHandler.changeAutoFocus();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void switchFlashMode() {
|
||||
if (!flashSupport) return;
|
||||
if (cameraHandler != null) {
|
||||
cameraHandler.switchFlashMode();
|
||||
}
|
||||
}
|
||||
|
||||
public void setGestureScale(float scale) {
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.setGestureScale(scale);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isFlashSupport() {
|
||||
return flashSupport;
|
||||
}
|
||||
|
||||
|
||||
private void destroyPreview() {
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.release();
|
||||
glPreviewRenderer = null;
|
||||
}
|
||||
if (cameraHandler != null) {
|
||||
            // just request to stop previewing
|
||||
cameraHandler.stopPreview(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* callback methods from encoder
|
||||
*/
|
||||
private final MediaEncoder.MediaEncoderListener mediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
|
||||
private boolean videoStopped;
|
||||
private boolean audioStopped;
|
||||
private boolean videoExitReady;
|
||||
private boolean audioExitReady;
|
||||
|
||||
@Override
|
||||
public void onPrepared(final MediaEncoder encoder) {
|
||||
Log.v("TAG", "onPrepared:encoder=" + encoder);
|
||||
if (encoder instanceof MediaVideoEncoder) {
|
||||
videoStopped = false;
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.setVideoEncoder((MediaVideoEncoder) encoder);
|
||||
}
|
||||
}
|
||||
|
||||
if (encoder instanceof MediaAudioEncoder) {
|
||||
audioStopped = false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStopped(final MediaEncoder encoder) {
|
||||
Log.v("TAG", "onStopped:encoder=" + encoder);
|
||||
if (encoder instanceof MediaVideoEncoder) {
|
||||
videoStopped = true;
|
||||
if (glPreviewRenderer != null) {
|
||||
glPreviewRenderer.setVideoEncoder(null);
|
||||
}
|
||||
}
|
||||
if (encoder instanceof MediaAudioEncoder) {
|
||||
audioStopped = true;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onExit(final MediaEncoder encoder) {
|
||||
if (encoder instanceof MediaVideoEncoder && videoStopped) {
|
||||
videoExitReady = true;
|
||||
}
|
||||
if (encoder instanceof MediaAudioEncoder && audioStopped) {
|
||||
audioExitReady = true;
|
||||
}
|
||||
if (videoExitReady && (audioExitReady || mute)) {
|
||||
cameraRecordListener.onVideoFileReady();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Start data processing
|
||||
*/
|
||||
public void start(final String filePath) {
|
||||
if (started) return;
|
||||
|
||||
|
||||
new Handler().post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
muxer = new MediaMuxerCaptureWrapper(filePath);
|
||||
|
||||
// for video capturing
|
||||
                    // The camera width and height are also passed in here.
                    // The various size differences are absorbed there.
|
||||
new MediaVideoEncoder(
|
||||
muxer,
|
||||
mediaEncoderListener,
|
||||
fileWidth,
|
||||
fileHeight,
|
||||
flipHorizontal,
|
||||
flipVertical,
|
||||
glSurfaceView.getMeasuredWidth(),
|
||||
glSurfaceView.getMeasuredHeight(),
|
||||
recordNoFilter,
|
||||
glPreviewRenderer.getFilter()
|
||||
);
|
||||
if (!mute) {
|
||||
// for audio capturing
|
||||
new MediaAudioEncoder(muxer, mediaEncoderListener);
|
||||
}
|
||||
muxer.prepare();
|
||||
muxer.startRecording();
|
||||
|
||||
if (cameraRecordListener != null) {
|
||||
cameraRecordListener.onRecordStart();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
notifyOnError(e);
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
started = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops capturing.
|
||||
*/
|
||||
public void stop() {
|
||||
if (!started) return;
|
||||
try {
|
||||
|
||||
new Handler().post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
// stop recording and release camera
|
||||
try {
|
||||
// stop the recording
|
||||
if (muxer != null) {
|
||||
muxer.stopRecording();
|
||||
muxer = null;
|
||||
// you should not wait here
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// RuntimeException is thrown when stop() is called immediately after start().
|
||||
                        // In this case the output file is not properly constructed and should be deleted.
|
||||
Log.d("TAG", "RuntimeException: stop() is called immediately after start()");
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
notifyOnError(e);
|
||||
}
|
||||
|
||||
notifyOnDone();
|
||||
}
|
||||
});
|
||||
|
||||
} catch (Exception e) {
|
||||
notifyOnError(e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
|
||||
started = false;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
        // destroy everything
|
||||
try {
|
||||
// stop the recording
|
||||
if (muxer != null) {
|
||||
muxer.stopRecording();
|
||||
muxer = null;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// RuntimeException is thrown when stop() is called immediately after start().
|
||||
            // In this case the output file is not properly constructed and should be deleted.
|
||||
Log.d("TAG", "RuntimeException: stop() is called immediately after start()");
|
||||
}
|
||||
|
||||
destroyPreview();
|
||||
}
|
||||
|
||||
|
||||
public boolean isStarted() {
|
||||
return started;
|
||||
}
|
||||
|
||||
private void notifyOnDone() {
|
||||
if (cameraRecordListener == null) return;
|
||||
cameraRecordListener.onRecordComplete();
|
||||
}
|
||||
|
||||
private void notifyOnError(Exception e) {
|
||||
if (cameraRecordListener == null) return;
|
||||
cameraRecordListener.onError(e);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,124 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.Context;
|
||||
import android.content.res.Configuration;
|
||||
import android.content.res.Resources;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.opengl.GLSurfaceView;
|
||||
import android.util.Log;
|
||||
import android.view.Surface;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
|
||||
public class GPUCameraRecorderBuilder {
|
||||
|
||||
|
||||
private GLSurfaceView glSurfaceView;
|
||||
|
||||
private String cameraId = "0";
|
||||
private Resources resources;
|
||||
private Activity activity;
|
||||
private CameraRecordListener cameraRecordListener;
|
||||
private int fileWidth = 720;
|
||||
private int fileHeight = 1280;
|
||||
private boolean flipVertical = false;
|
||||
private boolean flipHorizontal = false;
|
||||
private boolean mute = false;
|
||||
private boolean recordNoFilter = false;
|
||||
private int cameraWidth = 1280;
|
||||
private int cameraHeight = 720;
|
||||
private GlFilter glFilter;
|
||||
|
||||
public GPUCameraRecorderBuilder(Activity activity, GLSurfaceView glSurfaceView) {
|
||||
this.activity = activity;
|
||||
this.glSurfaceView = glSurfaceView;
|
||||
this.resources = activity.getResources();
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder cameraRecordListener(CameraRecordListener cameraRecordListener) {
|
||||
this.cameraRecordListener = cameraRecordListener;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder filter(GlFilter glFilter) {
|
||||
this.glFilter = glFilter;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder videoSize(int fileWidth, int fileHeight) {
|
||||
this.fileWidth = fileWidth;
|
||||
this.fileHeight = fileHeight;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder cameraSize(int cameraWidth, int cameraHeight) {
|
||||
this.cameraWidth = cameraWidth;
|
||||
this.cameraHeight = cameraHeight;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder cameraId(String camera) {
|
||||
        this.cameraId = camera;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder flipHorizontal(boolean flip) {
|
||||
this.flipHorizontal = flip;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder flipVertical(boolean flip) {
|
||||
this.flipVertical = flip;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder mute(boolean mute) {
|
||||
this.mute = mute;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorderBuilder recordNoFilter(boolean recordNoFilter) {
|
||||
this.recordNoFilter = recordNoFilter;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUCameraRecorder build() {
|
||||
if (this.glSurfaceView == null) {
|
||||
throw new IllegalArgumentException("glSurfaceView and windowManager, multiVideoEffects is NonNull !!");
|
||||
}
|
||||
|
||||
CameraManager cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
|
||||
boolean isLandscapeDevice = resources.getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
|
||||
|
||||
int degrees = 0;
|
||||
if (isLandscapeDevice) {
|
||||
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
|
||||
Log.d("GPUCameraRecorder", "Surface.ROTATION_90 = " + Surface.ROTATION_90 + " rotation = " + rotation);
|
||||
degrees = 90 * (rotation - 2);
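// Note (inferred from the arithmetic above): on landscape devices this expresses the display
// rotation relative to Surface.ROTATION_180, e.g. ROTATION_270 -> 90, ROTATION_90 -> -90.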
|
||||
}
|
||||
|
||||
GPUCameraRecorder GPUCameraRecorder = new GPUCameraRecorder(
|
||||
cameraRecordListener,
|
||||
glSurfaceView,
|
||||
fileWidth,
|
||||
fileHeight,
|
||||
cameraWidth,
|
||||
cameraHeight,
|
||||
cameraId,
|
||||
flipHorizontal,
|
||||
flipVertical,
|
||||
mute,
|
||||
cameraManager,
|
||||
isLandscapeDevice,
|
||||
degrees,
|
||||
recordNoFilter
|
||||
);
|
||||
|
||||
GPUCameraRecorder.setFilter(glFilter);
|
||||
activity = null;
|
||||
resources = null;
|
||||
return GPUCameraRecorder;
|
||||
}
|
||||
|
||||
}
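For reference, a minimal usage sketch of the builder above. The view id (R.id.recorder_view) and the start/stop calls mentioned in the trailing comment are illustrative assumptions, not part of this commit:

// Hypothetical wiring from an Activity's onCreate(); names are illustrative only.
GLSurfaceView recorderView = findViewById(R.id.recorder_view);   // assumed layout id
GPUCameraRecorder recorder = new GPUCameraRecorderBuilder(this, recorderView)
        .cameraId("0")            // back camera on most devices
        .cameraSize(1280, 720)    // capture size requested from camera2
        .videoSize(720, 1280)     // size of the encoded output file
        .mute(false)
        .build();
// Recording would then be driven through the returned GPUCameraRecorder
// (its start/stop method names are not shown in this hunk and are assumed).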
|
@ -0,0 +1,20 @@
|
||||
package com.xypower.gpuv.camerarecorder;
|
||||
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
|
||||
|
||||
|
||||
public enum LensFacing {
|
||||
FRONT(CameraCharacteristics.LENS_FACING_FRONT),
|
||||
BACK(CameraCharacteristics.LENS_FACING_BACK);
|
||||
|
||||
private int facing;
|
||||
|
||||
LensFacing(int facing) {
|
||||
this.facing = facing;
|
||||
}
|
||||
|
||||
public int getFacing() {
|
||||
return facing;
|
||||
}
|
||||
}
|
@ -0,0 +1,64 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.util.Log;
|
||||
import android.view.Surface;
|
||||
import android.view.SurfaceHolder;
|
||||
import android.view.SurfaceView;
|
||||
|
||||
|
||||
|
||||
public class EglSurface {
|
||||
private static final boolean DEBUG = false;
|
||||
private static final String TAG = "EglWrapper";
|
||||
|
||||
|
||||
private final EglWrapper egl;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
private final int width, height;
|
||||
|
||||
EglSurface(final EglWrapper egl, final Object surface) {
|
||||
if (DEBUG) Log.v(TAG, "EglSurface:");
|
||||
if (!(surface instanceof SurfaceView)
|
||||
&& !(surface instanceof Surface)
|
||||
&& !(surface instanceof SurfaceHolder)
|
||||
&& !(surface instanceof SurfaceTexture))
|
||||
throw new IllegalArgumentException("unsupported surface");
|
||||
this.egl = egl;
|
||||
eglSurface = this.egl.createWindowSurface(surface);
|
||||
width = this.egl.querySurface(eglSurface, EGL14.EGL_WIDTH);
|
||||
height = this.egl.querySurface(eglSurface, EGL14.EGL_HEIGHT);
|
||||
if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", width, height));
|
||||
}
|
||||
|
||||
public void makeCurrent() {
|
||||
egl.makeCurrent(eglSurface);
|
||||
}
|
||||
|
||||
public void swap() {
|
||||
egl.swap(eglSurface);
|
||||
}
|
||||
|
||||
public EGLContext getContext() {
|
||||
return egl.getContext();
|
||||
}
|
||||
|
||||
public void release() {
|
||||
if (DEBUG) Log.v(TAG, "EglSurface:release:");
|
||||
egl.makeDefault();
|
||||
egl.destroyWindowSurface(eglSurface);
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,233 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.opengl.*;
|
||||
import android.util.Log;
|
||||
|
||||
|
||||
public class EglWrapper {
|
||||
private static final boolean DEBUG = false;
|
||||
private static final String TAG = "EglWrapper";
|
||||
|
||||
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
|
||||
private EGLConfig eglConfig = null;
|
||||
private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
private EGLContext defaultContext = EGL14.EGL_NO_CONTEXT;
|
||||
|
||||
|
||||
EglWrapper(final EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
|
||||
if (DEBUG) Log.v(TAG, "EglWrapper:");
|
||||
init(shared_context, with_depth_buffer, isRecordable);
|
||||
}
|
||||
|
||||
public void release() {
|
||||
if (DEBUG) Log.v(TAG, "release:");
|
||||
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
|
||||
destroyContext();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
EGL14.eglReleaseThread();
|
||||
}
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
}
|
||||
|
||||
EglSurface createFromSurface(final Object surface) {
|
||||
if (DEBUG) Log.v(TAG, "createFromSurface:");
|
||||
final EglSurface eglSurface = new EglSurface(this, surface);
|
||||
eglSurface.makeCurrent();
|
||||
return eglSurface;
|
||||
}
|
||||
|
||||
|
||||
public EGLContext getContext() {
|
||||
return eglContext;
|
||||
}
|
||||
|
||||
int querySurface(final EGLSurface eglSurface, final int what) {
|
||||
final int[] value = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, what, value, 0);
|
||||
return value[0];
|
||||
}
|
||||
|
||||
private void init(EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
|
||||
if (DEBUG) Log.v(TAG, "init:");
|
||||
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("EGL already set up");
|
||||
}
|
||||
|
||||
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("eglGetDisplay failed");
|
||||
}
|
||||
|
||||
final int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
|
||||
eglDisplay = null;
|
||||
throw new RuntimeException("eglInitialize failed");
|
||||
}
|
||||
|
||||
shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
|
||||
if (eglContext == EGL14.EGL_NO_CONTEXT) {
|
||||
eglConfig = getConfig(with_depth_buffer, isRecordable);
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("chooseConfig failed");
|
||||
}
|
||||
// create EGL rendering context
|
||||
eglContext = createContext(shared_context);
|
||||
}
|
||||
// confirm whether the EGL rendering context is successfully created
|
||||
final int[] values = new int[1];
|
||||
EGL14.eglQueryContext(eglDisplay, eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
|
||||
if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
|
||||
makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
|
||||
}
|
||||
|
||||
/**
|
||||
* change the current EGL context to render to the given window surface
|
||||
*
|
||||
* @return true if the surface was successfully made current
|
||||
*/
|
||||
boolean makeCurrent(final EGLSurface surface) {
|
||||
// if (DEBUG) Log.v(TAG, "makeCurrent:");
|
||||
if (eglDisplay == null) {
|
||||
if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
|
||||
}
|
||||
if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
|
||||
final int error = EGL14.eglGetError();
|
||||
if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
|
||||
Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// attach EGL rendering context to the specific EGL window surface
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, surface, surface, eglContext)) {
|
||||
Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void makeDefault() {
|
||||
if (DEBUG) Log.v(TAG, "makeDefault:");
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
|
||||
Log.w("TAG", "makeDefault" + EGL14.eglGetError());
|
||||
}
|
||||
}
|
||||
|
||||
int swap(final EGLSurface surface) {
|
||||
// if (DEBUG) Log.v(TAG, "swap:");
|
||||
if (!EGL14.eglSwapBuffers(eglDisplay, surface)) {
|
||||
final int err = EGL14.eglGetError();
|
||||
if (DEBUG) Log.w(TAG, "swap:err=" + err);
|
||||
return err;
|
||||
}
|
||||
return EGL14.EGL_SUCCESS;
|
||||
}
|
||||
|
||||
private EGLContext createContext(final EGLContext shared_context) {
|
||||
// if (DEBUG) Log.v(TAG, "createContext:");
|
||||
|
||||
final int[] attrib_list = {
|
||||
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
final EGLContext context = EGL14.eglCreateContext(eglDisplay, eglConfig, shared_context, attrib_list, 0);
|
||||
checkEglError("eglCreateContext");
|
||||
return context;
|
||||
}
|
||||
|
||||
private void destroyContext() {
|
||||
if (DEBUG) Log.v(TAG, "destroyContext:");
|
||||
|
||||
if (!EGL14.eglDestroyContext(eglDisplay, eglContext)) {
|
||||
Log.e("destroyContext", "display:" + eglDisplay + " context: " + eglContext);
|
||||
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
|
||||
}
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
if (defaultContext != EGL14.EGL_NO_CONTEXT) {
|
||||
if (!EGL14.eglDestroyContext(eglDisplay, defaultContext)) {
|
||||
Log.e("destroyContext", "display:" + eglDisplay + " context: " + defaultContext);
|
||||
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
|
||||
}
|
||||
defaultContext = EGL14.EGL_NO_CONTEXT;
|
||||
}
|
||||
}
|
||||
|
||||
EGLSurface createWindowSurface(final Object surface) {
|
||||
if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + surface);
|
||||
|
||||
final int[] surfaceAttribs = {
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
EGLSurface result = null;
|
||||
try {
|
||||
result = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
|
||||
} catch (final IllegalArgumentException e) {
|
||||
Log.e(TAG, "eglCreateWindowSurface", e);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
void destroyWindowSurface(EGLSurface surface) {
|
||||
if (DEBUG) Log.v(TAG, "destroySurface:");
|
||||
|
||||
if (surface != EGL14.EGL_NO_SURFACE) {
|
||||
EGL14.eglMakeCurrent(eglDisplay,
|
||||
EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
|
||||
EGL14.eglDestroySurface(eglDisplay, surface);
|
||||
}
|
||||
surface = EGL14.EGL_NO_SURFACE;
|
||||
if (DEBUG) Log.v(TAG, "destroySurface:finished");
|
||||
}
|
||||
|
||||
private void checkEglError(final String msg) {
|
||||
int error;
|
||||
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
|
||||
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
|
||||
final int[] attribList = {
|
||||
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
|
||||
EGL14.EGL_RED_SIZE, 8,
|
||||
EGL14.EGL_GREEN_SIZE, 8,
|
||||
EGL14.EGL_BLUE_SIZE, 8,
|
||||
EGL14.EGL_ALPHA_SIZE, 8,
|
||||
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
|
||||
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1, // this flag is needed for recording via MediaCodec
|
||||
EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
|
||||
// with_depth_buffer ? 16 : 0,
|
||||
EGL14.EGL_NONE
|
||||
};
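// The trailing EGL_NONE pairs above are placeholders: the blocks below overwrite them
// in place, starting at index `offset`, when stencil/depth/recordable attributes are enabled.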
|
||||
int offset = 10;
|
||||
if (false) { // stencil buffer disabled (dead branch kept for reference)
|
||||
attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
|
||||
attribList[offset++] = 8;
|
||||
}
|
||||
if (with_depth_buffer) {
|
||||
attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
|
||||
attribList[offset++] = 16;
|
||||
}
|
||||
if (isRecordable) { // when the surface is used as a MediaCodec input surface
|
||||
attribList[offset++] = EGL_RECORDABLE_ANDROID;
|
||||
attribList[offset++] = 1;
|
||||
}
|
||||
for (int i = attribList.length - 1; i >= offset; i--) {
|
||||
attribList[i] = EGL14.EGL_NONE;
|
||||
}
|
||||
final EGLConfig[] configs = new EGLConfig[1];
|
||||
final int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
|
||||
// XXX it will be better to fallback to RGB565
|
||||
Log.w(TAG, "unable to find RGBA8888 / " + " EGLConfig");
|
||||
return null;
|
||||
}
|
||||
return configs[0];
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,306 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.Matrix;
|
||||
import android.text.TextUtils;
|
||||
import android.util.Log;
|
||||
import android.view.Surface;
|
||||
import android.view.SurfaceHolder;
|
||||
import com.xypower.gpuv.egl.GlFramebufferObject;
|
||||
import com.xypower.gpuv.egl.GlPreview;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import static android.opengl.GLES20.*;
|
||||
|
||||
|
||||
public class EncodeRenderHandler implements Runnable {
|
||||
private static final String TAG = "GPUCameraRecorder";
|
||||
|
||||
private final Object sync = new Object();
|
||||
private EGLContext sharedContext;
|
||||
private boolean isRecordable;
|
||||
private Object surface;
|
||||
private int texId = -1;
|
||||
|
||||
private boolean requestSetEglContext;
|
||||
private boolean requestRelease;
|
||||
private int requestDraw;
|
||||
|
||||
private float[] MVPMatrix = new float[16];
|
||||
private float[] STMatrix = new float[16];
|
||||
private float aspectRatio = 1f;
|
||||
|
||||
private final float XMatrixScale;
|
||||
private final float YMatrixScale;
|
||||
private final float fileWidth;
|
||||
private final float fileHeight;
|
||||
private final boolean recordNoFilter;
|
||||
|
||||
private GlFramebufferObject framebufferObject;
|
||||
private GlFramebufferObject filterFramebufferObject;
|
||||
private GlFilter normalFilter;
|
||||
private GlFilter glFilter;
|
||||
|
||||
private EglWrapper egl;
|
||||
private EglSurface inputSurface;
|
||||
private GlPreview previewShader;
|
||||
|
||||
static EncodeRenderHandler createHandler(final String name,
|
||||
final boolean flipVertical,
|
||||
final boolean flipHorizontal,
|
||||
final float viewAspect,
|
||||
final float fileWidth,
|
||||
final float fileHeight,
|
||||
final boolean recordNoFilter,
|
||||
final GlFilter filter
|
||||
) {
|
||||
Log.v(TAG, "createHandler:");
|
||||
Log.v(TAG, "fileAspect:" + (fileHeight / fileWidth) + " viewAcpect: " + viewAspect);
|
||||
|
||||
final EncodeRenderHandler handler = new EncodeRenderHandler(
|
||||
flipVertical,
|
||||
flipHorizontal,
|
||||
fileHeight > fileWidth ? fileHeight / fileWidth : fileWidth / fileHeight,
|
||||
viewAspect,
|
||||
fileWidth,
|
||||
fileHeight,
|
||||
recordNoFilter,
|
||||
filter
|
||||
);
|
||||
synchronized (handler.sync) {
|
||||
new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
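// The calling thread blocks on sync.wait() below until run() has initialized its state
// and called sync.notifyAll(), i.e. until the render thread is actually up.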
|
||||
try {
|
||||
handler.sync.wait();
|
||||
} catch (final InterruptedException e) {
|
||||
}
|
||||
}
|
||||
|
||||
return handler;
|
||||
}
|
||||
|
||||
private EncodeRenderHandler(final boolean flipVertical,
|
||||
final boolean flipHorizontal,
|
||||
final float fileAspect,
|
||||
final float viewAspect,
|
||||
final float fileWidth,
|
||||
final float fileHeight,
|
||||
final boolean recordNoFilter,
|
||||
final GlFilter filter
|
||||
) {
|
||||
|
||||
|
||||
this.fileWidth = fileWidth;
|
||||
this.fileHeight = fileHeight;
|
||||
this.recordNoFilter = recordNoFilter;
|
||||
this.glFilter = filter;
|
||||
|
||||
if (fileAspect == viewAspect) {
|
||||
XMatrixScale = (flipHorizontal ? -1 : 1);
|
||||
YMatrixScale = flipVertical ? -1 : 1;
|
||||
} else {
|
||||
if (fileAspect < viewAspect) {
|
||||
XMatrixScale = (flipHorizontal ? -1 : 1);
|
||||
YMatrixScale = (flipVertical ? -1 : 1) * (viewAspect / fileAspect);
|
||||
Log.v(TAG, "cameraAspect: " + viewAspect + " YMatrixScale :" + YMatrixScale);
|
||||
} else {
|
||||
XMatrixScale = (flipHorizontal ? -1 : 1) * (fileAspect / viewAspect);
|
||||
YMatrixScale = (flipVertical ? -1 : 1);
|
||||
Log.v(TAG, "cameraAspect: " + viewAspect + " YMatrixScale :" + YMatrixScale + " XMatrixScale :" + XMatrixScale);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface) {
|
||||
Log.i(TAG, "setEglContext:");
|
||||
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder)) {
|
||||
throw new RuntimeException("unsupported window type:" + surface);
|
||||
}
|
||||
synchronized (sync) {
|
||||
if (requestRelease) return;
|
||||
sharedContext = shared_context;
|
||||
texId = tex_id;
|
||||
this.surface = surface;
|
||||
this.isRecordable = true;
|
||||
requestSetEglContext = true;
|
||||
sync.notifyAll();
|
||||
try {
|
||||
sync.wait();
|
||||
} catch (final InterruptedException e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
final void prepareDraw() {
|
||||
synchronized (sync) {
|
||||
if (requestRelease) return;
|
||||
requestDraw++;
|
||||
sync.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final void draw(final int tex_id, final float[] texMatrix, final float[] mvpMatrix, final float aspectRatio) {
|
||||
synchronized (sync) {
|
||||
if (requestRelease) return;
|
||||
texId = tex_id;
|
||||
System.arraycopy(texMatrix, 0, STMatrix, 0, 16);
|
||||
System.arraycopy(mvpMatrix, 0, MVPMatrix, 0, 16);
|
||||
// workaround for square (1:1) aspect ratios
|
||||
Matrix.scaleM(MVPMatrix,
|
||||
0,
|
||||
XMatrixScale, // a negative value here flips the image
|
||||
YMatrixScale, // distortion relative to the on-screen preview could also be adjusted here
|
||||
1);
|
||||
this.aspectRatio = aspectRatio;
|
||||
requestDraw++;
|
||||
sync.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final void release() {
|
||||
Log.i(TAG, "release:");
|
||||
synchronized (sync) {
|
||||
if (requestRelease) return;
|
||||
requestRelease = true;
|
||||
sync.notifyAll();
|
||||
try {
|
||||
sync.wait();
|
||||
} catch (final InterruptedException e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//********************************************************************************
|
||||
//********************************************************************************
|
||||
|
||||
|
||||
@Override
|
||||
public final void run() {
|
||||
Log.i(TAG, "EncodeRenderHandler thread started:");
|
||||
synchronized (sync) {
|
||||
requestSetEglContext = requestRelease = false;
|
||||
requestDraw = 0;
|
||||
sync.notifyAll();
|
||||
}
|
||||
boolean localRequestDraw;
|
||||
for (; ; ) {
|
||||
synchronized (sync) {
|
||||
if (requestRelease) break;
|
||||
if (requestSetEglContext) {
|
||||
requestSetEglContext = false;
|
||||
internalPrepare();
|
||||
}
|
||||
localRequestDraw = requestDraw > 0;
|
||||
if (localRequestDraw) {
|
||||
requestDraw--;
|
||||
|
||||
}
|
||||
}
|
||||
if (localRequestDraw) {
|
||||
if ((egl != null) && texId >= 0) {
|
||||
inputSurface.makeCurrent();
|
||||
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
|
||||
if (isRecordFilter()) {
|
||||
framebufferObject.enable();
|
||||
filterFramebufferObject.enable();
|
||||
}
|
||||
|
||||
previewShader.draw(texId, MVPMatrix, STMatrix, aspectRatio);
|
||||
|
||||
if (isRecordFilter()) {
|
||||
framebufferObject.enable();
|
||||
//GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
glFilter.draw(filterFramebufferObject.getTexName(), framebufferObject);
|
||||
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());
|
||||
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
normalFilter.draw(framebufferObject.getTexName(), null);
|
||||
}
|
||||
|
||||
|
||||
inputSurface.swap();
|
||||
}
|
||||
} else {
|
||||
synchronized (sync) {
|
||||
try {
|
||||
sync.wait();
|
||||
} catch (final InterruptedException e) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
synchronized (sync) {
|
||||
requestRelease = true;
|
||||
internalRelease();
|
||||
sync.notifyAll();
|
||||
}
|
||||
Log.i(TAG, "EncodeRenderHandler thread finished:");
|
||||
}
|
||||
|
||||
private void internalPrepare() {
|
||||
Log.i(TAG, "internalPrepare:");
|
||||
internalRelease();
|
||||
egl = new EglWrapper(sharedContext, false, isRecordable);
|
||||
|
||||
inputSurface = egl.createFromSurface(surface);
|
||||
|
||||
inputSurface.makeCurrent();
|
||||
|
||||
previewShader = new GlPreview(GlPreview.GL_TEXTURE_EXTERNAL_OES);
|
||||
previewShader.setup();
|
||||
|
||||
if (isRecordFilter()) {
|
||||
framebufferObject = new GlFramebufferObject();
|
||||
framebufferObject.setup((int) fileWidth, (int) fileHeight);
|
||||
|
||||
filterFramebufferObject = new GlFramebufferObject();
|
||||
filterFramebufferObject.setup((int) fileWidth, (int) fileHeight);
|
||||
|
||||
normalFilter = new GlFilter();
|
||||
normalFilter.setup();
|
||||
}
|
||||
|
||||
surface = null;
|
||||
sync.notifyAll();
|
||||
}
|
||||
|
||||
private void internalRelease() {
|
||||
Log.i(TAG, "internalRelease:");
|
||||
if (inputSurface != null) {
|
||||
inputSurface.release();
|
||||
inputSurface = null;
|
||||
}
|
||||
if (egl != null) {
|
||||
egl.release();
|
||||
egl = null;
|
||||
}
|
||||
if (normalFilter != null) {
|
||||
normalFilter.release();
|
||||
normalFilter = null;
|
||||
}
|
||||
if (filterFramebufferObject != null) {
|
||||
filterFramebufferObject.release();
|
||||
filterFramebufferObject = null;
|
||||
}
|
||||
if (framebufferObject != null) {
|
||||
framebufferObject.release();
|
||||
framebufferObject = null;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isRecordFilter() {
|
||||
return (glFilter != null && !recordNoFilter);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,181 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.media.*;
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
|
||||
public class MediaAudioEncoder extends MediaEncoder {
|
||||
private static final String TAG = "MediaAudioEncoder";
|
||||
|
||||
private static final String MIME_TYPE = "audio/mp4a-latm";
|
||||
private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
|
||||
private static final int BIT_RATE = 64000;
|
||||
private static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
|
||||
private static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
|
||||
|
||||
private AudioThread audioThread = null;
|
||||
|
||||
public MediaAudioEncoder(final MediaMuxerCaptureWrapper muxer, final MediaEncoderListener listener) {
|
||||
super(muxer, listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void prepare() throws IOException {
|
||||
Log.v(TAG, "prepare:");
|
||||
trackIndex = -1;
|
||||
muxerStarted = isEOS = false;
|
||||
// prepare MediaCodec for AAC encoding of audio data from internal mic.
|
||||
final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
|
||||
if (audioCodecInfo == null) {
|
||||
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
|
||||
return;
|
||||
}
|
||||
Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
|
||||
|
||||
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
|
||||
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
|
||||
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
|
||||
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
|
||||
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
|
||||
Log.i(TAG, "format: " + audioFormat);
|
||||
mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
|
||||
mediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
mediaCodec.start();
|
||||
Log.i(TAG, "prepare finishing");
|
||||
if (listener != null) {
|
||||
try {
|
||||
listener.onPrepared(this);
|
||||
} catch (final Exception e) {
|
||||
Log.e(TAG, "prepare:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void startRecording() {
|
||||
super.startRecording();
|
||||
// create and execute audio capturing thread using internal mic
|
||||
if (audioThread == null) {
|
||||
audioThread = new AudioThread();
|
||||
audioThread.start();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void release() {
|
||||
audioThread = null;
|
||||
super.release();
|
||||
}
|
||||
|
||||
private static final int[] AUDIO_SOURCES = new int[]{
|
||||
MediaRecorder.AudioSource.MIC,
|
||||
MediaRecorder.AudioSource.DEFAULT,
|
||||
MediaRecorder.AudioSource.CAMCORDER,
|
||||
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
|
||||
MediaRecorder.AudioSource.VOICE_RECOGNITION,
|
||||
};
|
||||
|
||||
/**
|
||||
* Thread to capture audio data from internal mic as uncompressed 16bit PCM data
|
||||
* and write them to the MediaCodec encoder
|
||||
*/
|
||||
private class AudioThread extends Thread {
|
||||
@Override
|
||||
public void run() {
|
||||
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
|
||||
try {
|
||||
final int min_buffer_size = AudioRecord.getMinBufferSize(
|
||||
SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
|
||||
AudioFormat.ENCODING_PCM_16BIT);
|
||||
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
|
||||
if (buffer_size < min_buffer_size)
|
||||
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
|
||||
|
||||
AudioRecord audioRecord = null;
|
||||
for (final int source : AUDIO_SOURCES) {
|
||||
try {
|
||||
audioRecord = new AudioRecord(
|
||||
source, SAMPLE_RATE,
|
||||
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
|
||||
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
|
||||
audioRecord = null;
|
||||
} catch (final Exception e) {
|
||||
audioRecord = null;
|
||||
}
|
||||
if (audioRecord != null) break;
|
||||
}
|
||||
if (audioRecord != null) {
|
||||
try {
|
||||
if (isCapturing) {
|
||||
Log.v(TAG, "AudioThread:start audio recording");
|
||||
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
|
||||
int readBytes;
|
||||
audioRecord.startRecording();
|
||||
try {
|
||||
for (; isCapturing && !requestStop && !isEOS; ) {
|
||||
// read audio data from internal mic
|
||||
buf.clear();
|
||||
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
|
||||
if (readBytes > 0) {
|
||||
// set audio data to encoder
|
||||
buf.position(readBytes);
|
||||
buf.flip();
|
||||
encode(buf, readBytes, getPTSUs());
|
||||
frameAvailableSoon();
|
||||
}
|
||||
}
|
||||
frameAvailableSoon();
|
||||
} finally {
|
||||
audioRecord.stop();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
audioRecord.release();
|
||||
}
|
||||
} else {
|
||||
Log.e(TAG, "failed to initialize AudioRecord");
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
Log.e(TAG, "AudioThread#run", e);
|
||||
}
|
||||
Log.v(TAG, "AudioThread:finished");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* select the first codec that matches a specific MIME type
|
||||
*
|
||||
* @param mimeType
|
||||
* @return
|
||||
*/
|
||||
private static MediaCodecInfo selectAudioCodec(final String mimeType) {
|
||||
Log.v(TAG, "selectAudioCodec:");
|
||||
|
||||
MediaCodecInfo result = null;
|
||||
MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
|
||||
MediaCodecInfo[] codecInfos = list.getCodecInfos();
|
||||
final int numCodecs = codecInfos.length;
|
||||
LOOP:
|
||||
for (int i = 0; i < numCodecs; i++) {
|
||||
final MediaCodecInfo codecInfo = codecInfos[i];
|
||||
if (!codecInfo.isEncoder()) { // skip decoders
|
||||
continue;
|
||||
}
|
||||
final String[] types = codecInfo.getSupportedTypes();
|
||||
for (int j = 0; j < types.length; j++) {
|
||||
if (types[j].equalsIgnoreCase(mimeType)) {
|
||||
Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
|
||||
if (result == null) {
|
||||
result = codecInfo;
|
||||
break LOOP;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,164 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaMuxer;
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
|
||||
|
||||
public class MediaMuxerCaptureWrapper {
|
||||
private static final String TAG = "MediaMuxerWrapper";
|
||||
|
||||
private final MediaMuxer mediaMuxer;
|
||||
private int encoderCount, startedCount;
|
||||
private boolean isStarted;
|
||||
private MediaEncoder videoEncoder, audioEncoder;
|
||||
private long preventAudioPresentationTimeUs = -1;
|
||||
private int audioTrackIndex = -1;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public MediaMuxerCaptureWrapper(final String filePath) throws IOException {
|
||||
mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
|
||||
encoderCount = startedCount = 0;
|
||||
isStarted = false;
|
||||
|
||||
}
|
||||
|
||||
public void prepare() throws IOException {
|
||||
if (videoEncoder != null) {
|
||||
videoEncoder.prepare();
|
||||
}
|
||||
if (audioEncoder != null) {
|
||||
audioEncoder.prepare();
|
||||
}
|
||||
}
|
||||
|
||||
public void startRecording() {
|
||||
if (videoEncoder != null) {
|
||||
videoEncoder.startRecording();
|
||||
}
|
||||
if (audioEncoder != null) {
|
||||
audioEncoder.startRecording();
|
||||
}
|
||||
}
|
||||
|
||||
public void stopRecording() {
|
||||
if (videoEncoder != null) {
|
||||
videoEncoder.stopRecording();
|
||||
}
|
||||
videoEncoder = null;
|
||||
if (audioEncoder != null) {
|
||||
audioEncoder.stopRecording();
|
||||
}
|
||||
audioEncoder = null;
|
||||
}
|
||||
|
||||
public synchronized boolean isStarted() {
|
||||
return isStarted;
|
||||
}
|
||||
|
||||
//**********************************************************************
|
||||
//**********************************************************************
|
||||
|
||||
/**
|
||||
* assign an encoder to this class. this is called from the encoder.
|
||||
*
|
||||
* @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
|
||||
*/
|
||||
void addEncoder(final MediaEncoder encoder) {
|
||||
if (encoder instanceof MediaVideoEncoder) {
|
||||
if (videoEncoder != null)
|
||||
throw new IllegalArgumentException("Video encoder already added.");
|
||||
videoEncoder = encoder;
|
||||
} else if (encoder instanceof MediaAudioEncoder) {
|
||||
if (audioEncoder != null)
|
||||
throw new IllegalArgumentException("Video encoder already added.");
|
||||
audioEncoder = encoder;
|
||||
} else
|
||||
throw new IllegalArgumentException("unsupported encoder");
|
||||
encoderCount = (videoEncoder != null ? 1 : 0) + (audioEncoder != null ? 1 : 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* request start recording from encoder
|
||||
*
|
||||
* @return true when muxer is ready to write
|
||||
*/
|
||||
synchronized boolean start() {
|
||||
Log.v(TAG, "start:");
|
||||
startedCount++;
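// The underlying MediaMuxer is started only once every added encoder
// (video and, if present, audio) has reported in via start().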
|
||||
if ((encoderCount > 0) && (startedCount == encoderCount)) {
|
||||
mediaMuxer.start();
|
||||
isStarted = true;
|
||||
notifyAll();
|
||||
Log.v(TAG, "MediaMuxer started:");
|
||||
}
|
||||
return isStarted;
|
||||
}
|
||||
|
||||
/**
|
||||
* request stop recording from encoder when encoder received EOS
|
||||
*/
|
||||
/*package*/
|
||||
synchronized void stop() {
|
||||
Log.v(TAG, "stop:startedCount=" + startedCount);
|
||||
startedCount--;
|
||||
if ((encoderCount > 0) && (startedCount <= 0)) {
|
||||
mediaMuxer.stop();
|
||||
mediaMuxer.release();
|
||||
isStarted = false;
|
||||
Log.v(TAG, "MediaMuxer stopped:");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* assign encoder to muxer
|
||||
*
|
||||
* @param format
|
||||
* @return a negative value indicates an error
|
||||
*/
|
||||
synchronized int addTrack(final MediaFormat format) {
|
||||
if (isStarted) {
|
||||
throw new IllegalStateException("muxer already started");
|
||||
}
|
||||
|
||||
final int trackIx = mediaMuxer.addTrack(format);
|
||||
Log.i(TAG, "addTrack:trackNum=" + encoderCount + ",trackIx=" + trackIx + ",format=" + format);
|
||||
|
||||
String mime = format.getString(MediaFormat.KEY_MIME);
|
||||
if (!mime.startsWith("video/")) {
|
||||
audioTrackIndex = trackIx;
|
||||
}
|
||||
return trackIx;
|
||||
}
|
||||
|
||||
/**
|
||||
* write encoded data to muxer
|
||||
*
|
||||
* @param trackIndex
|
||||
* @param byteBuf
|
||||
* @param bufferInfo
|
||||
*/
|
||||
/*package*/
|
||||
synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
|
||||
//bufferInfo.presentationTimeUs
|
||||
if (startedCount <= 0) return;
|
||||
|
||||
if (audioTrackIndex == trackIndex) {
|
||||
if (preventAudioPresentationTimeUs < bufferInfo.presentationTimeUs) {
|
||||
mediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
|
||||
preventAudioPresentationTimeUs = bufferInfo.presentationTimeUs;
|
||||
}
|
||||
} else {
|
||||
mediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,212 @@
|
||||
package com.xypower.gpuv.camerarecorder.capture;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
import android.opengl.EGLContext;
|
||||
import android.util.Log;
|
||||
import android.view.Surface;
|
||||
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
||||
|
||||
public class MediaVideoEncoder extends MediaEncoder {
|
||||
private static final String TAG = "MediaVideoEncoder";
|
||||
|
||||
private static final String MIME_TYPE = "video/avc";
|
||||
// parameters for recording
|
||||
private static final int FRAME_RATE = 30;
|
||||
private static final float BPP = 0.25f;
|
||||
|
||||
private final int fileWidth;
|
||||
private final int fileHeight;
|
||||
private EncodeRenderHandler encodeRenderHandler;
|
||||
private Surface surface;
|
||||
|
||||
public MediaVideoEncoder(final MediaMuxerCaptureWrapper muxer,
|
||||
final MediaEncoderListener listener,
|
||||
final int fileWidth,
|
||||
final int fileHeight,
|
||||
final boolean flipHorizontal,
|
||||
final boolean flipVertical,
|
||||
final float viewWidth,
|
||||
final float viewHeight,
|
||||
final boolean recordNoFilter,
|
||||
final GlFilter filter
|
||||
) {
|
||||
super(muxer, listener);
|
||||
this.fileWidth = fileWidth;
|
||||
this.fileHeight = fileHeight;
|
||||
encodeRenderHandler = EncodeRenderHandler.createHandler(
|
||||
TAG,
|
||||
flipVertical,
|
||||
flipHorizontal,
|
||||
(viewWidth > viewHeight) ? (viewWidth / viewHeight) : (viewHeight / viewWidth),
|
||||
fileWidth,
|
||||
fileHeight,
|
||||
recordNoFilter,
|
||||
filter
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
public void frameAvailableSoon(final int texName, final float[] stMatrix, final float[] mvpMatrix, final float aspectRatio) {
|
||||
if (super.frameAvailableSoon()) {
|
||||
encodeRenderHandler.draw(texName, stMatrix, mvpMatrix, aspectRatio);
|
||||
}
|
||||
//result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean frameAvailableSoon() {
|
||||
boolean result;
|
||||
if (result = super.frameAvailableSoon()) {
|
||||
encodeRenderHandler.prepareDraw();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void prepare() throws IOException {
|
||||
Log.i(TAG, "prepare: ");
|
||||
trackIndex = -1;
|
||||
muxerStarted = isEOS = false;
|
||||
|
||||
final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
|
||||
|
||||
if (videoCodecInfo == null) {
|
||||
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
|
||||
return;
|
||||
}
|
||||
Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
|
||||
|
||||
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, fileWidth, fileHeight);
|
||||
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
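// COLOR_FormatSurface: the encoder takes its input from the Surface created via
// createInputSurface() below rather than from ByteBuffers.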
|
||||
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate(fileWidth, fileHeight));
|
||||
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
|
||||
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
|
||||
Log.i(TAG, "format: " + format);
|
||||
|
||||
mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
|
||||
|
||||
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
// get Surface for encoder input
|
||||
// this method can only be called between #configure and #start
|
||||
surface = mediaCodec.createInputSurface();
|
||||
mediaCodec.start();
|
||||
Log.i(TAG, "prepare finishing");
|
||||
if (listener != null) {
|
||||
try {
|
||||
listener.onPrepared(this);
|
||||
} catch (final Exception e) {
|
||||
Log.e(TAG, "prepare:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void setEglContext(final EGLContext shared_context, final int tex_id) {
|
||||
encodeRenderHandler.setEglContext(shared_context, tex_id, surface);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void release() {
|
||||
Log.i(TAG, "release:");
|
||||
if (surface != null) {
|
||||
surface.release();
|
||||
surface = null;
|
||||
}
|
||||
if (encodeRenderHandler != null) {
|
||||
encodeRenderHandler.release();
|
||||
encodeRenderHandler = null;
|
||||
}
|
||||
super.release();
|
||||
}
|
||||
|
||||
private static int calcBitRate(int width, int height) {
|
||||
final int bitrate = (int) (BPP * FRAME_RATE * width * height);
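// e.g. 1280x720 at 0.25 bpp and 30 fps gives 0.25 * 30 * 1280 * 720 ≈ 6.9 Mbit/s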
|
||||
Log.i(TAG, "bitrate=" + bitrate);
|
||||
return bitrate;
|
||||
}
|
||||
|
||||
/**
|
||||
* select the first codec that matches a specific MIME type
|
||||
*
|
||||
* @param mimeType
|
||||
* @return null if no codec matched
|
||||
*/
|
||||
private static MediaCodecInfo selectVideoCodec(final String mimeType) {
|
||||
Log.v(TAG, "selectVideoCodec:");
|
||||
|
||||
// get the list of available codecs
|
||||
MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
|
||||
MediaCodecInfo[] codecInfos = list.getCodecInfos();
|
||||
|
||||
final int numCodecs = codecInfos.length;
|
||||
for (int i = 0; i < numCodecs; i++) {
|
||||
final MediaCodecInfo codecInfo = codecInfos[i];
|
||||
|
||||
if (!codecInfo.isEncoder()) { // skip decoders
|
||||
continue;
|
||||
}
|
||||
// select the first codec that matches a specific MIME type and color format
|
||||
final String[] types = codecInfo.getSupportedTypes();
|
||||
for (int j = 0; j < types.length; j++) {
|
||||
if (types[j].equalsIgnoreCase(mimeType)) {
|
||||
Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
|
||||
final int format = selectColorFormat(codecInfo, mimeType);
|
||||
if (format > 0) {
|
||||
return codecInfo;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* select a color format available on the specific codec that we can use.
|
||||
*
|
||||
* @return 0 if no colorFormat is matched
|
||||
*/
|
||||
private static int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
|
||||
Log.i(TAG, "selectColorFormat: ");
|
||||
int result = 0;
|
||||
final MediaCodecInfo.CodecCapabilities caps;
|
||||
try {
|
||||
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
|
||||
caps = codecInfo.getCapabilitiesForType(mimeType);
|
||||
} finally {
|
||||
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
|
||||
}
|
||||
int colorFormat;
|
||||
for (int i = 0; i < caps.colorFormats.length; i++) {
|
||||
colorFormat = caps.colorFormats[i];
|
||||
if (isRecognizedVideoFormat(colorFormat)) {
|
||||
if (result == 0)
|
||||
result = colorFormat;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (result == 0)
|
||||
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static boolean isRecognizedVideoFormat(final int colorFormat) {
|
||||
Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
|
||||
return (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void signalEndOfInputStream() {
|
||||
Log.d(TAG, "sending EOS to encoder");
|
||||
mediaCodec.signalEndOfInputStream(); // API >= 18
|
||||
isEOS = true;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,216 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.ShortBuffer;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.Queue;
|
||||
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/AudioChannel.java
|
||||
|
||||
|
||||
class AudioChannel {
|
||||
|
||||
private static class AudioBuffer {
|
||||
int bufferIndex;
|
||||
long presentationTimeUs;
|
||||
ShortBuffer data;
|
||||
}
|
||||
|
||||
static final int BUFFER_INDEX_END_OF_STREAM = -1;
|
||||
|
||||
private static final int BYTES_PER_SHORT = 2;
|
||||
private static final long MICROSECS_PER_SEC = 1000000;
|
||||
|
||||
private final Queue<AudioBuffer> emptyBuffers = new ArrayDeque<>();
|
||||
private final Queue<AudioBuffer> filledBuffers = new ArrayDeque<>();
|
||||
|
||||
private final MediaCodec decoder;
|
||||
private final MediaCodec encoder;
|
||||
private final MediaFormat encodeFormat;
|
||||
|
||||
private int inputSampleRate;
|
||||
private int inputChannelCount;
|
||||
private int outputChannelCount;
|
||||
|
||||
private final MediaCodecBufferCompatWrapper decoderBuffers;
|
||||
private final MediaCodecBufferCompatWrapper encoderBuffers;
|
||||
|
||||
private final AudioBuffer overflowBuffer = new AudioBuffer();
|
||||
|
||||
private MediaFormat actualDecodedFormat;
|
||||
|
||||
|
||||
AudioChannel(final MediaCodec decoder,
|
||||
final MediaCodec encoder, final MediaFormat encodeFormat) {
|
||||
this.decoder = decoder;
|
||||
this.encoder = encoder;
|
||||
this.encodeFormat = encodeFormat;
|
||||
|
||||
decoderBuffers = new MediaCodecBufferCompatWrapper(this.decoder);
|
||||
encoderBuffers = new MediaCodecBufferCompatWrapper(this.encoder);
|
||||
}
|
||||
|
||||
void setActualDecodedFormat(final MediaFormat decodedFormat) {
|
||||
actualDecodedFormat = decodedFormat;
|
||||
|
||||
inputSampleRate = actualDecodedFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
|
||||
if (inputSampleRate != encodeFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)) {
|
||||
throw new UnsupportedOperationException("Audio sample rate conversion not supported yet.");
|
||||
}
|
||||
|
||||
inputChannelCount = actualDecodedFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
|
||||
outputChannelCount = encodeFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
|
||||
|
||||
if (inputChannelCount != 1 && inputChannelCount != 2) {
|
||||
throw new UnsupportedOperationException("Input channel count (" + inputChannelCount + ") not supported.");
|
||||
}
|
||||
|
||||
if (outputChannelCount != 1 && outputChannelCount != 2) {
|
||||
throw new UnsupportedOperationException("Output channel count (" + outputChannelCount + ") not supported.");
|
||||
}
|
||||
|
||||
overflowBuffer.presentationTimeUs = 0;
|
||||
}
|
||||
|
||||
void drainDecoderBufferAndQueue(final int bufferIndex, final long presentationTimeUs) {
|
||||
if (actualDecodedFormat == null) {
|
||||
throw new RuntimeException("Buffer received before format!");
|
||||
}
|
||||
|
||||
final ByteBuffer data =
|
||||
bufferIndex == BUFFER_INDEX_END_OF_STREAM ?
|
||||
null : decoderBuffers.getOutputBuffer(bufferIndex);
|
||||
|
||||
AudioBuffer buffer = emptyBuffers.poll();
|
||||
if (buffer == null) {
|
||||
buffer = new AudioBuffer();
|
||||
}
|
||||
|
||||
buffer.bufferIndex = bufferIndex;
|
||||
buffer.presentationTimeUs = presentationTimeUs;
|
||||
buffer.data = data == null ? null : data.asShortBuffer();
|
||||
|
||||
if (overflowBuffer.data == null) {
|
||||
overflowBuffer.data = ByteBuffer
|
||||
.allocateDirect(data.capacity())
|
||||
.order(ByteOrder.nativeOrder())
|
||||
.asShortBuffer();
|
||||
overflowBuffer.data.clear().flip();
|
||||
}
|
||||
|
||||
filledBuffers.add(buffer);
|
||||
}
|
||||
|
||||
boolean feedEncoder(long timeoutUs) {
|
||||
final boolean hasOverflow = overflowBuffer.data != null && overflowBuffer.data.hasRemaining();
|
||||
if (filledBuffers.isEmpty() && !hasOverflow) {
|
||||
// No audio data - Bail out
|
||||
return false;
|
||||
}
|
||||
|
||||
final int encoderInBuffIndex = encoder.dequeueInputBuffer(timeoutUs);
|
||||
if (encoderInBuffIndex < 0) {
|
||||
// Encoder is full - Bail out
|
||||
return false;
|
||||
}
|
||||
|
||||
// Drain overflow first
|
||||
final ShortBuffer outBuffer = encoderBuffers.getInputBuffer(encoderInBuffIndex).asShortBuffer();
|
||||
if (hasOverflow) {
|
||||
final long presentationTimeUs = drainOverflow(outBuffer);
|
||||
encoder.queueInputBuffer(encoderInBuffIndex,
|
||||
0, outBuffer.position() * BYTES_PER_SHORT,
|
||||
presentationTimeUs, 0);
|
||||
return true;
|
||||
}
|
||||
|
||||
final AudioBuffer inBuffer = filledBuffers.poll();
|
||||
if (inBuffer.bufferIndex == BUFFER_INDEX_END_OF_STREAM) {
|
||||
encoder.queueInputBuffer(encoderInBuffIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
return false;
|
||||
}
|
||||
|
||||
final long presentationTimeUs = remixAndMaybeFillOverflow(inBuffer, outBuffer);
|
||||
encoder.queueInputBuffer(encoderInBuffIndex,
|
||||
0, outBuffer.position() * BYTES_PER_SHORT,
|
||||
presentationTimeUs, 0);
|
||||
if (inBuffer != null) {
|
||||
decoder.releaseOutputBuffer(inBuffer.bufferIndex, false);
|
||||
emptyBuffers.add(inBuffer);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static long sampleCountToDurationUs(final int sampleCount,
|
||||
final int sampleRate,
|
||||
final int channelCount) {
|
||||
// duration(us) = samples * 1e6 / (sampleRate * channels); the previous integer math always truncated to 0
return (sampleCount * MICROSECS_PER_SEC) / (sampleRate * channelCount);
|
||||
}
|
||||
|
||||
private long drainOverflow(final ShortBuffer outBuff) {
|
||||
final ShortBuffer overflowBuff = overflowBuffer.data;
|
||||
final int overflowLimit = overflowBuff.limit();
|
||||
final int overflowSize = overflowBuff.remaining();
|
||||
|
||||
final long beginPresentationTimeUs = overflowBuffer.presentationTimeUs +
|
||||
sampleCountToDurationUs(overflowBuff.position(), inputSampleRate, outputChannelCount);
|
||||
|
||||
outBuff.clear();
|
||||
// Limit overflowBuff to outBuff's capacity
|
||||
overflowBuff.limit(outBuff.capacity());
|
||||
// Load overflowBuff onto outBuff
|
||||
outBuff.put(overflowBuff);
|
||||
|
||||
if (overflowSize >= outBuff.capacity()) {
|
||||
// Overflow fully consumed - Reset
|
||||
overflowBuff.clear().limit(0);
|
||||
} else {
|
||||
// Only partially consumed - Keep position & restore previous limit
|
||||
overflowBuff.limit(overflowLimit);
|
||||
}
|
||||
|
||||
return beginPresentationTimeUs;
|
||||
}
|
||||
|
||||
private long remixAndMaybeFillOverflow(final AudioBuffer input,
|
||||
final ShortBuffer outBuff) {
|
||||
final ShortBuffer inBuff = input.data;
|
||||
final ShortBuffer overflowBuff = overflowBuffer.data;
|
||||
|
||||
outBuff.clear();
|
||||
|
||||
// Reset position to 0, and set limit to capacity (Since MediaCodec doesn't do that for us)
|
||||
inBuff.clear();
|
||||
|
||||
if (inBuff.remaining() > outBuff.remaining()) {
|
||||
// Overflow
|
||||
// Limit inBuff to outBuff's capacity
|
||||
inBuff.limit(outBuff.capacity());
|
||||
outBuff.put(inBuff);
|
||||
|
||||
// Reset limit to its own capacity & Keep position
|
||||
inBuff.limit(inBuff.capacity());
|
||||
|
||||
// Remix the rest onto overflowBuffer
|
||||
// NOTE: We should only reach this point when overflow buffer is empty
|
||||
final long consumedDurationUs =
|
||||
sampleCountToDurationUs(inBuff.position(), inputSampleRate, inputChannelCount);
|
||||
overflowBuff.put(inBuff);
|
||||
|
||||
// Seal off overflowBuff & mark limit
|
||||
overflowBuff.flip();
|
||||
overflowBuffer.presentationTimeUs = input.presentationTimeUs + consumedDurationUs;
|
||||
} else {
|
||||
// No overflow
|
||||
outBuff.put(inBuff);
|
||||
}
|
||||
|
||||
return input.presentationTimeUs;
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,83 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
|
||||
class AudioComposer implements IAudioComposer {
|
||||
private final MediaExtractor mediaExtractor;
|
||||
private final int trackIndex;
|
||||
private final MuxRender muxRender;
|
||||
private final MuxRender.SampleType sampleType = MuxRender.SampleType.AUDIO;
|
||||
private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
private int bufferSize;
|
||||
private ByteBuffer buffer;
|
||||
private boolean isEOS;
|
||||
private MediaFormat actualOutputFormat;
|
||||
private long writtenPresentationTimeUs;
|
||||
|
||||
AudioComposer(MediaExtractor mediaExtractor, int trackIndex,
|
||||
MuxRender muxRender) {
|
||||
this.mediaExtractor = mediaExtractor;
|
||||
this.trackIndex = trackIndex;
|
||||
this.muxRender = muxRender;
|
||||
|
||||
actualOutputFormat = this.mediaExtractor.getTrackFormat(this.trackIndex);
|
||||
this.muxRender.setOutputFormat(this.sampleType, actualOutputFormat);
|
||||
bufferSize = actualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
|
||||
buffer = ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.nativeOrder());
|
||||
}
|
||||
|
||||
|
||||
@SuppressLint("Assert")
|
||||
public boolean stepPipeline() {
|
||||
if (isEOS) return false;
|
||||
int trackIndex = mediaExtractor.getSampleTrackIndex();
|
||||
if (trackIndex < 0) {
|
||||
buffer.clear();
|
||||
bufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
muxRender.writeSampleData(sampleType, buffer, bufferInfo);
|
||||
isEOS = true;
|
||||
return true;
|
||||
}
|
||||
if (trackIndex != this.trackIndex) return false;
|
||||
|
||||
buffer.clear();
|
||||
int sampleSize = mediaExtractor.readSampleData(buffer, 0);
|
||||
assert sampleSize <= bufferSize;
|
||||
boolean isKeyFrame = (mediaExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
|
||||
int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0;
|
||||
bufferInfo.set(0, sampleSize, mediaExtractor.getSampleTime(), flags);
|
||||
muxRender.writeSampleData(sampleType, buffer, bufferInfo);
|
||||
writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
|
||||
|
||||
mediaExtractor.advance();
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getWrittenPresentationTimeUs() {
|
||||
return writtenPresentationTimeUs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFinished() {
|
||||
return isEOS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
// do nothing
|
||||
}
|
||||
}
|
@ -0,0 +1,357 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.*;
|
||||
import android.util.Log;
|
||||
import android.util.Size;
|
||||
import android.view.Surface;
|
||||
import com.xypower.gpuv.egl.EglUtil;
|
||||
import com.xypower.gpuv.egl.GlFramebufferObject;
|
||||
import com.xypower.gpuv.egl.GlPreviewFilter;
|
||||
import com.xypower.gpuv.egl.GlSurfaceTexture;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import static android.opengl.GLES20.*;
|
||||
|
||||
|
||||
// Refer : https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/OutputSurface.java
|
||||
|
||||
/**
|
||||
* Holds state associated with a Surface used for MediaCodec decoder output.
|
||||
* <p>
|
||||
* The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
|
||||
* and then create a Surface for that SurfaceTexture. The Surface can be passed to
|
||||
* MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
|
||||
* texture with updateTexImage, then render the texture with GL to a pbuffer.
|
||||
* <p>
|
||||
* The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
|
||||
* Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
|
||||
* we just draw it on whatever surface is current.
|
||||
* <p>
|
||||
* By default, the Surface will be using a BufferQueue in asynchronous mode, so we
|
||||
* can potentially drop frames.
|
||||
*/
|
||||
class DecoderSurface implements SurfaceTexture.OnFrameAvailableListener {
|
||||
private static final String TAG = "DecoderSurface";
|
||||
private static final boolean VERBOSE = false;
|
||||
private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
private Surface surface;
|
||||
private Object frameSyncObject = new Object(); // guards frameAvailable
|
||||
private boolean frameAvailable;
|
||||
private GlFilter filter;
|
||||
|
||||
private int texName;
|
||||
|
||||
private GlSurfaceTexture previewTexture;
|
||||
|
||||
private GlFramebufferObject filterFramebufferObject;
|
||||
private GlPreviewFilter previewShader;
|
||||
private GlFilter normalShader;
|
||||
private GlFramebufferObject framebufferObject;
|
||||
|
||||
private float[] MVPMatrix = new float[16];
|
||||
private float[] ProjMatrix = new float[16];
|
||||
private float[] MMatrix = new float[16];
|
||||
private float[] VMatrix = new float[16];
|
||||
private float[] STMatrix = new float[16];
|
||||
|
||||
|
||||
private Rotation rotation = Rotation.NORMAL;
|
||||
private Size outputResolution;
|
||||
private Size inputResolution;
|
||||
private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
|
||||
private FillModeCustomItem fillModeCustomItem;
|
||||
private boolean flipVertical = false;
|
||||
private boolean flipHorizontal = false;
|
||||
|
||||
/**
|
||||
* Creates an DecoderSurface using the current EGL context (rather than establishing a
|
||||
* new one). Creates a Surface that can be passed to MediaCodec.configure().
|
||||
*/
|
||||
DecoderSurface(GlFilter filter) {
|
||||
this.filter = filter;
|
||||
setup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates instances of TextureRender and SurfaceTexture, and a Surface associated
|
||||
* with the SurfaceTexture.
|
||||
*/
|
||||
private void setup() {
|
||||
|
||||
// Even if we don't access the SurfaceTexture after the constructor returns, we
|
||||
// still need to keep a reference to it. The Surface doesn't retain a reference
|
||||
// at the Java level, so if we don't either then the object can get GCed, which
|
||||
// causes the native finalizer to run.
|
||||
|
||||
// if (VERBOSE) Log.d(TAG, "textureID=" + filter.getTextureId());
|
||||
// surfaceTexture = new SurfaceTexture(filter.getTextureId());
|
||||
|
||||
// This doesn't work if DecoderSurface is created on the thread that CTS started for
|
||||
// these test cases.
|
||||
//
|
||||
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
|
||||
// create a Handler that uses it. The "frame available" message is delivered
|
||||
// there, but since we're not a Looper-based thread we'll never see it. For
|
||||
// this to do anything useful, DecoderSurface must be created on a thread without
|
||||
// a Looper, so that SurfaceTexture uses the main application Looper instead.
|
||||
//
|
||||
// Java language note: passing "this" out of a constructor is generally unwise,
|
||||
// but we should be able to get away with it here.
|
||||
|
||||
filter.setup();
|
||||
framebufferObject = new GlFramebufferObject();
|
||||
normalShader = new GlFilter();
|
||||
normalShader.setup();
|
||||
|
||||
final int[] args = new int[1];
|
||||
|
||||
GLES20.glGenTextures(args.length, args, 0);
|
||||
texName = args[0];
|
||||
|
||||
// create the SurfaceTexture
|
||||
previewTexture = new GlSurfaceTexture(texName);
|
||||
previewTexture.setOnFrameAvailableListener(this);
|
||||
surface = new Surface(previewTexture.getSurfaceTexture());
|
||||
|
||||
GLES20.glBindTexture(previewTexture.getTextureTarget(), texName);
|
||||
// GL_TEXTURE_EXTERNAL_OES
|
||||
//OpenGlUtils.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);
|
||||
EglUtil.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);
|
||||
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
// GL_TEXTURE_EXTERNAL_OES
|
||||
previewShader = new GlPreviewFilter(previewTexture.getTextureTarget());
|
||||
previewShader.setup();
|
||||
filterFramebufferObject = new GlFramebufferObject();
|
||||
|
||||
|
||||
Matrix.setLookAtM(VMatrix, 0,
|
||||
0.0f, 0.0f, 5.0f,
|
||||
0.0f, 0.0f, 0.0f,
|
||||
0.0f, 1.0f, 0.0f
|
||||
);
|
||||
|
||||
GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Discard all resources held by this class, notably the EGL context.
|
||||
*/
|
||||
void release() {
|
||||
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
|
||||
EGL14.eglDestroySurface(eglDisplay, eglSurface);
|
||||
EGL14.eglDestroyContext(eglDisplay, eglContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
}
|
||||
surface.release();
|
||||
previewTexture.release();
|
||||
// this causes a bunch of warnings that appear harmless but might confuse someone:
|
||||
// W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
|
||||
//surfaceTexture.release();
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
filter.release();
|
||||
filter = null;
|
||||
surface = null;
|
||||
previewTexture = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Surface that we draw onto.
|
||||
*/
|
||||
Surface getSurface() {
|
||||
return surface;
|
||||
}
|
||||
|
||||
/**
|
||||
* Latches the next buffer into the texture. Must be called from the thread that created
|
||||
* the DecoderSurface object, after the onFrameAvailable callback has signaled that new
|
||||
* data is available.
|
||||
*/
|
||||
void awaitNewImage() {
|
||||
final int TIMEOUT_MS = 10000;
|
||||
synchronized (frameSyncObject) {
|
||||
while (!frameAvailable) {
|
||||
try {
|
||||
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
|
||||
// stalling the test if it doesn't arrive.
|
||||
frameSyncObject.wait(TIMEOUT_MS);
|
||||
if (!frameAvailable) {
|
||||
// TODO: if "spurious wakeup", continue while loop
|
||||
throw new RuntimeException("Surface frame wait timed out");
|
||||
}
|
||||
} catch (InterruptedException ie) {
|
||||
// shouldn't happen
|
||||
throw new RuntimeException(ie);
|
||||
}
|
||||
}
|
||||
frameAvailable = false;
|
||||
}
|
||||
// Latch the data.
|
||||
// GlUtils.checkGlError("before updateTexImage");
|
||||
previewTexture.updateTexImage();
|
||||
previewTexture.getTransformMatrix(STMatrix);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Draws the data from SurfaceTexture onto the current EGL surface.
|
||||
*/
|
||||
void drawImage() {
|
||||
|
||||
framebufferObject.enable();
|
||||
GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());
|
||||
|
||||
|
||||
if (filter != null) {
|
||||
filterFramebufferObject.enable();
|
||||
GLES20.glViewport(0, 0, filterFramebufferObject.getWidth(), filterFramebufferObject.getHeight());
|
||||
}
|
||||
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
Matrix.multiplyMM(MVPMatrix, 0, VMatrix, 0, MMatrix, 0);
|
||||
Matrix.multiplyMM(MVPMatrix, 0, ProjMatrix, 0, MVPMatrix, 0);
|
||||
|
||||
float scaleDirectionX = flipHorizontal ? -1 : 1;
|
||||
float scaleDirectionY = flipVertical ? -1 : 1;
|
||||
|
||||
float[] scale;
|
||||
switch (fillMode) {
|
||||
case PRESERVE_ASPECT_FIT:
|
||||
scale = FillMode.getScaleAspectFit(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());
|
||||
|
||||
// Log.d(TAG, "scale[0] = " + scale[0] + " scale[1] = " + scale[1]);
|
||||
|
||||
Matrix.scaleM(MVPMatrix, 0, scale[0] * scaleDirectionX, scale[1] * scaleDirectionY, 1);
|
||||
if (rotation != Rotation.NORMAL) {
|
||||
Matrix.rotateM(MVPMatrix, 0, -rotation.getRotation(), 0.f, 0.f, 1.f);
|
||||
}
|
||||
break;
|
||||
case PRESERVE_ASPECT_CROP:
|
||||
scale = FillMode.getScaleAspectCrop(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());
|
||||
Matrix.scaleM(MVPMatrix, 0, scale[0] * scaleDirectionX, scale[1] * scaleDirectionY, 1);
|
||||
if (rotation != Rotation.NORMAL) {
|
||||
Matrix.rotateM(MVPMatrix, 0, -rotation.getRotation(), 0.f, 0.f, 1.f);
|
||||
}
|
||||
break;
|
||||
case CUSTOM:
|
||||
if (fillModeCustomItem != null) {
|
||||
Matrix.translateM(MVPMatrix, 0, fillModeCustomItem.getTranslateX(), -fillModeCustomItem.getTranslateY(), 0f);
|
||||
scale = FillMode.getScaleAspectCrop(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());
|
||||
|
||||
if (fillModeCustomItem.getRotate() == 0 || fillModeCustomItem.getRotate() == 180) {
|
||||
Matrix.scaleM(MVPMatrix,
|
||||
0,
|
||||
fillModeCustomItem.getScale() * scale[0] * scaleDirectionX,
|
||||
fillModeCustomItem.getScale() * scale[1] * scaleDirectionY,
|
||||
1);
|
||||
} else {
|
||||
Matrix.scaleM(MVPMatrix,
|
||||
0,
|
||||
fillModeCustomItem.getScale() * scale[0] * (1 / fillModeCustomItem.getVideoWidth() * fillModeCustomItem.getVideoHeight()) * scaleDirectionX,
|
||||
fillModeCustomItem.getScale() * scale[1] * (fillModeCustomItem.getVideoWidth() / fillModeCustomItem.getVideoHeight()) * scaleDirectionY,
|
||||
1);
|
||||
}
|
||||
|
||||
Matrix.rotateM(MVPMatrix, 0, -(rotation.getRotation() + fillModeCustomItem.getRotate()), 0.f, 0.f, 1.f);
|
||||
|
||||
// Log.d(TAG, "inputResolution = " + inputResolution.getWidth() + " height = " + inputResolution.getHeight());
|
||||
// Log.d(TAG, "out = " + outputResolution.getWidth() + " height = " + outputResolution.getHeight());
|
||||
// Log.d(TAG, "rotation = " + rotation.getRotation());
|
||||
// Log.d(TAG, "scale[0] = " + scale[0] + " scale[1] = " + scale[1]);
|
||||
|
||||
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
previewShader.draw(texName, MVPMatrix, STMatrix, 1f);
|
||||
|
||||
if (filter != null) {
|
||||
// Re-draw what the shader already rendered, going through the FBO. It does not seem strictly necessary for drawing, though.
|
||||
framebufferObject.enable();
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT);
|
||||
filter.draw(filterFramebufferObject.getTexName(), framebufferObject);
|
||||
}
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());
|
||||
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
normalShader.draw(framebufferObject.getTexName(), null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFrameAvailable(SurfaceTexture st) {
|
||||
if (VERBOSE) Log.d(TAG, "new frame available");
|
||||
synchronized (frameSyncObject) {
|
||||
if (frameAvailable) {
|
||||
throw new RuntimeException("frameAvailable already set, frame could be dropped");
|
||||
}
|
||||
frameAvailable = true;
|
||||
frameSyncObject.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
void setRotation(Rotation rotation) {
|
||||
this.rotation = rotation;
|
||||
}
|
||||
|
||||
|
||||
void setOutputResolution(Size resolution) {
|
||||
this.outputResolution = resolution;
|
||||
}
|
||||
|
||||
void setFillMode(FillMode fillMode) {
|
||||
this.fillMode = fillMode;
|
||||
}
|
||||
|
||||
void setInputResolution(Size resolution) {
|
||||
this.inputResolution = resolution;
|
||||
}
|
||||
|
||||
void setFillModeCustomItem(FillModeCustomItem fillModeCustomItem) {
|
||||
this.fillModeCustomItem = fillModeCustomItem;
|
||||
}
|
||||
|
||||
void setFlipVertical(boolean flipVertical) {
|
||||
this.flipVertical = flipVertical;
|
||||
}
|
||||
|
||||
void setFlipHorizontal(boolean flipHorizontal) {
|
||||
this.flipHorizontal = flipHorizontal;
|
||||
}
|
||||
|
||||
void completeParams() {
|
||||
int width = outputResolution.getWidth();
|
||||
int height = outputResolution.getHeight();
|
||||
framebufferObject.setup(width, height);
|
||||
normalShader.setFrameSize(width, height);
|
||||
|
||||
filterFramebufferObject.setup(width, height);
|
||||
previewShader.setFrameSize(width, height);
|
||||
// MCLog.d("onSurfaceChanged width = " + width + " height = " + height + " aspectRatio = " + scaleRatio);
|
||||
Matrix.frustumM(ProjMatrix, 0, -1f, 1f, -1, 1, 5, 7);
|
||||
Matrix.setIdentityM(MMatrix, 0);
|
||||
|
||||
if (filter != null) {
|
||||
filter.setFrameSize(width, height);
|
||||
}
|
||||
|
||||
}
|
||||
}
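// Illustrative sketch (added for clarity, not part of the original commit): how the transcoding
// loop is expected to drive DecoderSurface for each decoded frame. The variables decoder,
// decoderSurface, encoderSurface and bufferInfo are placeholders owned by the caller
// (see VideoComposer further below in this commit).
//
//     decoder.releaseOutputBuffer(index, true);                    // render to our Surface
//     decoderSurface.awaitNewImage();                              // wait for onFrameAvailable
//     decoderSurface.drawImage();                                  // draw through the GlFilter chain
//     encoderSurface.setPresentationTime(bufferInfo.presentationTimeUs * 1000L);
//     encoderSurface.swapBuffers();                                // hand the frame to the encoder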
|
@ -0,0 +1,141 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.view.Surface;
|
||||
|
||||
// Refer : https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/InputSurface.java
|
||||
|
||||
/**
|
||||
* Holds state associated with a Surface used for MediaCodec encoder input.
|
||||
* <p>
|
||||
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
|
||||
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
|
||||
* to the video encoder.
|
||||
*/
|
||||
class EncoderSurface {
|
||||
|
||||
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
private Surface surface;
|
||||
|
||||
/**
|
||||
* Creates an EncoderSurface from a Surface.
|
||||
*/
|
||||
EncoderSurface(Surface surface) {
|
||||
if (surface == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
this.surface = surface;
|
||||
eglSetup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
|
||||
*/
|
||||
private void eglSetup() {
|
||||
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("unable to get EGL14 display");
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
|
||||
eglDisplay = null;
|
||||
throw new RuntimeException("unable to initialize EGL14");
|
||||
}
|
||||
// Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
|
||||
// to minimize artifacts from possible YUV conversion.
|
||||
int[] attribList = {
|
||||
EGL14.EGL_RED_SIZE, 8,
|
||||
EGL14.EGL_GREEN_SIZE, 8,
|
||||
EGL14.EGL_BLUE_SIZE, 8,
|
||||
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
|
||||
EGL_RECORDABLE_ANDROID, 1,
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length,
|
||||
numConfigs, 0)) {
|
||||
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
|
||||
}
|
||||
// Configure context for OpenGL ES 2.0.
|
||||
int[] attrib_list = {
|
||||
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
|
||||
attrib_list, 0);
|
||||
checkEglError("eglCreateContext");
|
||||
if (eglContext == null) {
|
||||
throw new RuntimeException("null context");
|
||||
}
|
||||
// Create a window surface, and attach it to the Surface we received.
|
||||
int[] surfaceAttribs = {
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface,
|
||||
surfaceAttribs, 0);
|
||||
checkEglError("eglCreateWindowSurface");
|
||||
if (eglSurface == null) {
|
||||
throw new RuntimeException("surface was null");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Discard all resources held by this class, notably the EGL context. Also releases the
|
||||
* Surface that was passed to our constructor.
|
||||
*/
|
||||
public void release() {
|
||||
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
|
||||
EGL14.eglDestroySurface(eglDisplay, eglSurface);
|
||||
EGL14.eglDestroyContext(eglDisplay, eglContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
}
|
||||
surface.release();
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
surface = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes our EGL context and surface current.
|
||||
*/
|
||||
void makeCurrent() {
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException("eglMakeCurrent failed");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls eglSwapBuffers. Use this to "publish" the current frame.
|
||||
*/
|
||||
void swapBuffers() {
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
|
||||
*/
|
||||
void setPresentationTime(long nsecs) {
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, nsecs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks for EGL errors.
|
||||
*/
|
||||
private void checkEglError(String msg) {
|
||||
int error;
|
||||
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
|
||||
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
|
||||
}
|
||||
}
|
||||
}
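// Illustrative sketch (added for clarity, not part of the original commit): expected lifecycle
// of EncoderSurface. `encoder` is a placeholder for a configured video MediaCodec encoder.
//
//     EncoderSurface encoderSurface = new EncoderSurface(encoder.createInputSurface());
//     encoderSurface.makeCurrent();            // before any GL drawing targeted at this encoder
//     encoder.start();
//     // ... for every rendered frame:
//     encoderSurface.setPresentationTime(presentationTimeUs * 1000L);  // EGL expects nanoseconds
//     encoderSurface.swapBuffers();            // submits the frame to the encoder
//     // ... when finished:
//     encoderSurface.release();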
|
@ -0,0 +1,51 @@
|
||||
package com.xypower.gpuv.composer;

public enum FillMode {
    PRESERVE_ASPECT_FIT,
    PRESERVE_ASPECT_CROP,
    CUSTOM;

    public static float[] getScaleAspectFit(int angle, int widthIn, int heightIn, int widthOut, int heightOut) {
        final float[] scale = {1, 1};
        if (angle == 90 || angle == 270) {
            int cx = widthIn;
            widthIn = heightIn;
            heightIn = cx;
        }

        float aspectRatioIn = (float) widthIn / (float) heightIn;
        float heightOutCalculated = (float) widthOut / aspectRatioIn;

        if (heightOutCalculated < heightOut) {
            scale[1] = heightOutCalculated / heightOut;
        } else {
            scale[0] = heightOut * aspectRatioIn / widthOut;
        }

        return scale;
    }

    public static float[] getScaleAspectCrop(int angle, int widthIn, int heightIn, int widthOut, int heightOut) {
        final float[] scale = {1, 1};
        if (angle == 90 || angle == 270) {
            int cx = widthIn;
            widthIn = heightIn;
            heightIn = cx;
        }

        float aspectRatioIn = (float) widthIn / (float) heightIn;
        float aspectRatioOut = (float) widthOut / (float) heightOut;

        if (aspectRatioIn > aspectRatioOut) {
            float widthOutCalculated = (float) heightOut * aspectRatioIn;
            scale[0] = widthOutCalculated / widthOut;
        } else {
            float heightOutCalculated = (float) widthOut / aspectRatioIn;
            scale[1] = heightOutCalculated / heightOut;
        }

        return scale;
    }
}
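// Worked example (added for clarity, not part of the original commit): fitting a 1920x1080
// landscape source into a 720x1280 portrait output with PRESERVE_ASPECT_FIT and no rotation.
//
//     float[] s = FillMode.getScaleAspectFit(0, 1920, 1080, 720, 1280);
//     // aspectRatioIn = 1920 / 1080 ≈ 1.778
//     // heightOutCalculated = 720 / 1.778 = 405, which is < 1280
//     // => s = {1.0f, 405f / 1280f ≈ 0.316f}   (video is letterboxed vertically)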
|
@ -0,0 +1,83 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.os.Parcel;
|
||||
import android.os.Parcelable;
|
||||
|
||||
public class FillModeCustomItem implements Parcelable {
|
||||
private final float scale;
|
||||
private final float rotate;
|
||||
private final float translateX;
|
||||
private final float translateY;
|
||||
private final float videoWidth;
|
||||
private final float videoHeight;
|
||||
|
||||
public FillModeCustomItem(float scale, float rotate, float translateX, float translateY, float videoWidth, float videoHeight) {
|
||||
this.scale = scale;
|
||||
this.rotate = rotate;
|
||||
this.translateX = translateX;
|
||||
this.translateY = translateY;
|
||||
this.videoWidth = videoWidth;
|
||||
this.videoHeight = videoHeight;
|
||||
}
|
||||
|
||||
public float getScale() {
|
||||
return scale;
|
||||
}
|
||||
|
||||
public float getRotate() {
|
||||
return rotate;
|
||||
}
|
||||
|
||||
public float getTranslateX() {
|
||||
return translateX;
|
||||
}
|
||||
|
||||
public float getTranslateY() {
|
||||
return translateY;
|
||||
}
|
||||
|
||||
public float getVideoWidth() {
|
||||
return videoWidth;
|
||||
}
|
||||
|
||||
public float getVideoHeight() {
|
||||
return videoHeight;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int describeContents() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeToParcel(Parcel dest, int flags) {
|
||||
dest.writeFloat(this.scale);
|
||||
dest.writeFloat(this.rotate);
|
||||
dest.writeFloat(this.translateX);
|
||||
dest.writeFloat(this.translateY);
|
||||
dest.writeFloat(this.videoWidth);
|
||||
dest.writeFloat(this.videoHeight);
|
||||
}
|
||||
|
||||
protected FillModeCustomItem(Parcel in) {
|
||||
this.scale = in.readFloat();
|
||||
this.rotate = in.readFloat();
|
||||
this.translateX = in.readFloat();
|
||||
this.translateY = in.readFloat();
|
||||
this.videoWidth = in.readFloat();
|
||||
this.videoHeight = in.readFloat();
|
||||
}
|
||||
|
||||
public static final Parcelable.Creator<FillModeCustomItem> CREATOR = new Parcelable.Creator<FillModeCustomItem>() {
|
||||
@Override
|
||||
public FillModeCustomItem createFromParcel(Parcel source) {
|
||||
return new FillModeCustomItem(source);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FillModeCustomItem[] newArray(int size) {
|
||||
return new FillModeCustomItem[size];
|
||||
}
|
||||
};
|
||||
}
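// Illustrative sketch (added for clarity, not part of the original commit): because
// FillModeCustomItem is Parcelable it can be handed to another component via an Intent.
// The extra key below is a placeholder, not something defined by this library.
//
//     FillModeCustomItem item = new FillModeCustomItem(1.0f, 90f, 0f, 0f, 1080f, 1920f);
//     intent.putExtra("fill_mode_custom_item", item);
//     // ... and read back with:
//     FillModeCustomItem restored = intent.getParcelableExtra("fill_mode_custom_item");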
|
||||
|
@ -0,0 +1,322 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.content.Context;
|
||||
import android.media.MediaMetadataRetriever;
|
||||
import android.net.Uri;
|
||||
import android.util.Log;
|
||||
import android.util.Size;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
|
||||
public class GPUMp4Composer {
|
||||
|
||||
private final static String TAG = GPUMp4Composer.class.getSimpleName();
|
||||
|
||||
private Context context;
|
||||
private final String srcPath;
|
||||
private final String destPath;
|
||||
private GlFilter filter;
|
||||
private Size outputResolution;
|
||||
private int bitrate = -1;
|
||||
private boolean mute = false;
|
||||
private Rotation rotation = Rotation.NORMAL;
|
||||
private Listener listener;
|
||||
private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
|
||||
private FillModeCustomItem fillModeCustomItem;
|
||||
private int timeScale = 1;
|
||||
private boolean flipVertical = false;
|
||||
private boolean flipHorizontal = false;
|
||||
|
||||
private ExecutorService executorService;
|
||||
|
||||
|
||||
public GPUMp4Composer(final String srcPath, final String destPath) {
|
||||
this.srcPath = srcPath;
|
||||
this.destPath = destPath;
|
||||
}
|
||||
|
||||
public GPUMp4Composer(final Context context, final String srcPath, final String destPath) {
|
||||
this.context = context;
|
||||
this.srcPath = srcPath;
|
||||
this.destPath = destPath;
|
||||
}
|
||||
|
||||
public GPUMp4Composer filter(GlFilter filter) {
|
||||
this.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer size(int width, int height) {
|
||||
this.outputResolution = new Size(width, height);
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer videoBitrate(int bitrate) {
|
||||
this.bitrate = bitrate;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer mute(boolean mute) {
|
||||
this.mute = mute;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer flipVertical(boolean flipVertical) {
|
||||
this.flipVertical = flipVertical;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer flipHorizontal(boolean flipHorizontal) {
|
||||
this.flipHorizontal = flipHorizontal;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer rotation(Rotation rotation) {
|
||||
this.rotation = rotation;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer fillMode(FillMode fillMode) {
|
||||
this.fillMode = fillMode;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer customFillMode(FillModeCustomItem fillModeCustomItem) {
|
||||
this.fillModeCustomItem = fillModeCustomItem;
|
||||
this.fillMode = FillMode.CUSTOM;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GPUMp4Composer listener(Listener listener) {
|
||||
this.listener = listener;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GPUMp4Composer timeScale(final int timeScale) {
|
||||
this.timeScale = timeScale;
|
||||
return this;
|
||||
}
|
||||
|
||||
private ExecutorService getExecutorService() {
|
||||
if (executorService == null) {
|
||||
executorService = Executors.newSingleThreadExecutor();
|
||||
}
|
||||
return executorService;
|
||||
}
|
||||
|
||||
|
||||
public GPUMp4Composer start() {
|
||||
getExecutorService().execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
GPUMp4ComposerEngine engine = new GPUMp4ComposerEngine();
|
||||
|
||||
engine.setProgressCallback(new GPUMp4ComposerEngine.ProgressCallback() {
|
||||
@Override
|
||||
public void onProgress(final double progress) {
|
||||
if (listener != null) {
|
||||
listener.onProgress(progress);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
final File srcFile = new File(srcPath);
|
||||
final FileInputStream fileInputStream;
|
||||
try {
|
||||
if (srcPath.contains("content:/")) {
|
||||
fileInputStream = (FileInputStream) context.getContentResolver().openInputStream(Uri.parse(srcPath));
|
||||
} else {
|
||||
fileInputStream = new FileInputStream(srcFile);
|
||||
}
|
||||
} catch (FileNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
if (listener != null) {
|
||||
listener.onFailed(e);
|
||||
}
|
||||
return;
|
||||
} catch (NullPointerException e) {
|
||||
Log.e(TAG, "Must have a context when use ScopedStorage");
|
||||
e.printStackTrace();
|
||||
if (listener != null) {
|
||||
listener.onFailed(e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
engine.setDataSource(fileInputStream.getFD());
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
if (listener != null) {
|
||||
listener.onFailed(e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
final int videoRotate = getVideoRotation(srcPath);
|
||||
final Size srcVideoResolution = getVideoResolution(srcPath, videoRotate);
|
||||
|
||||
if (filter == null) {
|
||||
filter = new GlFilter();
|
||||
}
|
||||
|
||||
if (fillMode == null) {
|
||||
fillMode = FillMode.PRESERVE_ASPECT_FIT;
|
||||
}
|
||||
|
||||
if (fillModeCustomItem != null) {
|
||||
fillMode = FillMode.CUSTOM;
|
||||
}
|
||||
|
||||
if (outputResolution == null) {
|
||||
if (fillMode == FillMode.CUSTOM) {
|
||||
outputResolution = srcVideoResolution;
|
||||
} else {
|
||||
Rotation rotate = Rotation.fromInt(rotation.getRotation() + videoRotate);
|
||||
if (rotate == Rotation.ROTATION_90 || rotate == Rotation.ROTATION_270) {
|
||||
outputResolution = new Size(srcVideoResolution.getHeight(), srcVideoResolution.getWidth());
|
||||
} else {
|
||||
outputResolution = srcVideoResolution;
|
||||
}
|
||||
}
|
||||
}
|
||||
// if (filter instanceof IResolutionFilter) {
|
||||
// ((IResolutionFilter) filter).setResolution(outputResolution);
|
||||
// }
|
||||
|
||||
if (timeScale < 2) {
|
||||
timeScale = 1;
|
||||
}
|
||||
|
||||
Log.d(TAG, "rotation = " + (rotation.getRotation() + videoRotate));
|
||||
Log.d(TAG, "inputResolution width = " + srcVideoResolution.getWidth() + " height = " + srcVideoResolution.getHeight());
|
||||
Log.d(TAG, "outputResolution width = " + outputResolution.getWidth() + " height = " + outputResolution.getHeight());
|
||||
Log.d(TAG, "fillMode = " + fillMode);
|
||||
|
||||
try {
|
||||
if (bitrate < 0) {
|
||||
bitrate = calcBitRate(outputResolution.getWidth(), outputResolution.getHeight());
|
||||
}
|
||||
engine.compose(
|
||||
destPath,
|
||||
outputResolution,
|
||||
filter,
|
||||
bitrate,
|
||||
mute,
|
||||
Rotation.fromInt(rotation.getRotation() + videoRotate),
|
||||
srcVideoResolution,
|
||||
fillMode,
|
||||
fillModeCustomItem,
|
||||
timeScale,
|
||||
flipVertical,
|
||||
flipHorizontal
|
||||
);
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
if (listener != null) {
|
||||
listener.onFailed(e);
|
||||
}
|
||||
executorService.shutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
if (listener != null) {
|
||||
listener.onCompleted();
|
||||
}
|
||||
executorService.shutdown();
|
||||
}
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public void cancel() {
|
||||
getExecutorService().shutdownNow();
|
||||
}
|
||||
|
||||
|
||||
public interface Listener {
|
||||
/**
|
||||
* Called to notify progress.
|
||||
*
|
||||
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
|
||||
*/
|
||||
void onProgress(double progress);
|
||||
|
||||
/**
|
||||
* Called when transcode completed.
|
||||
*/
|
||||
void onCompleted();
|
||||
|
||||
/**
|
||||
* Called when transcode canceled.
|
||||
*/
|
||||
void onCanceled();
|
||||
|
||||
|
||||
void onFailed(Exception exception);
|
||||
}
|
||||
|
||||
private int getVideoRotation(String videoFilePath) {
|
||||
MediaMetadataRetriever mediaMetadataRetriever = null;
|
||||
try {
|
||||
mediaMetadataRetriever = new MediaMetadataRetriever();
|
||||
mediaMetadataRetriever.setDataSource(videoFilePath);
|
||||
String orientation = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
|
||||
return Integer.valueOf(orientation);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Log.e("MediaMetadataRetriever", "getVideoRotation IllegalArgumentException");
|
||||
return 0;
|
||||
} catch (RuntimeException e) {
|
||||
Log.e("MediaMetadataRetriever", "getVideoRotation RuntimeException");
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
Log.e("MediaMetadataRetriever", "getVideoRotation Exception");
|
||||
return 0;
|
||||
} finally {
|
||||
try {
|
||||
if (mediaMetadataRetriever != null) {
|
||||
mediaMetadataRetriever.release();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private int calcBitRate(int width, int height) {
|
||||
final int bitrate = (int) (0.25 * 30 * width * height);
|
||||
Log.i(TAG, "bitrate=" + bitrate);
|
||||
return bitrate;
|
||||
}
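// Worked example (added for clarity, not part of the original commit): for a 1920x1080
// output, the default bitrate is 0.25 * 30 * 1920 * 1080 = 15,552,000 bps (about 15.5 Mbps).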
|
||||
|
||||
private Size getVideoResolution(final String path, final int rotation) {
|
||||
MediaMetadataRetriever retriever = null;
|
||||
try {
|
||||
retriever = new MediaMetadataRetriever();
|
||||
retriever.setDataSource(path);
|
||||
int width = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
|
||||
int height = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
|
||||
|
||||
return new Size(width, height);
|
||||
} finally {
|
||||
try {
|
||||
if (retriever != null) {
|
||||
retriever.release();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
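// Illustrative usage (added for clarity, not part of the original commit): a minimal sketch
// of how the GPUMp4Composer builder is expected to be driven. The paths, output size and
// filter below are placeholders.
//
//     new GPUMp4Composer(context, srcMp4Path, destMp4Path)
//             .size(720, 1280)
//             .fillMode(FillMode.PRESERVE_ASPECT_FIT)
//             .filter(new GlFilter())
//             .listener(new GPUMp4Composer.Listener() {
//                 @Override public void onProgress(double progress) { Log.d("Compose", "progress=" + progress); }
//                 @Override public void onCompleted() { Log.d("Compose", "completed"); }
//                 @Override public void onCanceled() { }
//                 @Override public void onFailed(Exception e) { Log.e("Compose", "failed", e); }
//             })
//             .start();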
|
@ -0,0 +1,228 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.media.*;
|
||||
import android.util.Log;
|
||||
import android.util.Size;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.IOException;
|
||||
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
|
||||
|
||||
/**
|
||||
* Internal engine, do not use this directly.
|
||||
*/
|
||||
class GPUMp4ComposerEngine {
|
||||
private static final String TAG = "GPUMp4ComposerEngine";
|
||||
private static final double PROGRESS_UNKNOWN = -1.0;
|
||||
private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
|
||||
private static final long PROGRESS_INTERVAL_STEPS = 10;
|
||||
private FileDescriptor inputFileDescriptor;
|
||||
private VideoComposer videoComposer;
|
||||
private IAudioComposer audioComposer;
|
||||
private MediaExtractor mediaExtractor;
|
||||
private MediaMuxer mediaMuxer;
|
||||
private ProgressCallback progressCallback;
|
||||
private long durationUs;
|
||||
private MediaMetadataRetriever mediaMetadataRetriever;
|
||||
|
||||
|
||||
void setDataSource(FileDescriptor fileDescriptor) {
|
||||
inputFileDescriptor = fileDescriptor;
|
||||
}
|
||||
|
||||
void setProgressCallback(ProgressCallback progressCallback) {
|
||||
this.progressCallback = progressCallback;
|
||||
}
|
||||
|
||||
|
||||
void compose(
|
||||
final String destPath,
|
||||
final Size outputResolution,
|
||||
final GlFilter filter,
|
||||
final int bitrate,
|
||||
final boolean mute,
|
||||
final Rotation rotation,
|
||||
final Size inputResolution,
|
||||
final FillMode fillMode,
|
||||
final FillModeCustomItem fillModeCustomItem,
|
||||
final int timeScale,
|
||||
final boolean flipVertical,
|
||||
final boolean flipHorizontal
|
||||
) throws IOException {
|
||||
|
||||
|
||||
try {
|
||||
mediaExtractor = new MediaExtractor();
|
||||
mediaExtractor.setDataSource(inputFileDescriptor);
|
||||
mediaMuxer = new MediaMuxer(destPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
|
||||
mediaMetadataRetriever = new MediaMetadataRetriever();
|
||||
mediaMetadataRetriever.setDataSource(inputFileDescriptor);
|
||||
try {
|
||||
durationUs = Long.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
|
||||
} catch (NumberFormatException e) {
|
||||
durationUs = -1;
|
||||
}
|
||||
Log.d(TAG, "Duration (us): " + durationUs);
|
||||
|
||||
MediaFormat videoOutputFormat = MediaFormat.createVideoFormat("video/avc", outputResolution.getWidth(), outputResolution.getHeight());
|
||||
|
||||
videoOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
|
||||
videoOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
|
||||
videoOutputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
|
||||
videoOutputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
|
||||
|
||||
|
||||
MuxRender muxRender = new MuxRender(mediaMuxer);
|
||||
|
||||
// identify track indices
|
||||
MediaFormat format = mediaExtractor.getTrackFormat(0);
|
||||
String mime = format.getString(MediaFormat.KEY_MIME);
|
||||
|
||||
final int videoTrackIndex;
|
||||
final int audioTrackIndex;
|
||||
|
||||
if (mime.startsWith("video/")) {
|
||||
videoTrackIndex = 0;
|
||||
audioTrackIndex = 1;
|
||||
} else {
|
||||
videoTrackIndex = 1;
|
||||
audioTrackIndex = 0;
|
||||
}
|
||||
|
||||
// setup video composer
|
||||
videoComposer = new VideoComposer(mediaExtractor, videoTrackIndex, videoOutputFormat, muxRender, timeScale);
|
||||
videoComposer.setUp(filter, rotation, outputResolution, inputResolution, fillMode, fillModeCustomItem, flipVertical, flipHorizontal);
|
||||
mediaExtractor.selectTrack(videoTrackIndex);
|
||||
|
||||
// setup audio if present and not muted
|
||||
if (mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO) != null && !mute) {
|
||||
// the video has an audio track
|
||||
|
||||
if (timeScale < 2) {
|
||||
audioComposer = new AudioComposer(mediaExtractor, audioTrackIndex, muxRender);
|
||||
} else {
|
||||
audioComposer = new RemixAudioComposer(mediaExtractor, audioTrackIndex, mediaExtractor.getTrackFormat(audioTrackIndex), muxRender, timeScale);
|
||||
}
|
||||
|
||||
audioComposer.setup();
|
||||
|
||||
mediaExtractor.selectTrack(audioTrackIndex);
|
||||
|
||||
runPipelines();
|
||||
} else {
|
||||
// the video has no audio track
|
||||
runPipelinesNoAudio();
|
||||
}
|
||||
|
||||
|
||||
mediaMuxer.stop();
|
||||
} finally {
|
||||
try {
|
||||
if (videoComposer != null) {
|
||||
videoComposer.release();
|
||||
videoComposer = null;
|
||||
}
|
||||
if (audioComposer != null) {
|
||||
audioComposer.release();
|
||||
audioComposer = null;
|
||||
}
|
||||
if (mediaExtractor != null) {
|
||||
mediaExtractor.release();
|
||||
mediaExtractor = null;
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
// Too fatal to keep the app alive, because it may leak native resources.
|
||||
//noinspection ThrowFromFinallyBlock
|
||||
throw new Error("Could not shutdown mediaExtractor, codecs and mediaMuxer pipeline.", e);
|
||||
}
|
||||
try {
|
||||
if (mediaMuxer != null) {
|
||||
mediaMuxer.release();
|
||||
mediaMuxer = null;
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
Log.e(TAG, "Failed to release mediaMuxer.", e);
|
||||
}
|
||||
try {
|
||||
if (mediaMetadataRetriever != null) {
|
||||
mediaMetadataRetriever.release();
|
||||
mediaMetadataRetriever = null;
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
private void runPipelines() {
|
||||
long loopCount = 0;
|
||||
if (durationUs <= 0) {
|
||||
if (progressCallback != null) {
|
||||
progressCallback.onProgress(PROGRESS_UNKNOWN);
|
||||
}// unknown
|
||||
}
|
||||
while (!(videoComposer.isFinished() && audioComposer.isFinished())) {
|
||||
boolean stepped = videoComposer.stepPipeline()
|
||||
|| audioComposer.stepPipeline();
|
||||
loopCount++;
|
||||
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
|
||||
double videoProgress = videoComposer.isFinished() ? 1.0 : Math.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
|
||||
double audioProgress = audioComposer.isFinished() ? 1.0 : Math.min(1.0, (double) audioComposer.getWrittenPresentationTimeUs() / durationUs);
|
||||
double progress = (videoProgress + audioProgress) / 2.0;
|
||||
if (progressCallback != null) {
|
||||
progressCallback.onProgress(progress);
|
||||
}
|
||||
}
|
||||
if (!stepped) {
|
||||
try {
|
||||
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
|
||||
} catch (InterruptedException e) {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void runPipelinesNoAudio() {
|
||||
long loopCount = 0;
|
||||
if (durationUs <= 0) {
|
||||
if (progressCallback != null) {
|
||||
progressCallback.onProgress(PROGRESS_UNKNOWN);
|
||||
} // unknown
|
||||
}
|
||||
while (!videoComposer.isFinished()) {
|
||||
boolean stepped = videoComposer.stepPipeline();
|
||||
loopCount++;
|
||||
if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
|
||||
double videoProgress = videoComposer.isFinished() ? 1.0 : Math.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
|
||||
if (progressCallback != null) {
|
||||
progressCallback.onProgress(videoProgress);
|
||||
}
|
||||
}
|
||||
if (!stepped) {
|
||||
try {
|
||||
Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
|
||||
} catch (InterruptedException e) {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
interface ProgressCallback {
|
||||
/**
|
||||
* Called to notify progress. Same thread which initiated transcode is used.
|
||||
*
|
||||
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
|
||||
*/
|
||||
void onProgress(double progress);
|
||||
}
|
||||
}
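// Note (added for clarity, not part of the original commit): progress reported by this engine
// is, roughly, the written presentation time divided by the container duration, averaged over
// the video and audio pipelines:
//
//     double progress = (videoWrittenUs / (double) durationUs + audioWrittenUs / (double) durationUs) / 2.0;
//
// When METADATA_KEY_DURATION cannot be parsed, durationUs is -1 and PROGRESS_UNKNOWN (-1.0)
// is reported once instead.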
|
@ -0,0 +1,15 @@
|
||||
package com.xypower.gpuv.composer;

interface IAudioComposer {

    void setup();

    boolean stepPipeline();

    long getWrittenPresentationTimeUs();

    boolean isFinished();

    void release();
}
|
@ -0,0 +1,46 @@
|
||||
package com.xypower.gpuv.composer;

import android.media.MediaCodec;
import android.os.Build;

import java.nio.ByteBuffer;

// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/compat/MediaCodecBufferCompatWrapper.java

/**
 * A wrapper for MediaCodec that facilitates the use of API-dependent get{Input/Output}Buffer methods,
 * in order to prevent: http://stackoverflow.com/q/30646885
 */
class MediaCodecBufferCompatWrapper {

    private final MediaCodec mediaCodec;
    private final ByteBuffer[] inputBuffers;
    private final ByteBuffer[] outputBuffers;

    MediaCodecBufferCompatWrapper(MediaCodec mediaCodec) {
        this.mediaCodec = mediaCodec;

        if (Build.VERSION.SDK_INT < 21) {
            inputBuffers = mediaCodec.getInputBuffers();
            outputBuffers = mediaCodec.getOutputBuffers();
        } else {
            inputBuffers = outputBuffers = null;
        }
    }

    ByteBuffer getInputBuffer(final int index) {
        if (Build.VERSION.SDK_INT >= 21) {
            return mediaCodec.getInputBuffer(index);
        }
        return inputBuffers[index];
    }

    ByteBuffer getOutputBuffer(final int index) {
        if (Build.VERSION.SDK_INT >= 21) {
            return mediaCodec.getOutputBuffer(index);
        }
        return outputBuffers[index];
    }

}
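// Illustrative usage (added for clarity, not part of the original commit): the wrapper hides
// the API 21 split between the legacy buffer arrays and the per-index getters.
//
//     MediaCodecBufferCompatWrapper buffers = new MediaCodecBufferCompatWrapper(decoder);
//     int index = decoder.dequeueInputBuffer(timeoutUs);
//     if (index >= 0) {
//         ByteBuffer input = buffers.getInputBuffer(index);   // correct buffer on any API level
//         // fill `input`, then queue it back on `decoder`
//     }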
|
@ -0,0 +1,128 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaMuxer;
|
||||
import android.util.Log;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
|
||||
|
||||
class MuxRender {
|
||||
private static final String TAG = "MuxRender";
|
||||
private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
|
||||
private final MediaMuxer muxer;
|
||||
private MediaFormat videoFormat;
|
||||
private MediaFormat audioFormat;
|
||||
private int videoTrackIndex;
|
||||
private int audioTrackIndex;
|
||||
private ByteBuffer byteBuffer;
|
||||
private final List<SampleInfo> sampleInfoList;
|
||||
private boolean started;
|
||||
|
||||
MuxRender(MediaMuxer muxer) {
|
||||
this.muxer = muxer;
|
||||
sampleInfoList = new ArrayList<>();
|
||||
}
|
||||
|
||||
void setOutputFormat(SampleType sampleType, MediaFormat format) {
|
||||
switch (sampleType) {
|
||||
case VIDEO:
|
||||
videoFormat = format;
|
||||
break;
|
||||
case AUDIO:
|
||||
audioFormat = format;
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
}
|
||||
|
||||
void onSetOutputFormat() {
|
||||
|
||||
if (videoFormat != null && audioFormat != null) {
|
||||
|
||||
videoTrackIndex = muxer.addTrack(videoFormat);
|
||||
Log.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
|
||||
audioTrackIndex = muxer.addTrack(audioFormat);
|
||||
Log.v(TAG, "Added track #" + audioTrackIndex + " with " + audioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
|
||||
|
||||
} else if (videoFormat != null) {
|
||||
|
||||
videoTrackIndex = muxer.addTrack(videoFormat);
|
||||
Log.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
|
||||
|
||||
}
|
||||
|
||||
muxer.start();
|
||||
started = true;
|
||||
|
||||
if (byteBuffer == null) {
|
||||
byteBuffer = ByteBuffer.allocate(0);
|
||||
}
|
||||
byteBuffer.flip();
|
||||
Log.v(TAG, "Output format determined, writing " + sampleInfoList.size() +
|
||||
" samples / " + byteBuffer.limit() + " bytes to muxer.");
|
||||
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
int offset = 0;
|
||||
for (SampleInfo sampleInfo : sampleInfoList) {
|
||||
sampleInfo.writeToBufferInfo(bufferInfo, offset);
|
||||
muxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.sampleType), byteBuffer, bufferInfo);
|
||||
offset += sampleInfo.size;
|
||||
}
|
||||
sampleInfoList.clear();
|
||||
byteBuffer = null;
|
||||
|
||||
|
||||
}
|
||||
|
||||
void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
|
||||
if (started) {
|
||||
muxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
|
||||
return;
|
||||
}
|
||||
byteBuf.limit(bufferInfo.offset + bufferInfo.size);
|
||||
byteBuf.position(bufferInfo.offset);
|
||||
if (byteBuffer == null) {
|
||||
byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
|
||||
}
|
||||
byteBuffer.put(byteBuf);
|
||||
sampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
|
||||
}
|
||||
|
||||
private int getTrackIndexForSampleType(SampleType sampleType) {
|
||||
switch (sampleType) {
|
||||
case VIDEO:
|
||||
return videoTrackIndex;
|
||||
case AUDIO:
|
||||
return audioTrackIndex;
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
}
|
||||
|
||||
public enum SampleType {VIDEO, AUDIO}
|
||||
|
||||
private static class SampleInfo {
|
||||
private final SampleType sampleType;
|
||||
private final int size;
|
||||
private final long presentationTimeUs;
|
||||
private final int flags;
|
||||
|
||||
private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
|
||||
this.sampleType = sampleType;
|
||||
this.size = size;
|
||||
presentationTimeUs = bufferInfo.presentationTimeUs;
|
||||
flags = bufferInfo.flags;
|
||||
}
|
||||
|
||||
private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
|
||||
bufferInfo.set(offset, size, presentationTimeUs, flags);
|
||||
}
|
||||
}
|
||||
|
||||
}
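// Note (added for clarity, not part of the original commit): writeSampleData() buffers samples
// (in a direct buffer of up to BUFFER_SIZE = 64 KiB) until onSetOutputFormat() has been called,
// because MediaMuxer tracks must be added and the muxer started before the first real write.
// A typical sequence, with formats coming from the encoders:
//
//     muxRender.setOutputFormat(MuxRender.SampleType.VIDEO, videoFormat);
//     muxRender.setOutputFormat(MuxRender.SampleType.AUDIO, audioFormat);
//     muxRender.onSetOutputFormat();   // adds the known tracks, starts the muxer, flushes queued samples
//     muxRender.writeSampleData(MuxRender.SampleType.VIDEO, buffer, bufferInfo); // written directly from now on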
|
@ -0,0 +1,218 @@
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/AudioTrackTranscoder.java
|
||||
|
||||
|
||||
|
||||
class RemixAudioComposer implements IAudioComposer {
|
||||
private static final MuxRender.SampleType SAMPLE_TYPE = MuxRender.SampleType.AUDIO;
|
||||
|
||||
private static final int DRAIN_STATE_NONE = 0;
|
||||
private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
|
||||
private static final int DRAIN_STATE_CONSUMED = 2;
|
||||
|
||||
private final MediaExtractor extractor;
|
||||
private final MuxRender muxer;
|
||||
private long writtenPresentationTimeUs;
|
||||
|
||||
private final int trackIndex;
|
||||
private int muxCount = 1;
|
||||
|
||||
private final MediaFormat outputFormat;
|
||||
|
||||
private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
private MediaCodec decoder;
|
||||
private MediaCodec encoder;
|
||||
private MediaFormat actualOutputFormat;
|
||||
|
||||
private MediaCodecBufferCompatWrapper decoderBuffers;
|
||||
private MediaCodecBufferCompatWrapper encoderBuffers;
|
||||
|
||||
private boolean isExtractorEOS;
|
||||
private boolean isDecoderEOS;
|
||||
private boolean isEncoderEOS;
|
||||
private boolean decoderStarted;
|
||||
private boolean encoderStarted;
|
||||
|
||||
private AudioChannel audioChannel;
|
||||
private final int timeScale;
|
||||
|
||||
public RemixAudioComposer(MediaExtractor extractor, int trackIndex,
|
||||
MediaFormat outputFormat, MuxRender muxer, int timeScale) {
|
||||
this.extractor = extractor;
|
||||
this.trackIndex = trackIndex;
|
||||
this.outputFormat = outputFormat;
|
||||
this.muxer = muxer;
|
||||
this.timeScale = timeScale;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() {
|
||||
extractor.selectTrack(trackIndex);
|
||||
try {
|
||||
encoder = MediaCodec.createEncoderByType(outputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
encoder.start();
|
||||
encoderStarted = true;
|
||||
encoderBuffers = new MediaCodecBufferCompatWrapper(encoder);
|
||||
|
||||
final MediaFormat inputFormat = extractor.getTrackFormat(trackIndex);
|
||||
try {
|
||||
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
decoder.configure(inputFormat, null, null, 0);
|
||||
decoder.start();
|
||||
decoderStarted = true;
|
||||
decoderBuffers = new MediaCodecBufferCompatWrapper(decoder);
|
||||
|
||||
audioChannel = new AudioChannel(decoder, encoder, outputFormat);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean stepPipeline() {
|
||||
boolean busy = false;
|
||||
|
||||
int status;
|
||||
while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
|
||||
do {
|
||||
status = drainDecoder(0);
|
||||
if (status != DRAIN_STATE_NONE) busy = true;
|
||||
// NOTE: not repeating to keep from deadlock when encoder is full.
|
||||
} while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
|
||||
|
||||
while (audioChannel.feedEncoder(0)) busy = true;
|
||||
while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;
|
||||
|
||||
return busy;
|
||||
}
|
||||
|
||||
private int drainExtractor(long timeoutUs) {
|
||||
if (isExtractorEOS) return DRAIN_STATE_NONE;
|
||||
int trackIndex = extractor.getSampleTrackIndex();
|
||||
if (trackIndex >= 0 && trackIndex != this.trackIndex) {
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
|
||||
final int result = decoder.dequeueInputBuffer(timeoutUs);
|
||||
if (result < 0) return DRAIN_STATE_NONE;
|
||||
if (trackIndex < 0) {
|
||||
isExtractorEOS = true;
|
||||
decoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
|
||||
final int sampleSize = extractor.readSampleData(decoderBuffers.getInputBuffer(result), 0);
|
||||
final boolean isKeyFrame = (extractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
|
||||
decoder.queueInputBuffer(result, 0, sampleSize, extractor.getSampleTime(), isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
|
||||
extractor.advance();
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainDecoder(long timeoutUs) {
|
||||
if (isDecoderEOS) return DRAIN_STATE_NONE;
|
||||
|
||||
int result = decoder.dequeueOutputBuffer(bufferInfo, timeoutUs);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
audioChannel.setActualDecodedFormat(decoder.getOutputFormat());
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
isDecoderEOS = true;
|
||||
audioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
|
||||
} else if (bufferInfo.size > 0) {
|
||||
audioChannel.drainDecoderBufferAndQueue(result, bufferInfo.presentationTimeUs / timeScale);
|
||||
}
|
||||
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainEncoder(long timeoutUs) {
|
||||
if (isEncoderEOS) return DRAIN_STATE_NONE;
|
||||
|
||||
int result = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
if (actualOutputFormat != null) {
|
||||
throw new RuntimeException("Audio output format changed twice.");
|
||||
}
|
||||
actualOutputFormat = encoder.getOutputFormat();
|
||||
muxer.setOutputFormat(SAMPLE_TYPE, actualOutputFormat);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
encoderBuffers = new MediaCodecBufferCompatWrapper(encoder);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
|
||||
if (actualOutputFormat == null) {
|
||||
throw new RuntimeException("Could not determine actual output format.");
|
||||
}
|
||||
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
isEncoderEOS = true;
|
||||
bufferInfo.set(0, 0, 0, bufferInfo.flags);
|
||||
}
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
|
||||
// SPS or PPS, which should be passed by MediaFormat.
|
||||
encoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
|
||||
if (muxCount == 1) {
|
||||
muxer.writeSampleData(SAMPLE_TYPE, encoderBuffers.getOutputBuffer(result), bufferInfo);
|
||||
}
|
||||
if (muxCount < timeScale) {
|
||||
muxCount++;
|
||||
} else {
|
||||
muxCount = 1;
|
||||
}
|
||||
|
||||
writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
|
||||
encoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public long getWrittenPresentationTimeUs() {
|
||||
return writtenPresentationTimeUs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFinished() {
|
||||
return isEncoderEOS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (decoder != null) {
|
||||
if (decoderStarted) decoder.stop();
|
||||
decoder.release();
|
||||
decoder = null;
|
||||
}
|
||||
if (encoder != null) {
|
||||
if (encoderStarted) encoder.stop();
|
||||
encoder.release();
|
||||
encoder = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
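// Note (added for clarity, not part of the original commit): when timeScale > 1 this composer
// speeds audio up by decimation rather than resampling. Decoded presentation times are divided
// by timeScale (see drainDecoder()), and only one out of every `timeScale` encoded buffers is
// actually written to the muxer (the muxCount counter in drainEncoder()).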
|
@ -0,0 +1,27 @@
|
||||
package com.xypower.gpuv.composer;

public enum Rotation {
    NORMAL(0),
    ROTATION_90(90),
    ROTATION_180(180),
    ROTATION_270(270);

    private final int rotation;

    Rotation(int rotation) {
        this.rotation = rotation;
    }

    public int getRotation() {
        return rotation;
    }

    public static Rotation fromInt(int rotate) {
        for (Rotation rotation : Rotation.values()) {
            if (rotate == rotation.getRotation()) return rotation;
        }

        return NORMAL;
    }

}
|
@ -0,0 +1,238 @@
|
||||
|
||||
package com.xypower.gpuv.composer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
import android.util.Size;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
// Refer: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java
|
||||
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java
|
||||
class VideoComposer {
|
||||
private static final String TAG = "VideoComposer";
|
||||
private static final int DRAIN_STATE_NONE = 0;
|
||||
private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
|
||||
private static final int DRAIN_STATE_CONSUMED = 2;
|
||||
|
||||
private final MediaExtractor mediaExtractor;
|
||||
private final int trackIndex;
|
||||
private final MediaFormat outputFormat;
|
||||
private final MuxRender muxRender;
|
||||
private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
private MediaCodec decoder;
|
||||
private MediaCodec encoder;
|
||||
private ByteBuffer[] decoderInputBuffers;
|
||||
private ByteBuffer[] encoderOutputBuffers;
|
||||
private MediaFormat actualOutputFormat;
|
||||
private DecoderSurface decoderSurface;
|
||||
private EncoderSurface encoderSurface;
|
||||
private boolean isExtractorEOS;
|
||||
private boolean isDecoderEOS;
|
||||
private boolean isEncoderEOS;
|
||||
private boolean decoderStarted;
|
||||
private boolean encoderStarted;
|
||||
private long writtenPresentationTimeUs;
|
||||
private final int timeScale;
|
||||
|
||||
VideoComposer(MediaExtractor mediaExtractor, int trackIndex,
|
||||
MediaFormat outputFormat, MuxRender muxRender, int timeScale) {
|
||||
this.mediaExtractor = mediaExtractor;
|
||||
this.trackIndex = trackIndex;
|
||||
this.outputFormat = outputFormat;
|
||||
this.muxRender = muxRender;
|
||||
this.timeScale = timeScale;
|
||||
}
|
||||
|
||||
|
||||
void setUp(GlFilter filter,
|
||||
Rotation rotation,
|
||||
Size outputResolution,
|
||||
Size inputResolution,
|
||||
FillMode fillMode,
|
||||
FillModeCustomItem fillModeCustomItem,
|
||||
final boolean flipVertical,
|
||||
final boolean flipHorizontal) {
|
||||
mediaExtractor.selectTrack(trackIndex);
|
||||
try {
|
||||
encoder = MediaCodec.createEncoderByType(outputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
encoderSurface = new EncoderSurface(encoder.createInputSurface());
|
||||
encoderSurface.makeCurrent();
|
||||
encoder.start();
|
||||
encoderStarted = true;
|
||||
encoderOutputBuffers = encoder.getOutputBuffers();
|
||||
|
||||
MediaFormat inputFormat = mediaExtractor.getTrackFormat(trackIndex);
|
||||
if (inputFormat.containsKey("rotation-degrees")) {
|
||||
// Decoded video is rotated automatically in Android 5.0 lollipop.
|
||||
// Turn off here because we don't want to encode rotated one.
|
||||
// refer: https://android.googlesource.com/platform/frameworks/av/+blame/lollipop-release/media/libstagefright/Utils.cpp
|
||||
inputFormat.setInteger("rotation-degrees", 0);
|
||||
}
|
||||
decoderSurface = new DecoderSurface(filter);
|
||||
decoderSurface.setRotation(rotation);
|
||||
decoderSurface.setOutputResolution(outputResolution);
|
||||
decoderSurface.setInputResolution(inputResolution);
|
||||
decoderSurface.setFillMode(fillMode);
|
||||
decoderSurface.setFillModeCustomItem(fillModeCustomItem);
|
||||
decoderSurface.setFlipHorizontal(flipHorizontal);
|
||||
decoderSurface.setFlipVertical(flipVertical);
|
||||
decoderSurface.completeParams();
|
||||
|
||||
try {
|
||||
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
decoder.configure(inputFormat, decoderSurface.getSurface(), null, 0);
|
||||
decoder.start();
|
||||
decoderStarted = true;
|
||||
decoderInputBuffers = decoder.getInputBuffers();
|
||||
}
|
||||
|
||||
|
||||
boolean stepPipeline() {
|
||||
boolean busy = false;
|
||||
|
||||
int status;
|
||||
while (drainEncoder() != DRAIN_STATE_NONE) {
|
||||
busy = true;
|
||||
}
|
||||
do {
|
||||
status = drainDecoder();
|
||||
if (status != DRAIN_STATE_NONE) {
|
||||
busy = true;
|
||||
}
|
||||
// NOTE: not repeating to keep from deadlock when encoder is full.
|
||||
} while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
|
||||
while (drainExtractor() != DRAIN_STATE_NONE) {
|
||||
busy = true;
|
||||
}
|
||||
|
||||
return busy;
|
||||
}
|
||||
|
||||
|
||||
long getWrittenPresentationTimeUs() {
|
||||
return writtenPresentationTimeUs;
|
||||
}
|
||||
|
||||
|
||||
boolean isFinished() {
|
||||
return isEncoderEOS;
|
||||
}
|
||||
|
||||
|
||||
void release() {
|
||||
if (decoderSurface != null) {
|
||||
decoderSurface.release();
|
||||
decoderSurface = null;
|
||||
}
|
||||
if (encoderSurface != null) {
|
||||
encoderSurface.release();
|
||||
encoderSurface = null;
|
||||
}
|
||||
if (decoder != null) {
|
||||
if (decoderStarted) decoder.stop();
|
||||
decoder.release();
|
||||
decoder = null;
|
||||
}
|
||||
if (encoder != null) {
|
||||
if (encoderStarted) encoder.stop();
|
||||
encoder.release();
|
||||
encoder = null;
|
||||
}
|
||||
}
|
||||
|
||||
private int drainExtractor() {
|
||||
if (isExtractorEOS) return DRAIN_STATE_NONE;
|
||||
int trackIndex = mediaExtractor.getSampleTrackIndex();
|
||||
if (trackIndex >= 0 && trackIndex != this.trackIndex) {
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
int result = decoder.dequeueInputBuffer(0);
|
||||
if (result < 0) return DRAIN_STATE_NONE;
|
||||
if (trackIndex < 0) {
|
||||
isExtractorEOS = true;
|
||||
decoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
int sampleSize = mediaExtractor.readSampleData(decoderInputBuffers[result], 0);
|
||||
boolean isKeyFrame = (mediaExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
|
||||
decoder.queueInputBuffer(result, 0, sampleSize, mediaExtractor.getSampleTime() / timeScale, isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
|
||||
mediaExtractor.advance();
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainDecoder() {
|
||||
if (isDecoderEOS) return DRAIN_STATE_NONE;
|
||||
int result = decoder.dequeueOutputBuffer(bufferInfo, 0);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
encoder.signalEndOfInputStream();
|
||||
isDecoderEOS = true;
|
||||
bufferInfo.size = 0;
|
||||
}
|
||||
boolean doRender = (bufferInfo.size > 0);
|
||||
// NOTE: rendering (doRender == true) will block if the encoder's input buffers are full.
|
||||
// Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
|
||||
decoder.releaseOutputBuffer(result, doRender);
|
||||
if (doRender) {
|
||||
decoderSurface.awaitNewImage();
|
||||
decoderSurface.drawImage();
|
||||
encoderSurface.setPresentationTime(bufferInfo.presentationTimeUs * 1000);
|
||||
encoderSurface.swapBuffers();
|
||||
}
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainEncoder() {
|
||||
if (isEncoderEOS) return DRAIN_STATE_NONE;
|
||||
int result = encoder.dequeueOutputBuffer(bufferInfo, 0);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
if (actualOutputFormat != null) {
|
||||
throw new RuntimeException("Video output format changed twice.");
|
||||
}
|
||||
actualOutputFormat = encoder.getOutputFormat();
|
||||
muxRender.setOutputFormat(MuxRender.SampleType.VIDEO, actualOutputFormat);
|
||||
muxRender.onSetOutputFormat();
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
encoderOutputBuffers = encoder.getOutputBuffers();
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
if (actualOutputFormat == null) {
|
||||
throw new RuntimeException("Could not determine actual output format.");
|
||||
}
|
||||
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
isEncoderEOS = true;
|
||||
bufferInfo.set(0, 0, 0, bufferInfo.flags);
|
||||
}
|
||||
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
|
||||
// SPS/PPS codec config data; it is delivered to the muxer via MediaFormat, not written as a sample.
|
||||
encoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
muxRender.writeSampleData(MuxRender.SampleType.VIDEO, encoderOutputBuffers[result], bufferInfo);
|
||||
writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
|
||||
encoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
}
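
A minimal driver sketch for the track class above (its name is outside this hunk, so the variable name `transcoder` is only illustrative); the loop shape follows stepPipeline()/isFinished() as defined above, and the sleep interval is an assumption to avoid busy-waiting:

while (!transcoder.isFinished()) {
    boolean stepped = transcoder.stepPipeline();
    if (!stepped) {
        try {
            Thread.sleep(10); // nothing progressed; back off briefly instead of spinning
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            break;
        }
    }
}
transcoder.release();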
|
@ -0,0 +1,139 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLSurfaceView;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_ALPHA_SIZE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_BLUE_SIZE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_DEPTH_SIZE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_GREEN_SIZE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_NONE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_RED_SIZE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_RENDERABLE_TYPE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_STENCIL_SIZE;
|
||||
|
||||
|
||||
|
||||
public class DefaultConfigChooser implements GLSurfaceView.EGLConfigChooser {
|
||||
|
||||
private final int[] configSpec;
|
||||
private final int redSize;
|
||||
private final int greenSize;
|
||||
private final int blueSize;
|
||||
private final int alphaSize;
|
||||
private final int depthSize;
|
||||
private final int stencilSize;
|
||||
|
||||
public DefaultConfigChooser(final int version) {
|
||||
this(true, version);
|
||||
}
|
||||
|
||||
public DefaultConfigChooser(final boolean withDepthBuffer, final int version) {
|
||||
this(
|
||||
8,
|
||||
8,
|
||||
8,
|
||||
0,
|
||||
withDepthBuffer ? 16 : 0,
|
||||
0,
|
||||
version
|
||||
);
|
||||
}
|
||||
|
||||
public DefaultConfigChooser(
|
||||
final int redSize,
|
||||
final int greenSize,
|
||||
final int blueSize,
|
||||
final int alphaSize,
|
||||
final int depthSize,
|
||||
final int stencilSize,
|
||||
final int version) {
|
||||
configSpec = filterConfigSpec(new int[]{
|
||||
EGL_RED_SIZE, redSize,
|
||||
EGL_GREEN_SIZE, greenSize,
|
||||
EGL_BLUE_SIZE, blueSize,
|
||||
EGL_ALPHA_SIZE, alphaSize,
|
||||
EGL_DEPTH_SIZE, depthSize,
|
||||
EGL_STENCIL_SIZE, stencilSize,
|
||||
EGL_NONE
|
||||
}, version);
|
||||
this.redSize = redSize;
|
||||
this.greenSize = greenSize;
|
||||
this.blueSize = blueSize;
|
||||
this.alphaSize = alphaSize;
|
||||
this.depthSize = depthSize;
|
||||
this.stencilSize = stencilSize;
|
||||
}
|
||||
|
||||
private static final int EGL_OPENGL_ES2_BIT = 4;
|
||||
|
||||
private int[] filterConfigSpec(final int[] configSpec, final int version) {
|
||||
if (version != 2) {
|
||||
return configSpec;
|
||||
}
|
||||
|
||||
final int len = configSpec.length;
|
||||
final int[] newConfigSpec = new int[len + 2];
|
||||
System.arraycopy(configSpec, 0, newConfigSpec, 0, len - 1);
|
||||
newConfigSpec[len - 1] = EGL_RENDERABLE_TYPE;
|
||||
newConfigSpec[len] = EGL_OPENGL_ES2_BIT;
|
||||
newConfigSpec[len + 1] = EGL_NONE;
|
||||
return newConfigSpec;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
@Override
|
||||
public EGLConfig chooseConfig(final EGL10 egl, final EGLDisplay display) {
|
||||
// Query how many EGL configurations are available for the requested spec.
|
||||
final int[] num_config = new int[1];
|
||||
if (!egl.eglChooseConfig(display, configSpec, null, 0, num_config)) {
|
||||
throw new IllegalArgumentException("eglChooseConfig failed");
|
||||
}
|
||||
final int config_size = num_config[0];
|
||||
if (config_size <= 0) {
|
||||
throw new IllegalArgumentException("No configs match configSpec");
|
||||
}
|
||||
|
||||
// Retrieve the actual matching configurations.
|
||||
final EGLConfig[] configs = new EGLConfig[config_size];
|
||||
if (!egl.eglChooseConfig(display, configSpec, configs, config_size, num_config)) {
|
||||
throw new IllegalArgumentException("eglChooseConfig#2 failed");
|
||||
}
|
||||
final EGLConfig config = chooseConfig(egl, display, configs);
|
||||
if (config == null) {
|
||||
throw new IllegalArgumentException("No config chosen");
|
||||
}
|
||||
return config;
|
||||
}
|
||||
|
||||
private EGLConfig chooseConfig(final EGL10 egl, final EGLDisplay display, final EGLConfig[] configs) {
|
||||
for (final EGLConfig config : configs) {
|
||||
final int d = findConfigAttrib(egl, display, config, EGL_DEPTH_SIZE, 0);
|
||||
final int s = findConfigAttrib(egl, display, config, EGL_STENCIL_SIZE, 0);
|
||||
if ((d >= depthSize) && (s >= stencilSize)) {
|
||||
final int r = findConfigAttrib(egl, display, config, EGL_RED_SIZE, 0);
|
||||
final int g = findConfigAttrib(egl, display, config, EGL_GREEN_SIZE, 0);
|
||||
final int b = findConfigAttrib(egl, display, config, EGL_BLUE_SIZE, 0);
|
||||
final int a = findConfigAttrib(egl, display, config, EGL_ALPHA_SIZE, 0);
|
||||
if ((r == redSize) && (g == greenSize) && (b == blueSize) && (a == alphaSize)) {
|
||||
return config;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private int findConfigAttrib(final EGL10 egl, final EGLDisplay display, final EGLConfig config, final int attribute, final int defaultValue) {
|
||||
final int[] value = new int[1];
|
||||
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
|
||||
return value[0];
|
||||
}
|
||||
return defaultValue;
|
||||
}
|
||||
}
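
A minimal usage sketch, assuming a GLES 2.0 surface without a depth buffer; glView is an existing GLSurfaceView supplied by the caller:

glView.setEGLConfigChooser(new DefaultConfigChooser(false, 2)); // RGB888, no depth/stencil
glView.setEGLContextFactory(new DefaultContextFactory(2));      // matching ES 2.0 context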
|
||||
|
||||
|
@ -0,0 +1,47 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLSurfaceView;
|
||||
import android.util.Log;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_NONE;
|
||||
import static javax.microedition.khronos.egl.EGL10.EGL_NO_CONTEXT;
|
||||
|
||||
|
||||
|
||||
public class DefaultContextFactory implements GLSurfaceView.EGLContextFactory {
|
||||
|
||||
private static final String TAG = "DefaultContextFactory";
|
||||
|
||||
private int EGLContextClientVersion;
|
||||
|
||||
public DefaultContextFactory(final int version) {
|
||||
EGLContextClientVersion = version;
|
||||
}
|
||||
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
|
||||
|
||||
@Override
|
||||
public EGLContext createContext(final EGL10 egl, final EGLDisplay display, final EGLConfig config) {
|
||||
final int[] attrib_list;
|
||||
if (EGLContextClientVersion != 0) {
|
||||
attrib_list = new int[]{EGL_CONTEXT_CLIENT_VERSION, EGLContextClientVersion, EGL_NONE};
|
||||
} else {
|
||||
attrib_list = null;
|
||||
}
|
||||
return egl.eglCreateContext(display, config, EGL_NO_CONTEXT, attrib_list);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void destroyContext(final EGL10 egl, final EGLDisplay display, final EGLContext context) {
|
||||
if (!egl.eglDestroyContext(display, context)) {
|
||||
Log.e(TAG, "display:" + display + " context: " + context);
|
||||
throw new RuntimeException("eglDestroyContext" + egl.eglGetError());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,124 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.GLException;
|
||||
import android.opengl.GLUtils;
|
||||
import android.util.Log;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
import static android.opengl.GLES20.GL_ARRAY_BUFFER;
|
||||
import static android.opengl.GLES20.GL_CLAMP_TO_EDGE;
|
||||
import static android.opengl.GLES20.GL_LINK_STATUS;
|
||||
import static android.opengl.GLES20.GL_STATIC_DRAW;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_MAG_FILTER;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_MIN_FILTER;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_WRAP_S;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_WRAP_T;
|
||||
import static android.opengl.GLES20.GL_TRUE;
|
||||
import static android.opengl.GLES20.glCreateProgram;
|
||||
|
||||
|
||||
|
||||
public class EglUtil {
|
||||
private EglUtil() {
|
||||
}
|
||||
|
||||
public static final int NO_TEXTURE = -1;
|
||||
private static final int FLOAT_SIZE_BYTES = 4;
|
||||
|
||||
|
||||
public static int loadShader(final String strSource, final int iType) {
|
||||
int[] compiled = new int[1];
|
||||
int iShader = GLES20.glCreateShader(iType);
|
||||
GLES20.glShaderSource(iShader, strSource);
|
||||
GLES20.glCompileShader(iShader);
|
||||
GLES20.glGetShaderiv(iShader, GLES20.GL_COMPILE_STATUS, compiled, 0);
|
||||
if (compiled[0] == 0) {
|
||||
Log.d("Load Shader Failed", "Compilation\n" + GLES20.glGetShaderInfoLog(iShader));
|
||||
return 0;
|
||||
}
|
||||
return iShader;
|
||||
}
|
||||
|
||||
public static int createProgram(final int vertexShader, final int pixelShader) throws GLException {
|
||||
final int program = glCreateProgram();
|
||||
if (program == 0) {
|
||||
throw new RuntimeException("Could not create program");
|
||||
}
|
||||
|
||||
GLES20.glAttachShader(program, vertexShader);
|
||||
GLES20.glAttachShader(program, pixelShader);
|
||||
|
||||
GLES20.glLinkProgram(program);
|
||||
final int[] linkStatus = new int[1];
|
||||
GLES20.glGetProgramiv(program, GL_LINK_STATUS, linkStatus, 0);
|
||||
if (linkStatus[0] != GL_TRUE) {
|
||||
GLES20.glDeleteProgram(program);
|
||||
throw new RuntimeException("Could not link program");
|
||||
}
|
||||
return program;
|
||||
}
|
||||
|
||||
public static void setupSampler(final int target, final int mag, final int min) {
|
||||
GLES20.glTexParameterf(target, GL_TEXTURE_MAG_FILTER, mag);
|
||||
GLES20.glTexParameterf(target, GL_TEXTURE_MIN_FILTER, min);
|
||||
GLES20.glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
GLES20.glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
}
|
||||
|
||||
public static int createBuffer(final float[] data) {
|
||||
return createBuffer(toFloatBuffer(data));
|
||||
}
|
||||
|
||||
public static int createBuffer(final FloatBuffer data) {
|
||||
final int[] buffers = new int[1];
|
||||
GLES20.glGenBuffers(buffers.length, buffers, 0);
|
||||
updateBufferData(buffers[0], data);
|
||||
return buffers[0];
|
||||
}
|
||||
|
||||
public static FloatBuffer toFloatBuffer(final float[] data) {
|
||||
final FloatBuffer buffer = ByteBuffer
|
||||
.allocateDirect(data.length * FLOAT_SIZE_BYTES)
|
||||
.order(ByteOrder.nativeOrder())
|
||||
.asFloatBuffer();
|
||||
buffer.put(data).position(0);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
public static void updateBufferData(final int bufferName, final FloatBuffer data) {
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, bufferName);
|
||||
GLES20.glBufferData(GL_ARRAY_BUFFER, data.capacity() * FLOAT_SIZE_BYTES, data, GL_STATIC_DRAW);
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
}
|
||||
|
||||
public static int loadTexture(final Bitmap img, final int usedTexId, final boolean recycle) {
|
||||
int textures[] = new int[1];
|
||||
if (usedTexId == NO_TEXTURE) {
|
||||
GLES20.glGenTextures(1, textures, 0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
|
||||
|
||||
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
|
||||
} else {
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
|
||||
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, img);
|
||||
textures[0] = usedTexId;
|
||||
}
|
||||
if (recycle) {
|
||||
img.recycle();
|
||||
}
|
||||
return textures[0];
|
||||
}
|
||||
}
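
A short sketch of the usual EglUtil flow on the GL thread; the GLSL sources below are placeholders for illustration, not shaders from this commit:

String vertexSource =
        "attribute vec4 aPosition;\n" +
        "void main() { gl_Position = aPosition; }\n";
String fragmentSource =
        "precision mediump float;\n" +
        "void main() { gl_FragColor = vec4(1.0); }\n";

int vertexShader = EglUtil.loadShader(vertexSource, GLES20.GL_VERTEX_SHADER);
int fragmentShader = EglUtil.loadShader(fragmentSource, GLES20.GL_FRAGMENT_SHADER);
int program = EglUtil.createProgram(vertexShader, fragmentShader); // throws if linking fails

// Upload a full-screen triangle strip into a VBO for later drawing.
int vertexBuffer = EglUtil.createBuffer(new float[]{
        -1f, -1f,   1f, -1f,   -1f, 1f,   1f, 1f
});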
|
@ -0,0 +1,13 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
|
||||
|
||||
public class GlConfigChooser extends DefaultConfigChooser {
|
||||
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 2;
|
||||
|
||||
public GlConfigChooser(final boolean withDepthBuffer) {
|
||||
super(withDepthBuffer, EGL_CONTEXT_CLIENT_VERSION);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,13 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
|
||||
|
||||
public class GlContextFactory extends DefaultContextFactory {
|
||||
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 2;
|
||||
|
||||
public GlContextFactory() {
|
||||
super(EGL_CONTEXT_CLIENT_VERSION);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,78 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.GLSurfaceView;
|
||||
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.Queue;
|
||||
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.opengles.GL10;
|
||||
|
||||
import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
|
||||
import static android.opengl.GLES20.GL_DEPTH_BUFFER_BIT;
|
||||
import static android.opengl.GLES20.GL_FRAMEBUFFER;
|
||||
|
||||
|
||||
|
||||
public abstract class GlFrameBufferObjectRenderer implements GLSurfaceView.Renderer {
|
||||
|
||||
private GlFramebufferObject framebufferObject;
|
||||
private GlFilter normalShader;
|
||||
|
||||
private final Queue<Runnable> runOnDraw;
|
||||
|
||||
|
||||
protected GlFrameBufferObjectRenderer() {
|
||||
runOnDraw = new LinkedList<Runnable>();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public final void onSurfaceCreated(final GL10 gl, final EGLConfig config) {
|
||||
framebufferObject = new GlFramebufferObject();
|
||||
normalShader = new GlFilter();
|
||||
normalShader.setup();
|
||||
onSurfaceCreated(config);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void onSurfaceChanged(final GL10 gl, final int width, final int height) {
|
||||
framebufferObject.setup(width, height);
|
||||
normalShader.setFrameSize(width, height);
|
||||
onSurfaceChanged(width, height);
|
||||
GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void onDrawFrame(final GL10 gl) {
|
||||
synchronized (runOnDraw) {
|
||||
while (!runOnDraw.isEmpty()) {
|
||||
runOnDraw.poll().run();
|
||||
}
|
||||
}
|
||||
framebufferObject.enable();
|
||||
|
||||
onDrawFrame(framebufferObject);
|
||||
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
normalShader.draw(framebufferObject.getTexName(), null);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
|
||||
}
|
||||
|
||||
public abstract void onSurfaceCreated(EGLConfig config);
|
||||
|
||||
public abstract void onSurfaceChanged(int width, int height);
|
||||
|
||||
public abstract void onDrawFrame(GlFramebufferObject fbo);
|
||||
|
||||
}
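
A minimal subclass sketch (hypothetical class name, imports omitted) showing the three abstract callbacks; it only clears the offscreen FBO each frame:

public class ClearRenderer extends GlFrameBufferObjectRenderer {

    @Override
    public void onSurfaceCreated(EGLConfig config) {
        // one-time GL state setup would go here
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        // react to viewport/FBO size changes if needed
    }

    @Override
    public void onDrawFrame(GlFramebufferObject fbo) {
        // drawing here lands in the offscreen FBO; the base class then blits it to screen
        GLES20.glClearColor(0f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }
}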
|
@ -0,0 +1,120 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import static android.opengl.GLES20.GL_COLOR_ATTACHMENT0;
|
||||
import static android.opengl.GLES20.GL_DEPTH_ATTACHMENT;
|
||||
import static android.opengl.GLES20.GL_DEPTH_COMPONENT16;
|
||||
import static android.opengl.GLES20.GL_FRAMEBUFFER;
|
||||
import static android.opengl.GLES20.GL_FRAMEBUFFER_BINDING;
|
||||
import static android.opengl.GLES20.GL_FRAMEBUFFER_COMPLETE;
|
||||
import static android.opengl.GLES20.GL_LINEAR;
|
||||
import static android.opengl.GLES20.GL_MAX_RENDERBUFFER_SIZE;
|
||||
import static android.opengl.GLES20.GL_MAX_TEXTURE_SIZE;
|
||||
import static android.opengl.GLES20.GL_NEAREST;
|
||||
import static android.opengl.GLES20.GL_RENDERBUFFER;
|
||||
import static android.opengl.GLES20.GL_RENDERBUFFER_BINDING;
|
||||
import static android.opengl.GLES20.GL_RGBA;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_2D;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_BINDING_2D;
|
||||
import static android.opengl.GLES20.GL_UNSIGNED_BYTE;
|
||||
|
||||
|
||||
|
||||
public class GlFramebufferObject {
|
||||
private int width;
|
||||
private int height;
|
||||
private int framebufferName;
|
||||
private int renderBufferName;
|
||||
private int texName;
|
||||
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
public int getTexName() {
|
||||
return texName;
|
||||
}
|
||||
|
||||
public void setup(final int width, final int height) {
|
||||
final int[] args = new int[1];
|
||||
|
||||
GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);
|
||||
if (width > args[0] || height > args[0]) {
|
||||
throw new IllegalArgumentException("GL_MAX_TEXTURE_SIZE " + args[0]);
|
||||
}
|
||||
|
||||
GLES20.glGetIntegerv(GL_MAX_RENDERBUFFER_SIZE, args, 0);
|
||||
if (width > args[0] || height > args[0]) {
|
||||
throw new IllegalArgumentException("GL_MAX_RENDERBUFFER_SIZE " + args[0]);
|
||||
}
|
||||
|
||||
GLES20.glGetIntegerv(GL_FRAMEBUFFER_BINDING, args, 0);
|
||||
final int saveFramebuffer = args[0];
|
||||
GLES20.glGetIntegerv(GL_RENDERBUFFER_BINDING, args, 0);
|
||||
final int saveRenderbuffer = args[0];
|
||||
GLES20.glGetIntegerv(GL_TEXTURE_BINDING_2D, args, 0);
|
||||
final int saveTexName = args[0];
|
||||
|
||||
release();
|
||||
|
||||
try {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
|
||||
GLES20.glGenFramebuffers(args.length, args, 0);
|
||||
framebufferName = args[0];
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
|
||||
|
||||
GLES20.glGenRenderbuffers(args.length, args, 0);
|
||||
renderBufferName = args[0];
|
||||
GLES20.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferName);
|
||||
GLES20.glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
|
||||
GLES20.glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, renderBufferName);
|
||||
|
||||
GLES20.glGenTextures(args.length, args, 0);
|
||||
texName = args[0];
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, texName);
|
||||
|
||||
EglUtil.setupSampler(GL_TEXTURE_2D, GL_LINEAR, GL_NEAREST);
|
||||
|
||||
GLES20.glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, null);
|
||||
GLES20.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texName, 0);
|
||||
|
||||
final int status = GLES20.glCheckFramebufferStatus(GL_FRAMEBUFFER);
|
||||
if (status != GL_FRAMEBUFFER_COMPLETE) {
|
||||
throw new RuntimeException("Failed to initialize framebuffer object " + status);
|
||||
}
|
||||
} catch (final RuntimeException e) {
|
||||
release();
|
||||
throw e;
|
||||
}
|
||||
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, saveFramebuffer);
|
||||
GLES20.glBindRenderbuffer(GL_RENDERBUFFER, saveRenderbuffer);
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, saveTexName);
|
||||
}
|
||||
|
||||
public void release() {
|
||||
final int[] args = new int[1];
|
||||
args[0] = texName;
|
||||
GLES20.glDeleteTextures(args.length, args, 0);
|
||||
texName = 0;
|
||||
args[0] = renderBufferName;
|
||||
GLES20.glDeleteRenderbuffers(args.length, args, 0);
|
||||
renderBufferName = 0;
|
||||
args[0] = framebufferName;
|
||||
GLES20.glDeleteFramebuffers(args.length, args, 0);
|
||||
framebufferName = 0;
|
||||
}
|
||||
|
||||
public void enable() {
|
||||
GLES20.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
|
||||
}
|
||||
|
||||
|
||||
}
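
Typical lifecycle of the framebuffer object on the GL thread; the 1280x720 size is illustrative:

GlFramebufferObject fbo = new GlFramebufferObject();
fbo.setup(1280, 720);                                  // allocate texture + depth renderbuffer + FBO
fbo.enable();                                          // redirect subsequent draw calls into the FBO
// ... issue draw calls here ...
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);    // return to the default framebuffer
int offscreenTexture = fbo.getTexName();               // sample this texture in a later pass
// fbo.release();                                      // when the FBO is no longer needed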
|
@ -0,0 +1,89 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import static android.opengl.GLES20.GL_ARRAY_BUFFER;
|
||||
import static android.opengl.GLES20.GL_FLOAT;
|
||||
import static android.opengl.GLES20.GL_TEXTURE0;
|
||||
import static android.opengl.GLES20.GL_TEXTURE_2D;
|
||||
import static android.opengl.GLES20.GL_TRIANGLE_STRIP;
|
||||
|
||||
|
||||
|
||||
public class GlPreview extends GlFilter {
|
||||
|
||||
public static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"uniform mat4 uMVPMatrix;\n" +
|
||||
"uniform mat4 uSTMatrix;\n" +
|
||||
"uniform float uCRatio;\n" +
|
||||
|
||||
"attribute vec4 aPosition;\n" +
|
||||
"attribute vec4 aTextureCoord;\n" +
|
||||
"varying highp vec2 vTextureCoord;\n" +
|
||||
|
||||
"void main() {\n" +
|
||||
"vec4 scaledPos = aPosition;\n" +
|
||||
"scaledPos.x = scaledPos.x * uCRatio;\n" +
|
||||
"gl_Position = uMVPMatrix * scaledPos;\n" +
|
||||
"vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
|
||||
"}\n";
|
||||
|
||||
private final int texTarget;
|
||||
|
||||
public GlPreview(final int texTarget) {
|
||||
super(VERTEX_SHADER, createFragmentShaderSourceOESIfNeed(texTarget));
|
||||
this.texTarget = texTarget;
|
||||
}
|
||||
|
||||
private static String createFragmentShaderSourceOESIfNeed(final int texTarget) {
|
||||
if (texTarget == GL_TEXTURE_EXTERNAL_OES) {
|
||||
return new StringBuilder()
|
||||
.append("#extension GL_OES_EGL_image_external : require\n")
|
||||
.append(DEFAULT_FRAGMENT_SHADER.replace("sampler2D", "samplerExternalOES"))
|
||||
.toString();
|
||||
}
|
||||
return DEFAULT_FRAGMENT_SHADER;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() {
|
||||
super.setup();
|
||||
getHandle("uMVPMatrix");
|
||||
getHandle("uSTMatrix");
|
||||
getHandle("uCRatio");
|
||||
getHandle("aPosition");
|
||||
getHandle("aTextureCoord");
|
||||
}
|
||||
|
||||
public void draw(final int texName, final float[] mvpMatrix, final float[] stMatrix, final float aspectRatio) {
|
||||
useProgram();
|
||||
|
||||
GLES20.glUniformMatrix4fv(getHandle("uMVPMatrix"), 1, false, mvpMatrix, 0);
|
||||
GLES20.glUniformMatrix4fv(getHandle("uSTMatrix"), 1, false, stMatrix, 0);
|
||||
GLES20.glUniform1f(getHandle("uCRatio"), aspectRatio);
|
||||
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, getVertexBufferName());
|
||||
GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
|
||||
GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
|
||||
GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
|
||||
GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);
|
||||
|
||||
GLES20.glActiveTexture(GL_TEXTURE0);
|
||||
GLES20.glBindTexture(texTarget, texName);
|
||||
GLES20.glUniform1i(getHandle(DEFAULT_UNIFORM_SAMPLER), 0);
|
||||
|
||||
GLES20.glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
|
||||
GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,70 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import static android.opengl.GLES20.*;
|
||||
|
||||
public class GlPreviewFilter extends GlFilter {
|
||||
|
||||
public static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"uniform mat4 uMVPMatrix;\n" +
|
||||
"uniform mat4 uSTMatrix;\n" +
|
||||
"uniform float uCRatio;\n" +
|
||||
|
||||
"attribute vec4 aPosition;\n" +
|
||||
"attribute vec4 aTextureCoord;\n" +
|
||||
"varying highp vec2 vTextureCoord;\n" +
|
||||
|
||||
"void main() {\n" +
|
||||
"vec4 scaledPos = aPosition;\n" +
|
||||
"scaledPos.x = scaledPos.x * uCRatio;\n" +
|
||||
"gl_Position = uMVPMatrix * scaledPos;\n" +
|
||||
"vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
|
||||
"}\n";
|
||||
|
||||
private final int texTarget;
|
||||
|
||||
public GlPreviewFilter(final int texTarget) {
|
||||
super(VERTEX_SHADER, createFragmentShaderSourceOESIfNeed(texTarget));
|
||||
this.texTarget = texTarget;
|
||||
}
|
||||
|
||||
private static String createFragmentShaderSourceOESIfNeed(final int texTarget) {
|
||||
if (texTarget == GL_TEXTURE_EXTERNAL_OES) {
|
||||
return new StringBuilder()
|
||||
.append("#extension GL_OES_EGL_image_external : require\n")
|
||||
.append(DEFAULT_FRAGMENT_SHADER.replace("sampler2D", "samplerExternalOES"))
|
||||
.toString();
|
||||
}
|
||||
return DEFAULT_FRAGMENT_SHADER;
|
||||
}
|
||||
|
||||
public void draw(final int texName, final float[] mvpMatrix, final float[] stMatrix, final float aspectRatio) {
|
||||
useProgram();
|
||||
|
||||
GLES20.glUniformMatrix4fv(getHandle("uMVPMatrix"), 1, false, mvpMatrix, 0);
|
||||
GLES20.glUniformMatrix4fv(getHandle("uSTMatrix"), 1, false, stMatrix, 0);
|
||||
GLES20.glUniform1f(getHandle("uCRatio"), aspectRatio);
|
||||
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, getVertexBufferName());
|
||||
GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
|
||||
GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
|
||||
GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
|
||||
GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);
|
||||
|
||||
GLES20.glActiveTexture(GL_TEXTURE0);
|
||||
GLES20.glBindTexture(texTarget, texName);
|
||||
GLES20.glUniform1i(getHandle(DEFAULT_UNIFORM_SAMPLER), 0);
|
||||
|
||||
GLES20.glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
|
||||
GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
|
||||
GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,316 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.GLSurfaceView;
|
||||
import android.opengl.Matrix;
|
||||
import android.os.Handler;
|
||||
import android.util.Size;
|
||||
import com.xypower.gpuv.camerarecorder.capture.MediaVideoEncoder;
|
||||
import com.xypower.gpuv.egl.filter.GlFilter;
|
||||
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
|
||||
import static android.opengl.GLES20.*;
|
||||
|
||||
|
||||
|
||||
public class GlPreviewRenderer extends GlFrameBufferObjectRenderer implements SurfaceTexture.OnFrameAvailableListener {
|
||||
|
||||
private final Handler handler = new Handler();
|
||||
|
||||
private GlSurfaceTexture previewTexture;
|
||||
|
||||
// private final Camera camera;
|
||||
private int texName;
|
||||
|
||||
private float[] MVPMatrix = new float[16];
|
||||
private float[] ProjMatrix = new float[16];
|
||||
private float[] MMatrix = new float[16];
|
||||
private float[] VMatrix = new float[16];
|
||||
private float[] STMatrix = new float[16];
|
||||
|
||||
|
||||
private final GLSurfaceView glView;
|
||||
|
||||
private GlFramebufferObject filterFramebufferObject;
|
||||
private GlPreview previewShader;
|
||||
|
||||
private GlFilter glFilter;
|
||||
private boolean isNewShader;
|
||||
|
||||
private int angle = 0;
|
||||
private float aspectRatio = 1f;
|
||||
private float scaleRatio = 1f;
|
||||
private float drawScale = 1f;
|
||||
private float gestureScale = 1f;
|
||||
|
||||
private Size cameraResolution;
|
||||
|
||||
private int updateTexImageCounter = 0;
|
||||
private int updateTexImageCompare = 0;
|
||||
|
||||
private SurfaceCreateListener surfaceCreateListener;
|
||||
private MediaVideoEncoder videoEncoder;
|
||||
|
||||
|
||||
public GlPreviewRenderer(GLSurfaceView glView) {
|
||||
this.glView = glView;
|
||||
this.glView.setEGLConfigChooser(new GlConfigChooser(false));
|
||||
this.glView.setEGLContextFactory(new GlContextFactory());
|
||||
this.glView.setRenderer(this);
|
||||
this.glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
|
||||
|
||||
|
||||
Matrix.setIdentityM(STMatrix, 0);
|
||||
}
|
||||
|
||||
public void onStartPreview(float cameraPreviewWidth, float cameraPreviewHeight, boolean isLandscapeDevice) {
|
||||
|
||||
Matrix.setIdentityM(MMatrix, 0);
|
||||
Matrix.rotateM(MMatrix, 0, -angle, 0.0f, 0.0f, 1.0f);
|
||||
|
||||
// Log.d("GPUCameraRecorder ", "angle" + angle);
|
||||
// Log.d("GPUCameraRecorder ", "getMeasuredHeight " + glView.getMeasuredHeight());
|
||||
// Log.d("GPUCameraRecorder ", "getMeasuredWidth " + glView.getMeasuredWidth());
|
||||
// Log.d("GPUCameraRecorder ", "cameraPreviewWidth " + cameraPreviewWidth);
|
||||
// Log.d("GPUCameraRecorder ", "cameraPreviewHeight " + cameraPreviewHeight);
|
||||
|
||||
|
||||
if (isLandscapeDevice) {
|
||||
|
||||
if (glView.getMeasuredWidth() == glView.getMeasuredHeight()) {
|
||||
|
||||
float scale = Math.max(cameraPreviewWidth / cameraPreviewHeight,
|
||||
cameraPreviewHeight / cameraPreviewWidth);
|
||||
Matrix.scaleM(MMatrix, 0, 1f * scale, 1f * scale, 1);
|
||||
|
||||
} else {
|
||||
float scale = Math.max(
|
||||
(float) glView.getMeasuredHeight() / cameraPreviewWidth,
|
||||
(float) glView.getMeasuredWidth() / cameraPreviewHeight);
|
||||
Matrix.scaleM(MMatrix, 0, 1f * scale, 1f * scale, 1);
|
||||
}
|
||||
|
||||
} else {
|
||||
// Portrait
|
||||
// View 1920 1080 Camera 1280 720 OK
|
||||
// View 1920 1080 Camera 800 600 OK
|
||||
// View 1440 1080 Camera 800 600 OK
|
||||
// View 1080 1080 Camera 1280 720 Need Scale
|
||||
// View 1080 1080 Camera 800 600 Need Scale
|
||||
|
||||
|
||||
float viewAspect = (float) glView.getMeasuredHeight() / glView.getMeasuredWidth();
|
||||
float cameraAspect = cameraPreviewWidth / cameraPreviewHeight;
|
||||
if (viewAspect >= cameraAspect) {
|
||||
Matrix.scaleM(MMatrix, 0, 1f, 1f, 1);
|
||||
} else {
|
||||
float adjust = cameraAspect / viewAspect;
|
||||
Matrix.scaleM(MMatrix, 0, 1f * adjust, 1f * adjust, 1);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void setGlFilter(final GlFilter filter) {
|
||||
glView.queueEvent(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (glFilter != null) {
|
||||
glFilter.release();
|
||||
}
|
||||
glFilter = filter;
|
||||
isNewShader = true;
|
||||
glView.requestRender();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
|
||||
// increment every time a new frame is available
|
||||
updateTexImageCounter++;
|
||||
glView.requestRender();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSurfaceCreated(EGLConfig config) {
|
||||
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
|
||||
final int[] args = new int[1];
|
||||
|
||||
GLES20.glGenTextures(args.length, args, 0);
|
||||
texName = args[0];
|
||||
|
||||
// Create the SurfaceTexture
|
||||
previewTexture = new GlSurfaceTexture(texName);
|
||||
previewTexture.setOnFrameAvailableListener(this);
|
||||
|
||||
GLES20.glBindTexture(previewTexture.getTextureTarget(), texName);
|
||||
// GL_TEXTURE_EXTERNAL_OES
|
||||
EglUtil.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);
|
||||
GLES20.glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
filterFramebufferObject = new GlFramebufferObject();
|
||||
// GL_TEXTURE_EXTERNAL_OES
|
||||
previewShader = new GlPreview(previewTexture.getTextureTarget());
|
||||
previewShader.setup();
|
||||
|
||||
|
||||
Matrix.setLookAtM(VMatrix, 0,
|
||||
0.0f, 0.0f, 5.0f,
|
||||
0.0f, 0.0f, 0.0f,
|
||||
0.0f, 1.0f, 0.0f
|
||||
);
|
||||
|
||||
|
||||
if (glFilter != null) {
|
||||
isNewShader = true;
|
||||
}
|
||||
|
||||
GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);
|
||||
|
||||
handler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (surfaceCreateListener != null) {
|
||||
surfaceCreateListener.onCreated(previewTexture.getSurfaceTexture());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSurfaceChanged(int width, int height) {
|
||||
|
||||
filterFramebufferObject.setup(width, height);
|
||||
previewShader.setFrameSize(width, height);
|
||||
if (glFilter != null) {
|
||||
glFilter.setFrameSize(width, height);
|
||||
}
|
||||
scaleRatio = (float) width / height;
|
||||
Matrix.frustumM(ProjMatrix, 0, -scaleRatio, scaleRatio, -1, 1, 5, 7);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDrawFrame(GlFramebufferObject fbo) {
|
||||
|
||||
if (drawScale != gestureScale) {
|
||||
|
||||
float tempScale = 1 / drawScale;
|
||||
Matrix.scaleM(MMatrix, 0, tempScale, tempScale, 1);
|
||||
drawScale = gestureScale;
|
||||
Matrix.scaleM(MMatrix, 0, drawScale, drawScale, 1);
|
||||
}
|
||||
|
||||
synchronized (this) {
|
||||
if (updateTexImageCompare != updateTexImageCounter) {
|
||||
// call updateTexImage() once for every onFrameAvailable() callback received since the last draw.
|
||||
while (updateTexImageCompare != updateTexImageCounter) {
|
||||
|
||||
previewTexture.updateTexImage();
|
||||
previewTexture.getTransformMatrix(STMatrix);
|
||||
updateTexImageCompare++; // increment the compare value until it matches updateTexImageCounter
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (isNewShader) {
|
||||
if (glFilter != null) {
|
||||
glFilter.setup();
|
||||
glFilter.setFrameSize(fbo.getWidth(), fbo.getHeight());
|
||||
}
|
||||
isNewShader = false;
|
||||
}
|
||||
|
||||
if (glFilter != null) {
|
||||
filterFramebufferObject.enable();
|
||||
}
|
||||
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
Matrix.multiplyMM(MVPMatrix, 0, VMatrix, 0, MMatrix, 0);
|
||||
Matrix.multiplyMM(MVPMatrix, 0, ProjMatrix, 0, MVPMatrix, 0);
|
||||
|
||||
previewShader.draw(texName, MVPMatrix, STMatrix, aspectRatio);
|
||||
|
||||
|
||||
if (glFilter != null) {
|
||||
fbo.enable();
|
||||
GLES20.glClear(GL_COLOR_BUFFER_BIT);
|
||||
glFilter.draw(filterFramebufferObject.getTexName(), fbo);
|
||||
}
|
||||
|
||||
synchronized (this) {
|
||||
if (videoEncoder != null) {
|
||||
// notify the capturing thread that a camera frame is available.
|
||||
videoEncoder.frameAvailableSoon(texName, STMatrix, MVPMatrix, aspectRatio);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void setCameraResolution(Size cameraResolution) {
|
||||
this.cameraResolution = cameraResolution;
|
||||
}
|
||||
|
||||
public void setVideoEncoder(final MediaVideoEncoder encoder) {
|
||||
glView.queueEvent(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
synchronized (GlPreviewRenderer.this) {
|
||||
if (encoder != null) {
|
||||
encoder.setEglContext(EGL14.eglGetCurrentContext(), texName);
|
||||
}
|
||||
videoEncoder = encoder;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
public GlSurfaceTexture getPreviewTexture() {
|
||||
return previewTexture;
|
||||
}
|
||||
|
||||
public void setAngle(int angle) {
|
||||
this.angle = angle;
|
||||
if (angle == 90 || angle == 270) {
|
||||
aspectRatio = (float) cameraResolution.getWidth() / cameraResolution.getHeight();
|
||||
} else {
|
||||
aspectRatio = (float) cameraResolution.getHeight() / cameraResolution.getWidth();
|
||||
}
|
||||
}
|
||||
|
||||
public void setGestureScale(float gestureScale) {
|
||||
this.gestureScale = gestureScale;
|
||||
}
|
||||
|
||||
public GlFilter getFilter() {
|
||||
return glFilter;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
glView.queueEvent(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (glFilter != null) {
|
||||
glFilter.release();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public interface SurfaceCreateListener {
|
||||
void onCreated(SurfaceTexture surface);
|
||||
}
|
||||
|
||||
public void setSurfaceCreateListener(SurfaceCreateListener surfaceCreateListener) {
|
||||
this.surfaceCreateListener = surfaceCreateListener;
|
||||
}
|
||||
}
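
Sketch of how the preview renderer is typically attached. The constructor already installs GlConfigChooser/GlContextFactory and registers itself on the view; the camera-side handling of the SurfaceTexture and the 1280x720 resolution are assumptions, not part of this commit:

GlPreviewRenderer previewRenderer = new GlPreviewRenderer(glSurfaceView);
previewRenderer.setSurfaceCreateListener(new GlPreviewRenderer.SurfaceCreateListener() {
    @Override
    public void onCreated(SurfaceTexture surface) {
        // hand the SurfaceTexture to the camera as its preview output (camera code not shown)
    }
});
previewRenderer.setCameraResolution(new Size(1280, 720)); // must be set before setAngle()
previewRenderer.setAngle(90);                             // portrait orientation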
|
||||
|
@ -0,0 +1,49 @@
|
||||
package com.xypower.gpuv.egl;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
|
||||
|
||||
|
||||
public class GlSurfaceTexture implements SurfaceTexture.OnFrameAvailableListener {
|
||||
|
||||
private SurfaceTexture surfaceTexture;
|
||||
private SurfaceTexture.OnFrameAvailableListener onFrameAvailableListener;
|
||||
|
||||
public GlSurfaceTexture(final int texName) {
|
||||
surfaceTexture = new SurfaceTexture(texName);
|
||||
surfaceTexture.setOnFrameAvailableListener(this);
|
||||
}
|
||||
|
||||
|
||||
public void setOnFrameAvailableListener(final SurfaceTexture.OnFrameAvailableListener l) {
|
||||
onFrameAvailableListener = l;
|
||||
}
|
||||
|
||||
|
||||
public int getTextureTarget() {
|
||||
return GlPreview.GL_TEXTURE_EXTERNAL_OES;
|
||||
}
|
||||
|
||||
public void updateTexImage() {
|
||||
surfaceTexture.updateTexImage();
|
||||
}
|
||||
|
||||
public void getTransformMatrix(final float[] mtx) {
|
||||
surfaceTexture.getTransformMatrix(mtx);
|
||||
}
|
||||
|
||||
public SurfaceTexture getSurfaceTexture() {
|
||||
return surfaceTexture;
|
||||
}
|
||||
|
||||
public void onFrameAvailable(final SurfaceTexture surfaceTexture) {
|
||||
if (onFrameAvailableListener != null) {
|
||||
onFrameAvailableListener.onFrameAvailable(this.surfaceTexture);
|
||||
}
|
||||
}
|
||||
|
||||
public void release() {
|
||||
surfaceTexture.release();
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,149 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
import static android.opengl.GLES20.glUniform1f;
|
||||
|
||||
|
||||
public class GlBilateralFilter extends GlFilter {
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"attribute vec4 aPosition;" +
|
||||
"attribute vec4 aTextureCoord;" +
|
||||
|
||||
"const lowp int GAUSSIAN_SAMPLES = 9;" +
|
||||
|
||||
"uniform highp float texelWidthOffset;" +
|
||||
"uniform highp float texelHeightOffset;" +
|
||||
"uniform highp float blurSize;" +
|
||||
|
||||
"varying highp vec2 vTextureCoord;" +
|
||||
"varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +
|
||||
|
||||
"void main() {" +
|
||||
"gl_Position = aPosition;" +
|
||||
"vTextureCoord = aTextureCoord.xy;" +
|
||||
|
||||
// Calculate the positions for the blur
|
||||
"int multiplier = 0;" +
|
||||
"highp vec2 blurStep;" +
|
||||
"highp vec2 singleStepOffset = vec2(texelHeightOffset, texelWidthOffset) * blurSize;" +
|
||||
|
||||
"for (lowp int i = 0; i < GAUSSIAN_SAMPLES; i++) {" +
|
||||
"multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));" +
|
||||
// Blur in x (horizontal)
|
||||
"blurStep = float(multiplier) * singleStepOffset;" +
|
||||
"blurCoordinates[i] = vTextureCoord.xy + blurStep;" +
|
||||
"}" +
|
||||
"}";
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"precision mediump float;" +
|
||||
|
||||
"uniform lowp sampler2D sTexture;" +
|
||||
|
||||
"const lowp int GAUSSIAN_SAMPLES = 9;" +
|
||||
"varying highp vec2 vTextureCoord;" +
|
||||
"varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +
|
||||
|
||||
"const mediump float distanceNormalizationFactor = 1.5;" +
|
||||
|
||||
"void main() {" +
|
||||
"lowp vec4 centralColor = texture2D(sTexture, blurCoordinates[4]);" +
|
||||
"lowp float gaussianWeightTotal = 0.18;" +
|
||||
"lowp vec4 sum = centralColor * 0.18;" +
|
||||
|
||||
"lowp vec4 sampleColor = texture2D(sTexture, blurCoordinates[0]);" +
|
||||
"lowp float distanceFromCentralColor;" +
|
||||
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
|
||||
"lowp float gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[1]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[2]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[3]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[5]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[6]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[7]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"sampleColor = texture2D(sTexture, blurCoordinates[8]);" +
|
||||
"distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
|
||||
"gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);" +
|
||||
"gaussianWeightTotal += gaussianWeight;" +
|
||||
"sum += sampleColor * gaussianWeight;" +
|
||||
|
||||
"gl_FragColor = sum / gaussianWeightTotal;" +
|
||||
"}";
|
||||
|
||||
private float texelWidthOffset = 0.004f;
|
||||
private float texelHeightOffset = 0.004f;
|
||||
private float blurSize = 1.0f;
|
||||
|
||||
public GlBilateralFilter() {
|
||||
super(VERTEX_SHADER, FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
|
||||
public float getTexelWidthOffset() {
|
||||
return texelWidthOffset;
|
||||
}
|
||||
|
||||
public void setTexelWidthOffset(final float texelWidthOffset) {
|
||||
this.texelWidthOffset = texelWidthOffset;
|
||||
}
|
||||
|
||||
public float getTexelHeightOffset() {
|
||||
return texelHeightOffset;
|
||||
}
|
||||
|
||||
public void setTexelHeightOffset(final float texelHeightOffset) {
|
||||
this.texelHeightOffset = texelHeightOffset;
|
||||
}
|
||||
|
||||
public float getBlurSize() {
|
||||
return blurSize;
|
||||
}
|
||||
|
||||
public void setBlurSize(final float blurSize) {
|
||||
this.blurSize = blurSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDraw() {
|
||||
glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
|
||||
glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
|
||||
glUniform1f(getHandle("blurSize"), blurSize);
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,96 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
|
||||
public class GlBoxBlurFilter extends GlFilter {
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"attribute vec4 aPosition;" +
|
||||
"attribute vec4 aTextureCoord;" +
|
||||
|
||||
"uniform highp float texelWidthOffset;" +
|
||||
"uniform highp float texelHeightOffset;" +
|
||||
"uniform highp float blurSize;" +
|
||||
|
||||
"varying highp vec2 centerTextureCoordinate;" +
|
||||
"varying highp vec2 oneStepLeftTextureCoordinate;" +
|
||||
"varying highp vec2 twoStepsLeftTextureCoordinate;" +
|
||||
"varying highp vec2 oneStepRightTextureCoordinate;" +
|
||||
"varying highp vec2 twoStepsRightTextureCoordinate;" +
|
||||
|
||||
"void main() {" +
|
||||
"gl_Position = aPosition;" +
|
||||
|
||||
"vec2 firstOffset = vec2(1.5 * texelWidthOffset, 1.5 * texelHeightOffset) * blurSize;" +
|
||||
"vec2 secondOffset = vec2(3.5 * texelWidthOffset, 3.5 * texelHeightOffset) * blurSize;" +
|
||||
|
||||
"centerTextureCoordinate = aTextureCoord.xy;" +
|
||||
"oneStepLeftTextureCoordinate = centerTextureCoordinate - firstOffset;" +
|
||||
"twoStepsLeftTextureCoordinate = centerTextureCoordinate - secondOffset;" +
|
||||
"oneStepRightTextureCoordinate = centerTextureCoordinate + firstOffset;" +
|
||||
"twoStepsRightTextureCoordinate = centerTextureCoordinate + secondOffset;" +
|
||||
"}";
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"precision mediump float;" +
|
||||
|
||||
"uniform lowp sampler2D sTexture;" +
|
||||
|
||||
"varying highp vec2 centerTextureCoordinate;" +
|
||||
"varying highp vec2 oneStepLeftTextureCoordinate;" +
|
||||
"varying highp vec2 twoStepsLeftTextureCoordinate;" +
|
||||
"varying highp vec2 oneStepRightTextureCoordinate;" +
|
||||
"varying highp vec2 twoStepsRightTextureCoordinate;" +
|
||||
|
||||
"void main() {" +
|
||||
"lowp vec4 color = texture2D(sTexture, centerTextureCoordinate) * 0.2;" +
|
||||
"color += texture2D(sTexture, oneStepLeftTextureCoordinate) * 0.2;" +
|
||||
"color += texture2D(sTexture, oneStepRightTextureCoordinate) * 0.2;" +
|
||||
"color += texture2D(sTexture, twoStepsLeftTextureCoordinate) * 0.2;" +
|
||||
"color += texture2D(sTexture, twoStepsRightTextureCoordinate) * 0.2;" +
|
||||
"gl_FragColor = color;" +
|
||||
"}";
|
||||
|
||||
private float texelWidthOffset = 0.003f;
|
||||
private float texelHeightOffset = 0.003f;
|
||||
private float blurSize = 1.0f;
|
||||
|
||||
|
||||
public GlBoxBlurFilter() {
|
||||
super(VERTEX_SHADER, FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
public float getTexelWidthOffset() {
|
||||
return texelWidthOffset;
|
||||
}
|
||||
|
||||
public void setTexelWidthOffset(final float texelWidthOffset) {
|
||||
this.texelWidthOffset = texelWidthOffset;
|
||||
}
|
||||
|
||||
public float getTexelHeightOffset() {
|
||||
return texelHeightOffset;
|
||||
}
|
||||
|
||||
public void setTexelHeightOffset(final float texelHeightOffset) {
|
||||
this.texelHeightOffset = texelHeightOffset;
|
||||
}
|
||||
|
||||
public float getBlurSize() {
|
||||
return blurSize;
|
||||
}
|
||||
|
||||
public void setBlurSize(final float blurSize) {
|
||||
this.blurSize = blurSize;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void onDraw() {
|
||||
GLES20.glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
|
||||
GLES20.glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
|
||||
GLES20.glUniform1f(getHandle("blurSize"), blurSize);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,38 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
/**
|
||||
* brightness value ranges from -1.0 to 1.0, with 0.0 as the normal level
|
||||
*/
|
||||
public class GlBrightnessFilter extends GlFilter {
|
||||
private static final String BRIGHTNESS_FRAGMENT_SHADER = "" +
|
||||
"precision mediump float;" +
|
||||
" varying vec2 vTextureCoord;\n" +
|
||||
" \n" +
|
||||
" uniform lowp sampler2D sTexture;\n" +
|
||||
" uniform lowp float brightness;\n" +
|
||||
" \n" +
|
||||
" void main()\n" +
|
||||
" {\n" +
|
||||
" lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
|
||||
" \n" +
|
||||
" gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n" +
|
||||
" }";
|
||||
|
||||
public GlBrightnessFilter() {
|
||||
super(DEFAULT_VERTEX_SHADER, BRIGHTNESS_FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
private float brightness = 0f;
|
||||
|
||||
public void setBrightness(float brightness) {
|
||||
this.brightness = brightness;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDraw() {
|
||||
GLES20.glUniform1f(getHandle("brightness"), brightness);
|
||||
}
|
||||
}
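
Applying one of the filters above through the preview renderer; the 0.2 value is illustrative:

GlBrightnessFilter brightnessFilter = new GlBrightnessFilter();
brightnessFilter.setBrightness(0.2f);           // -1.0 .. 1.0, 0.0 is the neutral level
previewRenderer.setGlFilter(brightnessFilter);  // swapped in on the GL thread via queueEvent()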
|
@ -0,0 +1,81 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
|
||||
public class GlBulgeDistortionFilter extends GlFilter {
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"precision mediump float;" +
|
||||
|
||||
"varying highp vec2 vTextureCoord;" +
|
||||
"uniform lowp sampler2D sTexture;" +
|
||||
|
||||
"uniform highp vec2 center;" +
|
||||
"uniform highp float radius;" +
|
||||
"uniform highp float scale;" +
|
||||
|
||||
"void main() {" +
|
||||
"highp vec2 textureCoordinateToUse = vTextureCoord;" +
|
||||
"highp float dist = distance(center, vTextureCoord);" +
|
||||
"textureCoordinateToUse -= center;" +
|
||||
"if (dist < radius) {" +
|
||||
"highp float percent = 1.0 - ((radius - dist) / radius) * scale;" +
|
||||
"percent = percent * percent;" +
|
||||
"textureCoordinateToUse = textureCoordinateToUse * percent;" +
|
||||
"}" +
|
||||
"textureCoordinateToUse += center;" +
|
||||
|
||||
"gl_FragColor = texture2D(sTexture, textureCoordinateToUse);" +
|
||||
"}";
|
||||
|
||||
private float centerX = 0.5f;
|
||||
private float centerY = 0.5f;
|
||||
private float radius = 0.25f;
|
||||
private float scale = 0.5f;
|
||||
|
||||
public GlBulgeDistortionFilter() {
|
||||
super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
public float getCenterX() {
|
||||
return centerX;
|
||||
}
|
||||
|
||||
public void setCenterX(final float centerX) {
|
||||
this.centerX = centerX;
|
||||
}
|
||||
|
||||
public float getCenterY() {
|
||||
return centerY;
|
||||
}
|
||||
|
||||
public void setCenterY(final float centerY) {
|
||||
this.centerY = centerY;
|
||||
}
|
||||
|
||||
public float getRadius() {
|
||||
return radius;
|
||||
}
|
||||
|
||||
public void setRadius(final float radius) {
|
||||
this.radius = radius;
|
||||
}
|
||||
|
||||
public float getScale() {
|
||||
return scale;
|
||||
}
|
||||
|
||||
public void setScale(final float scale) {
|
||||
this.scale = scale;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
@Override
|
||||
public void onDraw() {
|
||||
GLES20.glUniform2f(getHandle("center"), centerX, centerY);
|
||||
GLES20.glUniform1f(getHandle("radius"), radius);
|
||||
GLES20.glUniform1f(getHandle("scale"), scale);
|
||||
}
|
||||
}
|
@ -0,0 +1,54 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
|
||||
|
||||
public class GlCGAColorspaceFilter extends GlFilter {
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"precision mediump float;" +
|
||||
|
||||
"varying vec2 vTextureCoord;" +
|
||||
"uniform lowp sampler2D sTexture;" +
|
||||
|
||||
"void main() {" +
|
||||
"highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);" +
|
||||
|
||||
"highp vec2 samplePos = vTextureCoord - mod(vTextureCoord, sampleDivisor);" +
|
||||
"highp vec4 color = texture2D(sTexture, samplePos);" +
|
||||
|
||||
"mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);" +
|
||||
"mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);" +
|
||||
"mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);" +
|
||||
"mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);" +
|
||||
|
||||
"mediump vec4 endColor;" +
|
||||
"highp float blackDistance = distance(color, colorBlack);" +
|
||||
"highp float whiteDistance = distance(color, colorWhite);" +
|
||||
"highp float magentaDistance = distance(color, colorMagenta);" +
|
||||
"highp float cyanDistance = distance(color, colorCyan);" +
|
||||
|
||||
"mediump vec4 finalColor;" +
|
||||
|
||||
"highp float colorDistance = min(magentaDistance, cyanDistance);" +
|
||||
"colorDistance = min(colorDistance, whiteDistance);" +
|
||||
"colorDistance = min(colorDistance, blackDistance);" +
|
||||
|
||||
"if (colorDistance == blackDistance) {" +
|
||||
"finalColor = colorBlack;" +
|
||||
"} else if (colorDistance == whiteDistance) {" +
|
||||
"finalColor = colorWhite;" +
|
||||
"} else if (colorDistance == cyanDistance) {" +
|
||||
"finalColor = colorCyan;" +
|
||||
"} else {" +
|
||||
"finalColor = colorMagenta;" +
|
||||
"}" +
|
||||
|
||||
"gl_FragColor = finalColor;" +
|
||||
"}";
|
||||
|
||||
|
||||
public GlCGAColorspaceFilter() {
|
||||
super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,41 @@
|
||||
package com.xypower.gpuv.egl.filter;
|
||||
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
/**
|
||||
* Changes the contrast of the image.
|
||||
* contrast value ranges from 0.0 to 4.0, with 1.0 as the normal level
|
||||
*/
|
||||
public class GlContrastFilter extends GlFilter {
|
||||
|
||||
private static final String CONTRAST_FRAGMENT_SHADER = "" +
|
||||
"precision mediump float;" +
|
||||
" varying vec2 vTextureCoord;\n" +
|
||||
" \n" +
|
||||
" uniform lowp sampler2D sTexture;\n" +
|
||||
" uniform lowp float contrast;\n" +
|
||||
" \n" +
|
||||
" void main()\n" +
|
||||
" {\n" +
|
||||
" lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
|
||||
" \n" +
|
||||
" gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n" +
|
||||
" }";
|
||||
|
||||
|
||||
public GlContrastFilter() {
|
||||
super(DEFAULT_VERTEX_SHADER, CONTRAST_FRAGMENT_SHADER);
|
||||
}
|
||||
|
||||
private float contrast = 1.2f;
|
||||
|
||||
public void setContrast(float contrast) {
|
||||
this.contrast = contrast;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDraw() {
|
||||
GLES20.glUniform1f(getHandle("contrast"), contrast);
|
||||
}
|
||||
}
|
@ -0,0 +1,85 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlCrosshatchFilter extends GlFilter {

    private static final String CROSSHATCH_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " uniform lowp sampler2D sTexture;\n" +
        "uniform highp float crossHatchSpacing;\n" +
        "uniform highp float lineWidth;\n" +
        "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
        "void main()\n" +
        "{\n" +
        "highp float luminance = dot(texture2D(sTexture, vTextureCoord).rgb, W);\n" +
        "lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);\n" +
        "if (luminance < 1.00)\n" +
        "{\n" +
        "if (mod(vTextureCoord.x + vTextureCoord.y, crossHatchSpacing) <= lineWidth)\n" +
        "{\n" +
        "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
        "}\n" +
        "}\n" +
        "if (luminance < 0.75)\n" +
        "{\n" +
        "if (mod(vTextureCoord.x - vTextureCoord.y, crossHatchSpacing) <= lineWidth)\n" +
        "{\n" +
        "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
        "}\n" +
        "}\n" +
        "if (luminance < 0.50)\n" +
        "{\n" +
        "if (mod(vTextureCoord.x + vTextureCoord.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n" +
        "{\n" +
        "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
        "}\n" +
        "}\n" +
        "if (luminance < 0.3)\n" +
        "{\n" +
        "if (mod(vTextureCoord.x - vTextureCoord.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n" +
        "{\n" +
        "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
        "}\n" +
        "}\n" +
        "gl_FragColor = colorToDisplay;\n" +
        "}\n";

    public GlCrosshatchFilter() {
        super(DEFAULT_VERTEX_SHADER, CROSSHATCH_FRAGMENT_SHADER);
    }

    private float crossHatchSpacing = 0.03f;
    private float lineWidth = 0.003f;

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("crossHatchSpacing"), crossHatchSpacing);
        GLES20.glUniform1f(getHandle("lineWidth"), lineWidth);
    }

    public void setCrossHatchSpacing(float crossHatchSpacing) {
        this.crossHatchSpacing = crossHatchSpacing;
    }

    public void setLineWidth(float lineWidth) {
        this.lineWidth = lineWidth;
    }

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);

        float singlePixelSpacing;
        if (width != 0) {
            singlePixelSpacing = 1.0f / (float) width;
        } else {
            singlePixelSpacing = 1.0f / 2048.0f;
        }
        if (crossHatchSpacing < singlePixelSpacing) {
            this.crossHatchSpacing = singlePixelSpacing;
        }
    }
}
@ -0,0 +1,38 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

/**
 * exposure: The adjusted exposure (-10.0 - 10.0, with 0.0 as the default)
 */
public class GlExposureFilter extends GlFilter {

    private static final String EXPOSURE_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform highp float exposure;\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);\n" +
        " } ";

    public GlExposureFilter() {
        super(DEFAULT_VERTEX_SHADER, EXPOSURE_FRAGMENT_SHADER);
    }

    private float exposure = 1f;

    public void setExposure(float exposure) {
        this.exposure = exposure;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("exposure"), exposure);
    }
}
@ -0,0 +1,160 @@
package com.xypower.gpuv.egl.filter;

import android.content.res.Resources;
import android.opengl.GLES20;
import com.xypower.gpuv.egl.EglUtil;
import com.xypower.gpuv.egl.GlFramebufferObject;

import java.util.HashMap;

import static android.opengl.GLES20.GL_FLOAT;

public class GlFilter {
    public static final String DEFAULT_UNIFORM_SAMPLER = "sTexture";

    protected static final String DEFAULT_VERTEX_SHADER =
        "attribute highp vec4 aPosition;\n" +
        "attribute highp vec4 aTextureCoord;\n" +
        "varying highp vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "gl_Position = aPosition;\n" +
        "vTextureCoord = aTextureCoord.xy;\n" +
        "}\n";

    protected static final String DEFAULT_FRAGMENT_SHADER =
        "precision mediump float;\n" +
        "varying highp vec2 vTextureCoord;\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "void main() {\n" +
        "gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}\n";

    private static final float[] VERTICES_DATA = new float[]{
        // X, Y, Z, U, V
        -1.0f, 1.0f, 0.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f, 1.0f,
        -1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
        1.0f, -1.0f, 0.0f, 1.0f, 0.0f
    };

    private static final int FLOAT_SIZE_BYTES = 4;
    protected static final int VERTICES_DATA_POS_SIZE = 3;
    protected static final int VERTICES_DATA_UV_SIZE = 2;
    protected static final int VERTICES_DATA_STRIDE_BYTES = (VERTICES_DATA_POS_SIZE + VERTICES_DATA_UV_SIZE) * FLOAT_SIZE_BYTES;
    protected static final int VERTICES_DATA_POS_OFFSET = 0 * FLOAT_SIZE_BYTES;
    protected static final int VERTICES_DATA_UV_OFFSET = VERTICES_DATA_POS_OFFSET + VERTICES_DATA_POS_SIZE * FLOAT_SIZE_BYTES;

    private final String vertexShaderSource;
    private String fragmentShaderSource;

    private int program;

    private int vertexShader;
    private int fragmentShader;

    private int vertexBufferName;

    private final HashMap<String, Integer> handleMap = new HashMap<String, Integer>();

    public GlFilter() {
        this(DEFAULT_VERTEX_SHADER, DEFAULT_FRAGMENT_SHADER);
    }

    public GlFilter(final Resources res, final int vertexShaderSourceResId, final int fragmentShaderSourceResId) {
        this(res.getString(vertexShaderSourceResId), res.getString(fragmentShaderSourceResId));
    }

    public GlFilter(final String vertexShaderSource, final String fragmentShaderSource) {
        this.vertexShaderSource = vertexShaderSource;
        this.fragmentShaderSource = fragmentShaderSource;
    }

    public void setup() {
        release();
        vertexShader = EglUtil.loadShader(vertexShaderSource, GLES20.GL_VERTEX_SHADER);
        fragmentShader = EglUtil.loadShader(fragmentShaderSource, GLES20.GL_FRAGMENT_SHADER);
        program = EglUtil.createProgram(vertexShader, fragmentShader);
        vertexBufferName = EglUtil.createBuffer(VERTICES_DATA);

        getHandle("aPosition");
        getHandle("aTextureCoord");
        getHandle("sTexture");
    }

    public void setFragmentShaderSource(String fragmentShaderSource) {
        this.fragmentShaderSource = fragmentShaderSource;
    }

    public void setFrameSize(final int width, final int height) {
        // do nothing
    }

    public void release() {
        GLES20.glDeleteProgram(program);
        program = 0;
        GLES20.glDeleteShader(vertexShader);
        vertexShader = 0;
        GLES20.glDeleteShader(fragmentShader);
        fragmentShader = 0;
        GLES20.glDeleteBuffers(1, new int[]{vertexBufferName}, 0);
        vertexBufferName = 0;

        handleMap.clear();
    }

    public void draw(final int texName, final GlFramebufferObject fbo) {
        useProgram();

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferName);
        GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
        GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
        GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texName);
        GLES20.glUniform1i(getHandle("sTexture"), 0);

        onDraw();

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
        GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }

    protected void onDraw() {
    }

    protected final void useProgram() {
        GLES20.glUseProgram(program);
    }

    protected final int getVertexBufferName() {
        return vertexBufferName;
    }

    protected final int getHandle(final String name) {
        final Integer value = handleMap.get(name);
        if (value != null) {
            return value;
        }

        int location = GLES20.glGetAttribLocation(program, name);
        if (location == -1) {
            location = GLES20.glGetUniformLocation(program, name);
        }
        if (location == -1) {
            throw new IllegalStateException("Could not get attrib or uniform location for " + name);
        }
        handleMap.put(name, Integer.valueOf(location));
        return location;
    }
}
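Note (not part of this commit): the class above is the base contract for every filter in this diff. A minimal custom filter sketch, assuming a hypothetical class name and "brightness" uniform, would look like this -- pass the shader sources to super() and push uniforms from onDraw():

package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

// Illustrative sketch only; GlBrightnessFilter is not a file in this commit.
public class GlBrightnessFilter extends GlFilter {
    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "uniform lowp float brightness;" +
        "void main() {" +
        "lowp vec4 color = texture2D(sTexture, vTextureCoord);" +
        "gl_FragColor = vec4(color.rgb + vec3(brightness), color.w);" +
        "}";

    private float brightness = 0f;

    public GlBrightnessFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public void setBrightness(float brightness) {
        this.brightness = brightness;
    }

    @Override
    public void onDraw() {
        // getHandle() caches the uniform location looked up on the linked program
        GLES20.glUniform1f(getHandle("brightness"), brightness);
    }
}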
@ -0,0 +1,108 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;
import android.util.Pair;
import com.xypower.gpuv.egl.GlFramebufferObject;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
import static android.opengl.GLES20.GL_FRAMEBUFFER;

public class GlFilterGroup extends GlFilter {

    private final Collection<GlFilter> filters;

    private final ArrayList<Pair<GlFilter, GlFramebufferObject>> list = new ArrayList<Pair<GlFilter, GlFramebufferObject>>();

    public GlFilterGroup(final GlFilter... glFilters) {
        this(Arrays.asList(glFilters));
    }

    public GlFilterGroup(final Collection<GlFilter> glFilters) {
        filters = glFilters;
    }

    @Override
    public void setup() {
        super.setup();

        if (filters != null) {
            final int max = filters.size();
            int count = 0;

            for (final GlFilter shader : filters) {
                shader.setup();
                final GlFramebufferObject fbo;
                if ((count + 1) < max) {
                    fbo = new GlFramebufferObject();
                } else {
                    fbo = null;
                }
                list.add(Pair.create(shader, fbo));
                count++;
            }
        }
    }

    @Override
    public void release() {
        for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
            if (pair.first != null) {
                pair.first.release();
            }
            if (pair.second != null) {
                pair.second.release();
            }
        }
        list.clear();
        super.release();
    }

    @Override
    public void setFrameSize(final int width, final int height) {
        super.setFrameSize(width, height);

        for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
            if (pair.first != null) {
                pair.first.setFrameSize(width, height);
            }
            if (pair.second != null) {
                pair.second.setup(width, height);
            }
        }
    }

    private int prevTexName;

    @Override
    public void draw(final int texName, final GlFramebufferObject fbo) {
        prevTexName = texName;
        for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
            if (pair.second != null) {
                if (pair.first != null) {
                    pair.second.enable();
                    GLES20.glClear(GL_COLOR_BUFFER_BIT);

                    pair.first.draw(prevTexName, pair.second);
                }
                prevTexName = pair.second.getTexName();
            } else {
                if (fbo != null) {
                    fbo.enable();
                } else {
                    GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
                }

                if (pair.first != null) {
                    pair.first.draw(prevTexName, fbo);
                }
            }
        }
    }
}
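Note (not part of this commit): a minimal usage sketch for the group above, chaining two filters that do appear in this diff. The width, height and srcTextureName variables are placeholders, and all calls are assumed to run on the GL thread that owns the EGL context:

GlFilterGroup group = new GlFilterGroup(new GlSepiaFilter(), new GlMonochromeFilter());
group.setup();                      // compiles each filter and creates intermediate FBOs
group.setFrameSize(width, height);  // also sizes the per-filter framebuffer objects
group.draw(srcTextureName, null);   // null lets the last filter render to the default framebuffer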
@ -0,0 +1,36 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlGammaFilter extends GlFilter {
    private static final String GAMMA_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform lowp float gamma;\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n" +
        " }";

    public GlGammaFilter() {
        super(DEFAULT_VERTEX_SHADER, GAMMA_FRAGMENT_SHADER);
    }

    private float gamma = 1.2f;

    public void setGamma(float gamma) {
        this.gamma = gamma;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("gamma"), gamma);
    }
}
@ -0,0 +1,102 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlGaussianBlurFilter extends GlFilter {

    private static final String VERTEX_SHADER =
        "attribute vec4 aPosition;" +
        "attribute vec4 aTextureCoord;" +

        "const lowp int GAUSSIAN_SAMPLES = 9;" +

        "uniform highp float texelWidthOffset;" +
        "uniform highp float texelHeightOffset;" +
        "uniform highp float blurSize;" +

        "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +

        "void main() {" +
        "gl_Position = aPosition;" +
        "highp vec2 vTextureCoord = aTextureCoord.xy;" +

        // Calculate the positions for the blur
        "int multiplier = 0;" +
        "highp vec2 blurStep;" +
        "highp vec2 singleStepOffset = vec2(texelHeightOffset, texelWidthOffset) * blurSize;" +

        "for (lowp int i = 0; i < GAUSSIAN_SAMPLES; i++) {" +
        "multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));" +
        // Blur in x (horizontal)
        "blurStep = float(multiplier) * singleStepOffset;" +
        "blurCoordinates[i] = vTextureCoord.xy + blurStep;" +
        "}" +
        "}";

    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +

        "const lowp int GAUSSIAN_SAMPLES = 9;" +
        "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +

        "uniform lowp sampler2D sTexture;" +

        "void main() {" +
        "lowp vec4 sum = vec4(0.0);" +

        "sum += texture2D(sTexture, blurCoordinates[0]) * 0.05;" +
        "sum += texture2D(sTexture, blurCoordinates[1]) * 0.09;" +
        "sum += texture2D(sTexture, blurCoordinates[2]) * 0.12;" +
        "sum += texture2D(sTexture, blurCoordinates[3]) * 0.15;" +
        "sum += texture2D(sTexture, blurCoordinates[4]) * 0.18;" +
        "sum += texture2D(sTexture, blurCoordinates[5]) * 0.15;" +
        "sum += texture2D(sTexture, blurCoordinates[6]) * 0.12;" +
        "sum += texture2D(sTexture, blurCoordinates[7]) * 0.09;" +
        "sum += texture2D(sTexture, blurCoordinates[8]) * 0.05;" +

        "gl_FragColor = sum;" +
        "}";

    private float texelWidthOffset = 0.01f;
    private float texelHeightOffset = 0.01f;
    private float blurSize = 0.2f;

    public GlGaussianBlurFilter() {
        super(VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public float getTexelWidthOffset() {
        return texelWidthOffset;
    }

    public void setTexelWidthOffset(final float texelWidthOffset) {
        this.texelWidthOffset = texelWidthOffset;
    }

    public float getTexelHeightOffset() {
        return texelHeightOffset;
    }

    public void setTexelHeightOffset(final float texelHeightOffset) {
        this.texelHeightOffset = texelHeightOffset;
    }

    public float getBlurSize() {
        return blurSize;
    }

    public void setBlurSize(final float blurSize) {
        this.blurSize = blurSize;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
        GLES20.glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
        GLES20.glUniform1f(getHandle("blurSize"), blurSize);
    }
}
@ -0,0 +1,21 @@
package com.xypower.gpuv.egl.filter;

public class GlGrayScaleFilter extends GlFilter {

    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
        "void main() {" +
        "float luminance = dot(texture2D(sTexture, vTextureCoord).rgb, weight);" +
        "gl_FragColor = vec4(vec3(luminance), 1.0);" +
        "}";

    public GlGrayScaleFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }
}
@ -0,0 +1,53 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlHalftoneFilter extends GlFilter {

    private static final String HALFTONE_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " uniform lowp sampler2D sTexture;\n" +
        "uniform highp float fractionalWidthOfPixel;\n" +
        "uniform highp float aspectRatio;\n" +
        "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
        "void main()\n" +
        "{\n" +
        " highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n" +
        " highp vec2 samplePos = vTextureCoord - mod(vTextureCoord, sampleDivisor) + 0.5 * sampleDivisor;\n" +
        " highp vec2 textureCoordinateToUse = vec2(vTextureCoord.x, (vTextureCoord.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n" +
        " highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n" +
        " highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n" +
        " lowp vec3 sampledColor = texture2D(sTexture, samplePos).rgb;\n" +
        " highp float dotScaling = 1.0 - dot(sampledColor, W);\n" +
        " lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n" +
        " gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);\n" +
        "}";

    public GlHalftoneFilter() {
        super(DEFAULT_VERTEX_SHADER, HALFTONE_FRAGMENT_SHADER);
    }

    private float fractionalWidthOfPixel = 0.01f;
    private float aspectRatio = 1f;

    public void setFractionalWidthOfAPixel(float fractionalWidthOfAPixel) {
        this.fractionalWidthOfPixel = fractionalWidthOfAPixel;
    }

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);
        aspectRatio = (float) height / (float) width;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("fractionalWidthOfPixel"), fractionalWidthOfPixel);
        GLES20.glUniform1f(getHandle("aspectRatio"), aspectRatio);
    }
}
@ -0,0 +1,54 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlHazeFilter extends GlFilter {

    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying highp vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "uniform lowp float distance;" +
        "uniform highp float slope;" +

        "void main() {" +
        "highp vec4 color = vec4(1.0);" +

        "highp float d = vTextureCoord.y * slope + distance;" +

        "highp vec4 c = texture2D(sTexture, vTextureCoord);" +
        "c = (c - d * color) / (1.0 -d);" +
        "gl_FragColor = c;" + // consider using premultiply(c);
        "}";

    private float distance = 0.2f;
    private float slope = 0.0f;

    public GlHazeFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public float getDistance() {
        return distance;
    }

    public void setDistance(final float distance) {
        this.distance = distance;
    }

    public float getSlope() {
        return slope;
    }

    public void setSlope(final float slope) {
        this.slope = slope;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("distance"), distance);
        GLES20.glUniform1f(getHandle("slope"), slope);
    }
}
@ -0,0 +1,49 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlHighlightShadowFilter extends GlFilter {

    private static final String HIGHLIGHT_SHADOW_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " uniform lowp sampler2D sTexture;\n" +
        " varying vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp float shadows;\n" +
        " uniform lowp float highlights;\n" +
        " \n" +
        " const mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " lowp vec4 source = texture2D(sTexture, vTextureCoord);\n" +
        " mediump float luminance = dot(source.rgb, luminanceWeighting);\n" +
        " \n" +
        " mediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);\n" +
        " mediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);\n" +
        " lowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));\n" +
        " \n" +
        " gl_FragColor = vec4(result.rgb, source.a);\n" +
        " }";

    public GlHighlightShadowFilter() {
        super(DEFAULT_VERTEX_SHADER, HIGHLIGHT_SHADOW_FRAGMENT_SHADER);
    }

    private float shadows = 1f;
    private float highlights = 0f;

    public void setShadows(float shadows) {
        this.shadows = shadows;
    }

    public void setHighlights(float highlights) {
        this.highlights = highlights;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("shadows"), shadows);
        GLES20.glUniform1f(getHandle("highlights"), highlights);
    }
}
@ -0,0 +1,67 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlHueFilter extends GlFilter {

    private static final String HUE_FRAGMENT_SHADER = "" +
        "precision highp float;\n" +
        " varying vec2 vTextureCoord;\n" +
        "\n" +
        " uniform lowp sampler2D sTexture;\n" +
        "uniform mediump float hueAdjust;\n" +
        "const highp vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);\n" +
        "const highp vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);\n" +
        "const highp vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);\n" +
        "\n" +
        "const highp vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);\n" +
        "const highp vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);\n" +
        "const highp vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);\n" +
        "\n" +
        "void main ()\n" +
        "{\n" +
        " // Sample the input pixel\n" +
        " highp vec4 color = texture2D(sTexture, vTextureCoord);\n" +
        "\n" +
        " // Convert to YIQ\n" +
        " highp float YPrime = dot (color, kRGBToYPrime);\n" +
        " highp float I = dot (color, kRGBToI);\n" +
        " highp float Q = dot (color, kRGBToQ);\n" +
        "\n" +
        " // Calculate the hue and chroma\n" +
        " highp float hue = atan (Q, I);\n" +
        " highp float chroma = sqrt (I * I + Q * Q);\n" +
        "\n" +
        " // Make the user's adjustments\n" +
        " hue += (-hueAdjust); //why negative rotation?\n" +
        "\n" +
        " // Convert back to YIQ\n" +
        " Q = chroma * sin (hue);\n" +
        " I = chroma * cos (hue);\n" +
        "\n" +
        " // Convert back to RGB\n" +
        " highp vec4 yIQ = vec4 (YPrime, I, Q, 0.0);\n" +
        " color.r = dot (yIQ, kYIQToR);\n" +
        " color.g = dot (yIQ, kYIQToG);\n" +
        " color.b = dot (yIQ, kYIQToB);\n" +
        "\n" +
        " // Save the result\n" +
        " gl_FragColor = color;\n" +
        "}\n";

    public GlHueFilter() {
        super(DEFAULT_VERTEX_SHADER, HUE_FRAGMENT_SHADER);
    }

    private float hue = 90f;

    public void setHue(float hue) {
        this.hue = hue;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("hueAdjust"), hue);
    }
}
@ -0,0 +1,18 @@
package com.xypower.gpuv.egl.filter;

public class GlInvertFilter extends GlFilter {
    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "void main() {" +
        "lowp vec4 color = texture2D(sTexture, vTextureCoord);" +
        "gl_FragColor = vec4((1.0 - color.rgb), color.w);" +
        "}";

    public GlInvertFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }
}
@ -0,0 +1,91 @@
package com.xypower.gpuv.egl.filter;

import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;

import com.xypower.gpuv.egl.EglUtil;

public class GlLookUpTableFilter extends GlFilter {

    private final static String FRAGMENT_SHADER =
        "precision mediump float;" +
        "uniform mediump sampler2D lutTexture; \n" +
        "uniform lowp sampler2D sTexture; \n" +
        "varying highp vec2 vTextureCoord; \n" +
        "vec4 sampleAs3DTexture(vec3 uv) {\n" +
        " float width = 16.;\n" +
        " float sliceSize = 1.0 / width;\n" +
        " float slicePixelSize = sliceSize / width;\n" +
        " float sliceInnerSize = slicePixelSize * (width - 1.0);\n" +
        " float zSlice0 = min(floor(uv.z * width), width - 1.0);\n" +
        " float zSlice1 = min(zSlice0 + 1.0, width - 1.0);\n" +
        " float xOffset = slicePixelSize * 0.5 + uv.x * sliceInnerSize;\n" +
        " float s0 = xOffset + (zSlice0 * sliceSize);\n" +
        " float s1 = xOffset + (zSlice1 * sliceSize);\n" +
        " vec4 slice0Color = texture2D(lutTexture, vec2(s0, uv.y));\n" +
        " vec4 slice1Color = texture2D(lutTexture, vec2(s1, uv.y));\n" +
        " float zOffset = mod(uv.z * width, 1.0);\n" +
        " vec4 result = mix(slice0Color, slice1Color, zOffset);\n" +
        " return result;\n" +
        "}\n" +
        "void main() {\n" +
        " vec4 pixel = texture2D(sTexture, vTextureCoord);\n" +
        " vec4 gradedPixel = sampleAs3DTexture(pixel.rgb);\n" +
        " gradedPixel.a = pixel.a;\n" +
        " pixel = gradedPixel;\n" +
        " gl_FragColor = pixel;\n " +
        "}";

    public GlLookUpTableFilter(Bitmap bitmap) {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
        this.lutTexture = bitmap;
        hTex = EglUtil.NO_TEXTURE;
    }

    public GlLookUpTableFilter(Resources resources, int fxID) {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
        this.lutTexture = BitmapFactory.decodeResource(resources, fxID);
        hTex = EglUtil.NO_TEXTURE;
    }

    private int hTex;

    private Bitmap lutTexture;

    @Override
    public void onDraw() {
        int offsetDepthMapTextureUniform = getHandle("lutTexture");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, hTex);
        GLES20.glUniform1i(offsetDepthMapTextureUniform, 3);
    }

    @Override
    public void setup() {
        super.setup();
        loadTexture();
    }

    private void loadTexture() {
        if (hTex == EglUtil.NO_TEXTURE) {
            hTex = EglUtil.loadTexture(lutTexture, EglUtil.NO_TEXTURE, false);
        }
    }

    public void releaseLutBitmap() {
        if (lutTexture != null && !lutTexture.isRecycled()) {
            lutTexture.recycle();
            lutTexture = null;
        }
    }

    public void reset() {
        hTex = EglUtil.NO_TEXTURE;
        hTex = EglUtil.loadTexture(lutTexture, EglUtil.NO_TEXTURE, false);
    }
}
@ -0,0 +1,26 @@
package com.xypower.gpuv.egl.filter;

public class GlLuminanceFilter extends GlFilter {

    private static final String LUMINANCE_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        "\n" +
        " varying vec2 vTextureCoord;\n" +
        "\n" +
        " uniform lowp sampler2D sTexture;\n" +
        "\n" +
        "// Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n" +
        "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
        "\n" +
        "void main()\n" +
        "{\n" +
        " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " float luminance = dot(textureColor.rgb, W);\n" +
        " \n" +
        " gl_FragColor = vec4(vec3(luminance), textureColor.a);\n" +
        "}";

    public GlLuminanceFilter() {
        super(DEFAULT_VERTEX_SHADER, LUMINANCE_FRAGMENT_SHADER);
    }
}
@ -0,0 +1,39 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlLuminanceThresholdFilter extends GlFilter {

    private static final String LUMINANCE_THRESHOLD_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        "varying highp vec2 vTextureCoord;\n" +
        "\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "uniform highp float threshold;\n" +
        "\n" +
        "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
        "\n" +
        "void main()\n" +
        "{\n" +
        " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " highp float luminance = dot(textureColor.rgb, W);\n" +
        " highp float thresholdResult = step(threshold, luminance);\n" +
        " \n" +
        " gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);\n" +
        "}";

    public GlLuminanceThresholdFilter() {
        super(DEFAULT_VERTEX_SHADER, LUMINANCE_THRESHOLD_FRAGMENT_SHADER);
    }

    private float threshold = 0.5f;

    public void setThreshold(float threshold) {
        this.threshold = threshold;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("threshold"), threshold);
    }
}
@ -0,0 +1,54 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlMonochromeFilter extends GlFilter {

    private static final String FRAGMENT_SHADER =
        "precision lowp float;" +

        "varying highp vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "uniform float intensity;" +

        "const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);" +

        "void main() {" +
        "lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);" +
        "float luminance = dot(textureColor.rgb, luminanceWeighting);" +

        "lowp vec4 desat = vec4(vec3(luminance), 1.0);" +

        "lowp vec4 outputColor = vec4(" +
        "(desat.r < 0.5 ? (2.0 * desat.r * 0.6) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - 0.6)))," +
        "(desat.g < 0.5 ? (2.0 * desat.g * 0.45) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - 0.45)))," +
        "(desat.b < 0.5 ? (2.0 * desat.b * 0.3) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - 0.3)))," +
        "1.0" +
        ");" +

        "gl_FragColor = vec4(mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);" +
        "}";

    private float intensity = 1.0f;

    public GlMonochromeFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public float getIntensity() {
        return intensity;
    }

    public void setIntensity(float intensity) {
        this.intensity = intensity;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("intensity"), intensity);
    }
}
@ -0,0 +1,40 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

/**
 * Adjusts the alpha channel of the incoming image
 * opacity: The value to multiply the incoming alpha channel for each pixel by (0.0 - 1.0, with 1.0 as the default)
 */
public class GlOpacityFilter extends GlFilter {

    private static final String OPACITY_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying highp vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform lowp float opacity;\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n" +
        " }\n";

    public GlOpacityFilter() {
        super(DEFAULT_VERTEX_SHADER, OPACITY_FRAGMENT_SHADER);
    }

    private float opacity = 1f;

    public void setOpacity(float opacity) {
        this.opacity = opacity;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("opacity"), opacity);
    }
}
@ -0,0 +1,99 @@
package com.xypower.gpuv.egl.filter;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Size;

public abstract class GlOverlayFilter extends GlFilter {

    private int[] textures = new int[1];

    private Bitmap bitmap = null;

    protected Size inputResolution = new Size(1280, 720);

    public GlOverlayFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    private final static String FRAGMENT_SHADER =
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "uniform lowp sampler2D oTexture;\n" +
        "void main() {\n" +
        " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " lowp vec4 textureColor2 = texture2D(oTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);\n" +
        "}\n";

    public void setResolution(Size resolution) {
        this.inputResolution = resolution;
    }

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);
        setResolution(new Size(width, height));
    }

    private void createBitmap() {
        releaseBitmap(bitmap);
        bitmap = Bitmap.createBitmap(inputResolution.getWidth(), inputResolution.getHeight(), Bitmap.Config.ARGB_8888);
    }

    @Override
    public void setup() {
        super.setup();// 1
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        createBitmap();
    }

    @Override
    public void onDraw() {
        if (bitmap == null) {
            createBitmap();
        }
        if (bitmap.getWidth() != inputResolution.getWidth() || bitmap.getHeight() != inputResolution.getHeight()) {
            createBitmap();
        }

        bitmap.eraseColor(Color.argb(0, 0, 0, 0));
        Canvas bitmapCanvas = new Canvas(bitmap);
        bitmapCanvas.scale(1, -1, bitmapCanvas.getWidth() / 2, bitmapCanvas.getHeight() / 2);
        drawCanvas(bitmapCanvas);

        int offsetDepthMapTextureUniform = getHandle("oTexture");// 3

        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        if (bitmap != null && !bitmap.isRecycled()) {
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap, 0);
        }

        GLES20.glUniform1i(offsetDepthMapTextureUniform, 3);
    }

    protected abstract void drawCanvas(Canvas canvas);

    public static void releaseBitmap(Bitmap bitmap) {
        if (bitmap != null && !bitmap.isRecycled()) {
            bitmap.recycle();
            bitmap = null;
        }
    }
}
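Note (not part of this commit): GlOverlayFilter is the hook this change uses for OSD-style overlays -- a subclass draws into the per-frame Canvas and the result is alpha-blended over the video. A minimal sketch, with the class name GlOsdFilter and the drawn text chosen purely for illustration:

package com.xypower.gpuv.egl.filter;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;

// Illustrative sketch only; not a file in this commit.
public class GlOsdFilter extends GlOverlayFilter {
    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);

    public GlOsdFilter() {
        paint.setColor(Color.WHITE);
        paint.setTextSize(48f);
    }

    @Override
    protected void drawCanvas(Canvas canvas) {
        // onDraw() has already flipped the canvas vertically, so normal
        // top-left coordinates can be used when drawing the overlay.
        canvas.drawText(new java.util.Date().toString(), 32f, 64f, paint);
    }
}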
@ -0,0 +1,52 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlPixelationFilter extends GlFilter {

    private static final String PIXELATION_FRAGMENT_SHADER = "" +
        "precision highp float;\n" +

        "varying highp vec2 vTextureCoord;\n" +

        "uniform float imageWidthFactor;\n" +
        "uniform float imageHeightFactor;\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "uniform float pixel;\n" +

        "void main()\n" +
        "{\n" +
        " vec2 uv = vTextureCoord.xy;\n" +
        " float dx = pixel * imageWidthFactor;\n" +
        " float dy = pixel * imageHeightFactor;\n" +
        " vec2 coord = vec2(dx * floor(uv.x / dx), dy * floor(uv.y / dy));\n" +
        " vec3 tc = texture2D(sTexture, coord).xyz;\n" +
        " gl_FragColor = vec4(tc, 1.0);\n" +
        "}";

    public GlPixelationFilter() {
        super(DEFAULT_VERTEX_SHADER, PIXELATION_FRAGMENT_SHADER);
    }

    private float pixel = 1f;
    private float imageWidthFactor = 1f / 720;
    private float imageHeightFactor = 1f / 720;

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);
        imageWidthFactor = 1f / width;
        imageHeightFactor = 1f / height;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("pixel"), pixel);
        GLES20.glUniform1f(getHandle("imageWidthFactor"), imageWidthFactor);
        GLES20.glUniform1f(getHandle("imageHeightFactor"), imageHeightFactor);
    }

    public void setPixel(final float pixel) {
        this.pixel = pixel;
    }
}
@ -0,0 +1,41 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlPosterizeFilter extends GlFilter {

    private static final String POSTERIZE_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        "\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "uniform highp float colorLevels;\n" +
        "\n" +
        "void main()\n" +
        "{\n" +
        " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;\n" +
        "}";

    public GlPosterizeFilter() {
        super(DEFAULT_VERTEX_SHADER, POSTERIZE_FRAGMENT_SHADER);
    }

    private int colorLevels = 10;

    public void setColorLevels(int colorLevels) {
        if (colorLevels < 0) {
            this.colorLevels = 0;
        } else if (colorLevels > 256) {
            this.colorLevels = 256;
        } else {
            this.colorLevels = colorLevels;
        }
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("colorLevels"), colorLevels);
    }
}
@ -0,0 +1,55 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

/**
 * Adjusts the individual RGB channels of an image.
 * red, green, blue: normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
 */
public class GlRGBFilter extends GlFilter {

    private static final String RGB_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform highp float red;\n" +
        " uniform highp float green;\n" +
        " uniform highp float blue;\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " \n" +
        " gl_FragColor = vec4(textureColor.r * red, textureColor.g * green, textureColor.b * blue, 1.0);\n" +
        " }\n";

    public GlRGBFilter() {
        super(DEFAULT_VERTEX_SHADER, RGB_FRAGMENT_SHADER);
    }

    private float red = 1f;
    private float green = 1f;
    private float blue = 1f;

    public void setRed(float red) {
        this.red = red;
    }

    public void setGreen(float green) {
        this.green = green;
    }

    public void setBlue(float blue) {
        this.blue = blue;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("red"), red);
        GLES20.glUniform1f(getHandle("green"), green);
        GLES20.glUniform1f(getHandle("blue"), blue);
    }
}
@ -0,0 +1,41 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlSaturationFilter extends GlFilter {
    private static final String SATURATION_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        " \n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform lowp float saturation;\n" +
        " \n" +
        " const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n" +
        " \n" +
        " void main()\n" +
        " {\n" +
        " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " lowp float luminance = dot(textureColor.rgb, luminanceWeighting);\n" +
        " lowp vec3 greyScaleColor = vec3(luminance);\n" +
        " \n" +
        " gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);\n" +
        " \n" +
        " }";

    public GlSaturationFilter() {
        super(DEFAULT_VERTEX_SHADER, SATURATION_FRAGMENT_SHADER);
    }

    private float saturation = 1f;

    public void setSaturation(float saturation) {
        this.saturation = saturation;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("saturation"), saturation);
    }
}
@ -0,0 +1,20 @@
package com.xypower.gpuv.egl.filter;

public class GlSepiaFilter extends GlFilter {
    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
        "void main() {" +
        " vec4 FragColor = texture2D(sTexture, vTextureCoord);\n" +
        " gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));\n" +
        " gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));\n" +
        " gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));\n" +
        "}";

    public GlSepiaFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }
}
@ -0,0 +1,96 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlSharpenFilter extends GlFilter {

    private static final String VERTEX_SHADER =
        "attribute vec4 aPosition;" +
        "attribute vec4 aTextureCoord;" +

        "uniform float imageWidthFactor;" +
        "uniform float imageHeightFactor;" +
        "uniform float sharpness;" +

        "varying highp vec2 textureCoordinate;" +
        "varying highp vec2 leftTextureCoordinate;" +
        "varying highp vec2 rightTextureCoordinate;" +
        "varying highp vec2 topTextureCoordinate;" +
        "varying highp vec2 bottomTextureCoordinate;" +

        "varying float centerMultiplier;" +
        "varying float edgeMultiplier;" +

        "void main() {" +
        "gl_Position = aPosition;" +

        "mediump vec2 widthStep = vec2(imageWidthFactor, 0.0);" +
        "mediump vec2 heightStep = vec2(0.0, imageHeightFactor);" +

        "textureCoordinate = aTextureCoord.xy;" +
        "leftTextureCoordinate = textureCoordinate - widthStep;" +
        "rightTextureCoordinate = textureCoordinate + widthStep;" +
        "topTextureCoordinate = textureCoordinate + heightStep;" +
        "bottomTextureCoordinate = textureCoordinate - heightStep;" +

        "centerMultiplier = 1.0 + 4.0 * sharpness;" +
        "edgeMultiplier = sharpness;" +
        "}";

    private static final String FRAGMENT_SHADER =
        "precision highp float;" +

        "uniform lowp sampler2D sTexture;" +

        "varying highp vec2 textureCoordinate;" +
        "varying highp vec2 leftTextureCoordinate;" +
        "varying highp vec2 rightTextureCoordinate;" +
        "varying highp vec2 topTextureCoordinate;" +
        "varying highp vec2 bottomTextureCoordinate;" +

        "varying float centerMultiplier;" +
        "varying float edgeMultiplier;" +

        "void main() {" +
        "mediump vec3 textureColor = texture2D(sTexture, textureCoordinate).rgb;" +
        "mediump vec3 leftTextureColor = texture2D(sTexture, leftTextureCoordinate).rgb;" +
        "mediump vec3 rightTextureColor = texture2D(sTexture, rightTextureCoordinate).rgb;" +
        "mediump vec3 topTextureColor = texture2D(sTexture, topTextureCoordinate).rgb;" +
        "mediump vec3 bottomTextureColor = texture2D(sTexture, bottomTextureCoordinate).rgb;" +

        "gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(sTexture, bottomTextureCoordinate).w);" +
        "}";

    private float imageWidthFactor = 0.004f;
    private float imageHeightFactor = 0.004f;
    private float sharpness = 1.f;

    public GlSharpenFilter() {
        super(VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public float getSharpness() {
        return sharpness;
    }

    public void setSharpness(final float sharpness) {
        this.sharpness = sharpness;
    }

    @Override
    public void setFrameSize(final int width, final int height) {
        imageWidthFactor = 1f / width;
        imageHeightFactor = 1f / height;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("imageWidthFactor"), imageWidthFactor);
        GLES20.glUniform1f(getHandle("imageHeightFactor"), imageHeightFactor);
        GLES20.glUniform1f(getHandle("sharpness"), sharpness);
    }
}
@ -0,0 +1,40 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlSolarizeFilter extends GlFilter {

    private static final String SOLATIZE_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        "\n" +
        " uniform lowp sampler2D sTexture;\n" +
        " uniform highp float threshold;\n" +
        "\n" +
        " const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
        "\n" +
        "void main()\n" +
        "{\n" +
        " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        " highp float luminance = dot(textureColor.rgb, W);\n" +
        " highp float thresholdResult = step(luminance, threshold);\n" +
        " highp vec3 finalColor = abs(thresholdResult - textureColor.rgb);\n" +
        " \n" +
        " gl_FragColor = vec4(finalColor, textureColor.w);\n" +
        "}";

    public GlSolarizeFilter() {
        super(DEFAULT_VERTEX_SHADER, SOLATIZE_FRAGMENT_SHADER);
    }

    private float threshold = 0.5f;

    public void setThreshold(float threshold) {
        this.threshold = threshold;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("threshold"), threshold);
    }
}
@ -0,0 +1,73 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlSphereRefractionFilter extends GlFilter {

    private static final String FRAGMENT_SHADER =
        "precision mediump float;" +

        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "uniform highp vec2 center;" +
        "uniform highp float radius;" +
        "uniform highp float aspectRatio;" +
        "uniform highp float refractiveIndex;" +

        "void main() {" +
        "highp vec2 textureCoordinateToUse = vec2(vTextureCoord.x, (vTextureCoord.y * aspectRatio + 0.5 - 0.5 * aspectRatio));" +
        "highp float distanceFromCenter = distance(center, textureCoordinateToUse);" +
        "lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);" +

        "distanceFromCenter = distanceFromCenter / radius;" +

        "highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);" +
        "highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));" +

        "highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);" +

        "gl_FragColor = texture2D(sTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;" +
        "}";

    private float centerX = 0.5f;
    private float centerY = 0.5f;
    private float radius = 0.5f;
    private float aspectRatio = 1.0f;
    private float refractiveIndex = 0.71f;

    public GlSphereRefractionFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public void setCenterX(float centerX) {
        this.centerX = centerX;
    }

    public void setCenterY(float centerY) {
        this.centerY = centerY;
    }

    public void setRadius(float radius) {
        this.radius = radius;
    }

    public void setAspectRatio(float aspectRatio) {
        this.aspectRatio = aspectRatio;
    }

    public void setRefractiveIndex(float refractiveIndex) {
        this.refractiveIndex = refractiveIndex;
    }

    //////////////////////////////////////////////////////////////////////////

    @Override
    public void onDraw() {
        GLES20.glUniform2f(getHandle("center"), centerX, centerY);
        GLES20.glUniform1f(getHandle("radius"), radius);
        GLES20.glUniform1f(getHandle("aspectRatio"), aspectRatio);
        GLES20.glUniform1f(getHandle("refractiveIndex"), refractiveIndex);
    }
}
@ -0,0 +1,65 @@
package com.xypower.gpuv.egl.filter;

import android.graphics.PointF;
import android.opengl.GLES20;

public class GlSwirlFilter extends GlFilter {

    private static final String SWIRL_FRAGMENT_SHADER = "" +
        "precision mediump float;" +
        " varying vec2 vTextureCoord;\n" +
        "\n" +
        " uniform lowp sampler2D sTexture;\n" +
        "\n" +
        "uniform highp vec2 center;\n" +
        "uniform highp float radius;\n" +
        "uniform highp float angle;\n" +
        "\n" +
        "void main()\n" +
        "{\n" +
        "highp vec2 textureCoordinateToUse = vTextureCoord;\n" +
        "highp float dist = distance(center, vTextureCoord);\n" +
        "if (dist < radius)\n" +
        "{\n" +
        "textureCoordinateToUse -= center;\n" +
        "highp float percent = (radius - dist) / radius;\n" +
        "highp float theta = percent * percent * angle * 8.0;\n" +
        "highp float s = sin(theta);\n" +
        "highp float c = cos(theta);\n" +
        "textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));\n" +
        "textureCoordinateToUse += center;\n" +
        "}\n" +
        "\n" +
        "gl_FragColor = texture2D(sTexture, textureCoordinateToUse );\n" +
        "\n" +
        "}\n";

    public GlSwirlFilter() {
        super(DEFAULT_VERTEX_SHADER, SWIRL_FRAGMENT_SHADER);
    }

    private float angle = 1.0f;
    private float radius = 0.5f;
    private PointF center = new PointF(0.5f, 0.5f);

    public void setAngle(float angle) {
        this.angle = angle;
    }

    public void setRadius(float radius) {
        this.radius = radius;
    }

    public void setCenter(PointF center) {
        this.center = center;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform2f(getHandle("center"), center.x, center.y);
        GLES20.glUniform1f(getHandle("radius"), radius);
        GLES20.glUniform1f(getHandle("angle"), angle);
    }
}
@ -0,0 +1,83 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;


public class GlThreex3TextureSamplingFilter extends GlFilter {
    private static final String THREE_X_THREE_TEXTURE_SAMPLING_VERTEX_SHADER =
            "attribute vec4 aPosition;" +
            "attribute vec4 aTextureCoord;" +

            "uniform highp float texelWidth;" +
            "uniform highp float texelHeight;" +

            "varying highp vec2 textureCoordinate;" +
            "varying highp vec2 leftTextureCoordinate;" +
            "varying highp vec2 rightTextureCoordinate;" +

            "varying highp vec2 topTextureCoordinate;" +
            "varying highp vec2 topLeftTextureCoordinate;" +
            "varying highp vec2 topRightTextureCoordinate;" +

            "varying highp vec2 bottomTextureCoordinate;" +
            "varying highp vec2 bottomLeftTextureCoordinate;" +
            "varying highp vec2 bottomRightTextureCoordinate;" +

            "void main() {" +
            "gl_Position = aPosition;" +

            "vec2 widthStep = vec2(texelWidth, 0.0);" +
            "vec2 heightStep = vec2(0.0, texelHeight);" +
            "vec2 widthHeightStep = vec2(texelWidth, texelHeight);" +
            "vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);" +

            "textureCoordinate = aTextureCoord.xy;" +
            "leftTextureCoordinate = textureCoordinate - widthStep;" +
            "rightTextureCoordinate = textureCoordinate + widthStep;" +

            "topTextureCoordinate = textureCoordinate - heightStep;" +
            "topLeftTextureCoordinate = textureCoordinate - widthHeightStep;" +
            "topRightTextureCoordinate = textureCoordinate + widthNegativeHeightStep;" +

            "bottomTextureCoordinate = textureCoordinate + heightStep;" +
            "bottomLeftTextureCoordinate = textureCoordinate - widthNegativeHeightStep;" +
            "bottomRightTextureCoordinate = textureCoordinate + widthHeightStep;" +
            "}";

    private float texelWidth;
    private float texelHeight;

    public GlThreex3TextureSamplingFilter(String fragmentShaderSource) {
        super(THREE_X_THREE_TEXTURE_SAMPLING_VERTEX_SHADER, fragmentShaderSource);
    }

    public float getTexelWidth() {
        return texelWidth;
    }

    public void setTexelWidth(float texelWidth) {
        this.texelWidth = texelWidth;
    }

    public float getTexelHeight() {
        return texelHeight;
    }

    public void setTexelHeight(float texelHeight) {
        this.texelHeight = texelHeight;
    }

    @Override
    public void setFrameSize(final int width, final int height) {
        texelWidth = 1f / width;
        texelHeight = 1f / height;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("texelWidth"), texelWidth);
        GLES20.glUniform1f(getHandle("texelHeight"), texelHeight);
    }

}
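Subclasses only need to supply a fragment shader that reads the nine varyings declared by this vertex shader; texelWidth and texelHeight are filled in automatically by setFrameSize. For illustration only (this class is not part of the commit), a hypothetical subclass that averages the 3x3 neighbourhood could look like:

package com.xypower.gpuv.egl.filter;

// Illustrative example, not part of this commit: a box blur built on the 3x3 sampling base class.
public class GlBoxBlur3x3Filter extends GlThreex3TextureSamplingFilter {

    private static final String FRAGMENT_SHADER =
            "precision mediump float;\n" +
            "uniform lowp sampler2D sTexture;\n" +
            "varying vec2 textureCoordinate;\n" +
            "varying vec2 leftTextureCoordinate;\n" +
            "varying vec2 rightTextureCoordinate;\n" +
            "varying vec2 topTextureCoordinate;\n" +
            "varying vec2 topLeftTextureCoordinate;\n" +
            "varying vec2 topRightTextureCoordinate;\n" +
            "varying vec2 bottomTextureCoordinate;\n" +
            "varying vec2 bottomLeftTextureCoordinate;\n" +
            "varying vec2 bottomRightTextureCoordinate;\n" +
            "void main() {\n" +
            "    vec4 sum = texture2D(sTexture, textureCoordinate)\n" +
            "             + texture2D(sTexture, leftTextureCoordinate)\n" +
            "             + texture2D(sTexture, rightTextureCoordinate)\n" +
            "             + texture2D(sTexture, topTextureCoordinate)\n" +
            "             + texture2D(sTexture, topLeftTextureCoordinate)\n" +
            "             + texture2D(sTexture, topRightTextureCoordinate)\n" +
            "             + texture2D(sTexture, bottomTextureCoordinate)\n" +
            "             + texture2D(sTexture, bottomLeftTextureCoordinate)\n" +
            "             + texture2D(sTexture, bottomRightTextureCoordinate);\n" +
            "    gl_FragColor = sum / 9.0;\n" +
            "}";

    public GlBoxBlur3x3Filter() {
        super(FRAGMENT_SHADER);
    }
}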
@ -0,0 +1,371 @@
package com.xypower.gpuv.egl.filter;

import android.graphics.Point;
import android.graphics.PointF;
import android.opengl.GLES20;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.LinkedList;


public class GlToneCurveFilter extends GlFilter {

    private final static String FRAGMENT_SHADER =
            "precision mediump float;\n" +
            " varying highp vec2 vTextureCoord;\n" +
            " uniform lowp sampler2D sTexture;\n" +
            " uniform mediump sampler2D toneCurveTexture;\n" +
            "\n" +
            " void main()\n" +
            " {\n" +
            " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
            " lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;\n" +
            " lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;\n" +
            " lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;\n" +
            "\n" +
            " gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\n" +
            " }";

    private PointF[] rgbCompositeControlPoints;
    private PointF[] redControlPoints;
    private PointF[] greenControlPoints;
    private PointF[] blueControlPoints;

    private ArrayList<Float> rgbCompositeCurve;
    private ArrayList<Float> redCurve;
    private ArrayList<Float> greenCurve;
    private ArrayList<Float> blueCurve;

    private final LinkedList<Runnable> runOnDraw;

    private int[] textures = new int[1];

    private byte[] toneCurveByteArray;


    public GlToneCurveFilter(InputStream input) {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
        PointF[] defaultCurvePoints = new PointF[]{new PointF(0.0f, 0.0f), new PointF(0.5f, 0.5f), new PointF(1.0f, 1.0f)};
        rgbCompositeControlPoints = defaultCurvePoints;
        redControlPoints = defaultCurvePoints;
        greenControlPoints = defaultCurvePoints;
        blueControlPoints = defaultCurvePoints;

        runOnDraw = new LinkedList<>();

        setFromCurveFileInputStream(input);

        setRgbCompositeControlPoints(rgbCompositeControlPoints);
        setRedControlPoints(redControlPoints);
        setGreenControlPoints(greenControlPoints);
        setBlueControlPoints(blueControlPoints);
    }

    @Override
    public void setup() {
        super.setup(); // 1
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        while (!runOnDraw.isEmpty()) {
            runOnDraw.removeFirst().run();
        }
    }

    @Override
    public void onDraw() {
        int offsetDepthMapTextureUniform = getHandle("toneCurveTexture"); // 3

        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(toneCurveByteArray));

        GLES20.glUniform1i(offsetDepthMapTextureUniform, 3);
    }

    private void setFromCurveFileInputStream(InputStream input) {
        try {
            int version = readShort(input);
            int totalCurves = readShort(input);

            ArrayList<PointF[]> curves = new ArrayList<PointF[]>(totalCurves);
            float pointRate = 1.0f / 255;

            for (int i = 0; i < totalCurves; i++) {
                // 2 bytes, count of points in the curve (short integer from 2...19)
                short pointCount = readShort(input);

                PointF[] points = new PointF[pointCount];

                // point count * 4
                // Curve points. Each curve point is a pair of short integers where
                // the first number is the output value (vertical coordinate on the
                // Curves dialog graph) and the second is the input value. All coordinates have range 0 to 255.
                for (int j = 0; j < pointCount; j++) {
                    short y = readShort(input);
                    short x = readShort(input);

                    points[j] = new PointF(x * pointRate, y * pointRate);
                }

                curves.add(points);
            }
            input.close();

            rgbCompositeControlPoints = curves.get(0);
            redControlPoints = curves.get(1);
            greenControlPoints = curves.get(2);
            blueControlPoints = curves.get(3);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private short readShort(InputStream input) throws IOException {
        // .acv curve files store 16-bit big-endian values.
        return (short) (input.read() << 8 | input.read());
    }

    private void setRgbCompositeControlPoints(PointF[] points) {
        rgbCompositeControlPoints = points;
        rgbCompositeCurve = createSplineCurve(rgbCompositeControlPoints);
        updateToneCurveTexture();
    }

    private void setRedControlPoints(PointF[] points) {
        redControlPoints = points;
        redCurve = createSplineCurve(redControlPoints);
        updateToneCurveTexture();
    }

    private void setGreenControlPoints(PointF[] points) {
        greenControlPoints = points;
        greenCurve = createSplineCurve(greenControlPoints);
        updateToneCurveTexture();
    }

    private void setBlueControlPoints(PointF[] points) {
        blueControlPoints = points;
        blueCurve = createSplineCurve(blueControlPoints);
        updateToneCurveTexture();
    }

    private void runOnDraw(final Runnable runnable) {
        synchronized (runOnDraw) {
            runOnDraw.addLast(runnable);
        }
    }

    private void updateToneCurveTexture() {
        runOnDraw(new Runnable() {
            @Override
            public void run() {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

                if ((redCurve.size() >= 256) && (greenCurve.size() >= 256) && (blueCurve.size() >= 256) && (rgbCompositeCurve.size() >= 256)) {
                    toneCurveByteArray = new byte[256 * 4];
                    for (int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++) {
                        // BGRA for upload to texture
                        toneCurveByteArray[currentCurveIndex * 4 + 2] = (byte) ((int) Math.min(Math.max(currentCurveIndex + blueCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
                        toneCurveByteArray[currentCurveIndex * 4 + 1] = (byte) ((int) Math.min(Math.max(currentCurveIndex + greenCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
                        toneCurveByteArray[currentCurveIndex * 4] = (byte) ((int) Math.min(Math.max(currentCurveIndex + redCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
                        toneCurveByteArray[currentCurveIndex * 4 + 3] = (byte) (255 & 0xff);
                    }

                    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(toneCurveByteArray));
                }
                // Buffer pixels!
                // GLES20.glTexImage2D(int target,
                //     int level,
                //     int internalformat,
                //     int width,
                //     int height,
                //     int border,
                //     int format,
                //     int type,
                //     java.nio.Buffer pixels);
            }
        });
    }

    private ArrayList<Float> createSplineCurve(PointF[] points) {
        if (points == null || points.length <= 0) {
            return null;
        }

        // Sort the array
        PointF[] pointsSorted = points.clone();
        Arrays.sort(pointsSorted, new Comparator<PointF>() {
            @Override
            public int compare(PointF point1, PointF point2) {
                if (point1.x < point2.x) {
                    return -1;
                } else if (point1.x > point2.x) {
                    return 1;
                } else {
                    return 0;
                }
            }
        });

        // Convert from (0, 1) to (0, 255).
        Point[] convertedPoints = new Point[pointsSorted.length];
        for (int i = 0; i < points.length; i++) {
            PointF point = pointsSorted[i];
            convertedPoints[i] = new Point((int) (point.x * 255), (int) (point.y * 255));
        }

        ArrayList<Point> splinePoints = createSplineCurve2(convertedPoints);

        // If we have a first point like (0.3, 0) we'll be missing some points at the beginning
        // that should be 0.
        Point firstSplinePoint = splinePoints.get(0);
        if (firstSplinePoint.x > 0) {
            for (int i = firstSplinePoint.x; i >= 0; i--) {
                splinePoints.add(0, new Point(i, 0));
            }
        }

        // Insert points similarly at the end, if necessary.
        Point lastSplinePoint = splinePoints.get(splinePoints.size() - 1);
        if (lastSplinePoint.x < 255) {
            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
                splinePoints.add(new Point(i, 255));
            }
        }

        // Prepare the spline points.
        ArrayList<Float> preparedSplinePoints = new ArrayList<Float>(splinePoints.size());
        for (Point newPoint : splinePoints) {
            Point origPoint = new Point(newPoint.x, newPoint.x);

            float distance = (float) Math.sqrt(Math.pow((origPoint.x - newPoint.x), 2.0) + Math.pow((origPoint.y - newPoint.y), 2.0));

            if (origPoint.y > newPoint.y) {
                distance = -distance;
            }

            preparedSplinePoints.add(distance);
        }

        return preparedSplinePoints;
    }

    private ArrayList<Point> createSplineCurve2(Point[] points) {
        ArrayList<Double> sdA = createSecondDerivative(points);

        // Is [points count] equal to [sdA count]?
        // int n = [points count];
        int n = sdA.size();
        if (n < 1) {
            return null;
        }
        double sd[] = new double[n];

        // From NSMutableArray to sd[n];
        for (int i = 0; i < n; i++) {
            sd[i] = sdA.get(i);
        }

        ArrayList<Point> output = new ArrayList<Point>(n + 1);

        for (int i = 0; i < n - 1; i++) {
            Point cur = points[i];
            Point next = points[i + 1];

            for (int x = cur.x; x < next.x; x++) {
                double t = (double) (x - cur.x) / (next.x - cur.x);

                double a = 1 - t;
                double b = t;
                double h = next.x - cur.x;

                double y = a * cur.y + b * next.y + (h * h / 6) * ((a * a * a - a) * sd[i] + (b * b * b - b) * sd[i + 1]);

                if (y > 255.0) {
                    y = 255.0;
                } else if (y < 0.0) {
                    y = 0.0;
                }

                output.add(new Point(x, (int) Math.round(y)));
            }
        }

        // If the last point is (255, 255) it doesn't get added.
        if (output.size() == 255) {
            output.add(points[points.length - 1]);
        }
        return output;
    }

    private ArrayList<Double> createSecondDerivative(Point[] points) {
        int n = points.length;
        if (n <= 1) {
            return null;
        }

        double matrix[][] = new double[n][3];
        double result[] = new double[n];
        matrix[0][1] = 1;
        // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.)
        matrix[0][0] = 0;
        matrix[0][2] = 0;

        for (int i = 1; i < n - 1; i++) {
            Point P1 = points[i - 1];
            Point P2 = points[i];
            Point P3 = points[i + 1];

            matrix[i][0] = (double) (P2.x - P1.x) / 6;
            matrix[i][1] = (double) (P3.x - P1.x) / 3;
            matrix[i][2] = (double) (P3.x - P2.x) / 6;
            result[i] = (double) (P3.y - P2.y) / (P3.x - P2.x) - (double) (P2.y - P1.y) / (P2.x - P1.x);
        }

        // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.)
        result[0] = 0;
        result[n - 1] = 0;

        matrix[n - 1][1] = 1;
        // What about matrix[n-1][0] and matrix[n-1][2]? For now, assuming they are 0 (Brad L.)
        matrix[n - 1][0] = 0;
        matrix[n - 1][2] = 0;

        // solving pass1 (up->down)
        for (int i = 1; i < n; i++) {
            double k = matrix[i][0] / matrix[i - 1][1];
            matrix[i][1] -= k * matrix[i - 1][2];
            matrix[i][0] = 0;
            result[i] -= k * result[i - 1];
        }
        // solving pass2 (down->up)
        for (int i = n - 2; i >= 0; i--) {
            double k = matrix[i][2] / matrix[i + 1][1];
            matrix[i][1] -= k * matrix[i + 1][0];
            matrix[i][2] = 0;
            result[i] -= k * result[i + 1];
        }

        ArrayList<Double> output = new ArrayList<Double>(n);
        for (int i = 0; i < n; i++) output.add(result[i] / matrix[i][1]);

        return output;
    }

}
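The constructor consumes a Photoshop-style .acv curve stream (big-endian shorts, as parsed by setFromCurveFileInputStream, which also closes the stream). A minimal loading sketch; the asset path is hypothetical and the surrounding Context is assumed:

// Hedged sketch: build a tone-curve filter from a bundled .acv file.
GlToneCurveFilter toneCurve;
try {
    InputStream acv = context.getAssets().open("filters/sample_curve.acv"); // hypothetical asset path
    toneCurve = new GlToneCurveFilter(acv); // constructor parses and closes the stream
} catch (IOException e) {
    throw new RuntimeException("Could not load tone curve asset", e);
}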
@ -0,0 +1,85 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;


public class GlToneFilter extends GlThreex3TextureSamplingFilter {

    private static final String FRAGMENT_SHADER =
            "precision highp float;\n" +

            "uniform lowp sampler2D sTexture;\n" +

            "varying vec2 textureCoordinate;\n" +
            "varying vec2 leftTextureCoordinate;\n" +
            "varying vec2 rightTextureCoordinate;\n" +

            "varying vec2 topTextureCoordinate;\n" +
            "varying vec2 topLeftTextureCoordinate;\n" +
            "varying vec2 topRightTextureCoordinate;\n" +

            "varying vec2 bottomTextureCoordinate;\n" +
            "varying vec2 bottomLeftTextureCoordinate;\n" +
            "varying vec2 bottomRightTextureCoordinate;\n" +

            // "uniform highp float intensity;" +
            "uniform highp float threshold;" +
            "uniform highp float quantizationLevels;" +

            "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);" +

            "void main() {\n" +
            "vec4 textureColor = texture2D(sTexture, textureCoordinate);" +

            "float bottomLeftIntensity = texture2D(sTexture, bottomLeftTextureCoordinate).r;" +
            "float topRightIntensity = texture2D(sTexture, topRightTextureCoordinate).r;" +
            "float topLeftIntensity = texture2D(sTexture, topLeftTextureCoordinate).r;" +
            "float bottomRightIntensity = texture2D(sTexture, bottomRightTextureCoordinate).r;" +
            "float leftIntensity = texture2D(sTexture, leftTextureCoordinate).r;" +
            "float rightIntensity = texture2D(sTexture, rightTextureCoordinate).r;" +
            "float bottomIntensity = texture2D(sTexture, bottomTextureCoordinate).r;" +
            "float topIntensity = texture2D(sTexture, topTextureCoordinate).r;" +
            "float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;" +
            "float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;" +

            "float mag = length(vec2(h, v));" +
            "vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;" +
            "float thresholdTest = 1.0 - step(threshold, mag);" +
            "gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);" +
            "}";

    private float threshold = 0.2f;
    private float quantizationLevels = 10f;


    public GlToneFilter() {
        super(FRAGMENT_SHADER);
    }

    //////////////////////////////////////////////////////////////////////////

    public float getThreshold() {
        return threshold;
    }

    public void setThreshold(final float threshold) {
        this.threshold = threshold;
    }

    public float getQuantizationLevels() {
        return quantizationLevels;
    }

    public void setQuantizationLevels(final float quantizationLevels) {
        this.quantizationLevels = quantizationLevels;
    }

    //////////////////////////////////////////////////////////////////////////

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("threshold"), threshold);
        GLES20.glUniform1f(getHandle("quantizationLevels"), quantizationLevels);
    }
}
@ -0,0 +1,37 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlVibranceFilter extends GlFilter {

    private static final String VIBRANCE_FRAGMENT_SHADER = "" +
            "precision mediump float;" +
            " varying vec2 vTextureCoord;\n" +
            "\n" +
            " uniform lowp sampler2D sTexture;\n" +
            " uniform lowp float vibrance;\n" +
            "\n" +
            "void main() {\n" +
            " lowp vec4 color = texture2D(sTexture, vTextureCoord);\n" +
            " lowp float average = (color.r + color.g + color.b) / 3.0;\n" +
            " lowp float mx = max(color.r, max(color.g, color.b));\n" +
            " lowp float amt = (mx - average) * (-vibrance * 3.0);\n" +
            " color.rgb = mix(color.rgb, vec3(mx), amt);\n" +
            " gl_FragColor = color;\n" +
            "}";

    public GlVibranceFilter() {
        super(DEFAULT_VERTEX_SHADER, VIBRANCE_FRAGMENT_SHADER);
    }

    private float vibrance = 0f;

    public void setVibrance(float vibrance) {
        this.vibrance = vibrance;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("vibrance"), vibrance);
    }
}
@ -0,0 +1,61 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;


public class GlVignetteFilter extends GlFilter {

    private static final String FRAGMENT_SHADER =
            "precision mediump float;" +

            "varying vec2 vTextureCoord;" +
            "uniform lowp sampler2D sTexture;" +

            "uniform lowp vec2 vignetteCenter;" +
            "uniform highp float vignetteStart;" +
            "uniform highp float vignetteEnd;" +

            "void main() {" +
            "lowp vec3 rgb = texture2D(sTexture, vTextureCoord).rgb;" +
            "lowp float d = distance(vTextureCoord, vec2(vignetteCenter.x, vignetteCenter.y));" +
            "lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);" +
            "gl_FragColor = vec4(mix(rgb.x, 0.0, percent), mix(rgb.y, 0.0, percent), mix(rgb.z, 0.0, percent), 1.0);" +
            "}";

    private float vignetteCenterX = 0.5f;
    private float vignetteCenterY = 0.5f;
    private float vignetteStart = 0.2f;
    private float vignetteEnd = 0.85f;

    public GlVignetteFilter() {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }


    public float getVignetteStart() {
        return vignetteStart;
    }

    public void setVignetteStart(final float vignetteStart) {
        this.vignetteStart = vignetteStart;
    }

    public float getVignetteEnd() {
        return vignetteEnd;
    }

    public void setVignetteEnd(final float vignetteEnd) {
        this.vignetteEnd = vignetteEnd;
    }

    //////////////////////////////////////////////////////////////////////////

    @Override
    public void onDraw() {
        GLES20.glUniform2f(getHandle("vignetteCenter"), vignetteCenterX, vignetteCenterY);
        GLES20.glUniform1f(getHandle("vignetteStart"), vignetteStart);
        GLES20.glUniform1f(getHandle("vignetteEnd"), vignetteEnd);
    }

}
@ -0,0 +1,56 @@
package com.xypower.gpuv.egl.filter;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;

public class GlWatermarkFilter extends GlOverlayFilter {

    private Bitmap bitmap;
    private Position position = Position.LEFT_TOP;

    public GlWatermarkFilter(Bitmap bitmap) {
        this.bitmap = bitmap;
    }

    public GlWatermarkFilter(Bitmap bitmap, Position position) {
        this.bitmap = bitmap;
        this.position = position;
    }

    @Override
    protected void drawCanvas(Canvas canvas) {
        synchronized (bitmap) {
            canvas.drawBitmap(bitmap, null, canvas.getClipBounds(), null);
        }

        /*
        if (bitmap != null && !bitmap.isRecycled()) {
            switch (position) {
                case LEFT_TOP:
                    // canvas.drawBitmap(bitmap, 0, 0, null);
                    canvas.drawBitmap(bitmap, null, canvas.getClipBounds(), null);
                    break;
                case LEFT_BOTTOM:
                    canvas.drawBitmap(bitmap, 0, canvas.getHeight() - bitmap.getHeight(), null);
                    break;
                case RIGHT_TOP:
                    canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), 0, null);
                    break;
                case RIGHT_BOTTOM:
                    canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), canvas.getHeight() - bitmap.getHeight(), null);
                    break;
            }
        }
        */
    }

    public enum Position {
        LEFT_TOP,
        LEFT_BOTTOM,
        RIGHT_TOP,
        RIGHT_BOTTOM
    }
}
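Since this commit is about OSD support, the watermark filter is the natural hook: render the on-screen text into a Bitmap once and hand it to GlWatermarkFilter. Note that in the code above the Position value is not yet applied; the active path stretches the bitmap over canvas.getClipBounds(), and the commented-out block shows the intended per-corner placement. A hedged sketch (text, size and colours are illustrative; android.graphics imports are assumed):

// Hedged sketch: build an OSD bitmap and overlay it on the video frame.
Bitmap osd = Bitmap.createBitmap(640, 96, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(osd);
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.WHITE);
paint.setTextSize(48f);
c.drawText("2024-01-01 12:00:00  CH-01", 16f, 64f, paint);

GlWatermarkFilter osdFilter = new GlWatermarkFilter(osd, GlWatermarkFilter.Position.LEFT_TOP);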
@ -0,0 +1,46 @@
package com.xypower.gpuv.egl.filter;


public class GlWeakPixelInclusionFilter extends GlThreex3TextureSamplingFilter {

    private static final String FRAGMENT_SHADER =
            "precision lowp float;\n" +

            "uniform lowp sampler2D sTexture;\n" +

            "varying vec2 textureCoordinate;\n" +
            "varying vec2 leftTextureCoordinate;\n" +
            "varying vec2 rightTextureCoordinate;\n" +

            "varying vec2 topTextureCoordinate;\n" +
            "varying vec2 topLeftTextureCoordinate;\n" +
            "varying vec2 topRightTextureCoordinate;\n" +

            "varying vec2 bottomTextureCoordinate;\n" +
            "varying vec2 bottomLeftTextureCoordinate;\n" +
            "varying vec2 bottomRightTextureCoordinate;\n" +

            "void main() {\n" +
            "float bottomLeftIntensity = texture2D(sTexture, bottomLeftTextureCoordinate).r;" +
            "float topRightIntensity = texture2D(sTexture, topRightTextureCoordinate).r;" +
            "float topLeftIntensity = texture2D(sTexture, topLeftTextureCoordinate).r;" +
            "float bottomRightIntensity = texture2D(sTexture, bottomRightTextureCoordinate).r;" +
            "float leftIntensity = texture2D(sTexture, leftTextureCoordinate).r;" +
            "float rightIntensity = texture2D(sTexture, rightTextureCoordinate).r;" +
            "float bottomIntensity = texture2D(sTexture, bottomTextureCoordinate).r;" +
            "float topIntensity = texture2D(sTexture, topTextureCoordinate).r;" +
            "float centerIntensity = texture2D(sTexture, textureCoordinate).r;" +

            "float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;" +
            "float sumTest = step(1.5, pixelIntensitySum);" +
            "float pixelTest = step(0.01, centerIntensity);" +

            "gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);" +
            "}";

    public GlWeakPixelInclusionFilter() {
        super(FRAGMENT_SHADER);
    }

}
@ -0,0 +1,58 @@
package com.xypower.gpuv.egl.filter;

import android.opengl.GLES20;

public class GlWhiteBalanceFilter extends GlFilter {

    private static final String WHITE_BALANCE_FRAGMENT_SHADER = "" +
            "precision mediump float;" +
            " uniform lowp sampler2D sTexture;\n" +
            " varying vec2 vTextureCoord;\n" +

            " \n" +
            "uniform lowp float temperature;\n" +
            "uniform lowp float tint;\n" +
            "\n" +
            "const lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0);\n" +
            "\n" +
            "const mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);\n" +
            "const mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);\n" +
            "\n" +
            "void main()\n" +
            "{\n" +
            " lowp vec4 source = texture2D(sTexture, vTextureCoord);\n" +
            " \n" +
            " mediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint\n" +
            " yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);\n" +
            " lowp vec3 rgb = YIQtoRGB * yiq;\n" +
            "\n" +
            " lowp vec3 processed = vec3(\n" +
            " (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature\n" +
            " (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))), \n" +
            " (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));\n" +
            "\n" +
            " gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);\n" +
            "}";

    public GlWhiteBalanceFilter() {
        super(DEFAULT_VERTEX_SHADER, WHITE_BALANCE_FRAGMENT_SHADER);
    }

    private float temperature = 5000f;
    private float tint = 0f;

    // Converts a colour temperature in Kelvin into the small mix coefficient the
    // shader expects: values above 5000K warm the image, values below cool it.
    public void setTemperature(final float temperature) {
        this.temperature = temperature < 5000 ? (float) (0.0004 * (temperature - 5000.0)) : (float) (0.00006 * (temperature - 5000.0));
    }

    public void setTint(final float tint) {
        this.tint = (float) (tint / 100.0);
    }

    @Override
    public void onDraw() {
        GLES20.glUniform1f(getHandle("temperature"), temperature);
        GLES20.glUniform1f(getHandle("tint"), tint);
    }

}
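setTemperature expects Kelvin and does the conversion internally; note that the field's initial value of 5000f is a raw Kelvin number rather than a converted coefficient, so callers should always set it explicitly. An illustrative configuration:

// Hedged sketch: warm the image slightly and nudge the tint.
GlWhiteBalanceFilter wb = new GlWhiteBalanceFilter();
wb.setTemperature(5600f); // above 5000K warms the frame
wb.setTint(10f);          // divided by 100 before reaching the shader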
@ -0,0 +1,56 @@
package com.xypower.gpuv.egl.filter;

import android.graphics.PointF;
import android.opengl.GLES20;

public class GlZoomBlurFilter extends GlFilter {

    private static final String ZOOM_BLUR_FRAGMENT_SHADER = "" +
            "precision mediump float;" +
            " varying vec2 vTextureCoord;\n" +
            "\n" +
            "uniform lowp sampler2D sTexture;\n" +
            "\n" +
            "uniform highp vec2 blurCenter;\n" +
            "uniform highp float blurSize;\n" +
            "\n" +
            "void main()\n" +
            "{\n" +
            " // TODO: Do a more intelligent scaling based on resolution here\n" +
            " highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - vTextureCoord) * blurSize;\n" +
            " \n" +
            " lowp vec4 fragmentColor = texture2D(sTexture, vTextureCoord) * 0.18;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord + samplingOffset) * 0.15;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord + (2.0 * samplingOffset)) * 0.12;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord + (3.0 * samplingOffset)) * 0.09;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord + (4.0 * samplingOffset)) * 0.05;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord - samplingOffset) * 0.15;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord - (2.0 * samplingOffset)) * 0.12;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord - (3.0 * samplingOffset)) * 0.09;\n" +
            " fragmentColor += texture2D(sTexture, vTextureCoord - (4.0 * samplingOffset)) * 0.05;\n" +
            " \n" +
            " gl_FragColor = fragmentColor;\n" +
            "}\n";

    private PointF blurCenter = new PointF(0.5f, 0.5f);
    private float blurSize = 1f;

    public GlZoomBlurFilter() {
        super(DEFAULT_VERTEX_SHADER, ZOOM_BLUR_FRAGMENT_SHADER);
    }

    public void setBlurCenter(PointF blurCenter) {
        this.blurCenter = blurCenter;
    }

    public void setBlurSize(float blurSize) {
        this.blurSize = blurSize;
    }

    @Override
    public void onDraw() {
        GLES20.glUniform2f(getHandle("blurCenter"), blurCenter.x, blurCenter.y);
        GLES20.glUniform1f(getHandle("blurSize"), blurSize);
    }

}
@ -0,0 +1,3 @@
<resources>
    <string name="app_name">gpuv</string>
</resources>