You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
MpPreview/app/src/main/java/com/xypower/mppreview/Camera2RawFragment.java

1586 lines
65 KiB
Java

This file contains ambiguous Unicode characters!

This file contains ambiguous Unicode characters that may be confused with others in your current locale. If your use case is intentional and legitimate, you can safely ignore this warning. Use the Escape button to highlight these characters.

package com.xypower.mppreview;
import static com.xypower.mppreview.HdrUtil.generateTimestamp;
import static java.lang.System.loadLibrary;
import android.Manifest;
import android.animation.Animator;
import android.animation.AnimatorInflater;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.SensorManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentActivity;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.LinearInterpolator;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.widget.ErrorDialog;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class Camera2RawFragment extends Fragment {
// Load the native library providing the makeHdr* JNI methods declared below.
static {
    loadLibrary("mppreview");
}
/**
 * Maps {@link Surface} rotation constants (ROTATION_0..ROTATION_270) to the
 * corresponding rotation angle in degrees.
 */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
    ORIENTATIONS.append(Surface.ROTATION_0, 0);
    ORIENTATIONS.append(Surface.ROTATION_90, 90);
    ORIENTATIONS.append(Surface.ROTATION_180, 180);
    ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
/**
 * Request code for camera permissions.
 */
private static final int REQUEST_CAMERA_PERMISSIONS = 1;
/**
 * Permissions required to take a picture.
 */
private static final String[] CAMERA_PERMISSIONS = {Manifest.permission.CAMERA, Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,};
/**
 * Timeout for the pre-capture (3A convergence) sequence, in milliseconds.
 */
private static final long PRECAPTURE_TIMEOUT_MS = 1000;
/**
 * Tolerance when comparing aspect ratios.
 */
private static final double ASPECT_RATIO_TOLERANCE = 0.005;
/**
 * Max preview width that is guaranteed by Camera2 API
 */
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private static final String TAG = "Camera2RawFragment";
// Values of the camera state machine held in mState (guarded by mCameraStateLock).
private static final int STATE_CLOSED = 0;
private static final int STATE_OPENED = 1;
private static final int STATE_PREVIEW = 2;
private static final int STATE_WAITING_FOR_3A_CONVERGENCE = 3;
// Re-applies the preview transform whenever the device orientation changes.
private OrientationEventListener mOrientationListener;
// Worker pool created in onViewCreated (2 threads).
private ExecutorService executorService;
private Button takepic;   // shutter button
private ImageView rorpic; // rotating indicator shown after the shutter is pressed
/** Native HDR merge of two captured frames (presumably DNG paths) into outputPath. */
public static native boolean makeHdr(long exposureTime1, String path1, long exposureTime2, String path2, String outputPath);
/** Native HDR merge of three captured frames into outputPath. */
public static native boolean makeHdr2(long exposureTime1, String path1, long exposureTime2, String path2, long exposureTime3, String path3, String outputPath);
/** Native HDR merge of two in-memory bitmaps into outputPath. */
public static native boolean makeHdr3(long exposureTime1, Bitmap img1, int length1, long exposureTime2, Bitmap img2, int length2, String outputPath);
// public static native boolean decodeDng(ByteBuffer byteBuffer, String outputPath);
// Exposure compensation taken from MainActivity; applied to the preview request when non-zero.
private int mExposureComp = MainActivity.ExposureComp;
// Latest auto-exposure values observed in preview results; reused for still captures.
private Long exposetime;     // SENSOR_EXPOSURE_TIME from the most recent preview result
private Integer sensitivity; // SENSOR_SENSITIVITY (ISO) from the most recent preview result
private int pic1 = 0;        // HDR frame count passed via arguments (Contants.HDRNUM)
/**
 * {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
 * {@link TextureView}.
 */
private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
        configureTransform(width, height);
    }
    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
        configureTransform(width, height);
    }
    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
        // Clear the preview size so no session is started against a dead surface.
        synchronized (mCameraStateLock) {
            mPreviewSize = null;
        }
        return true;
    }
    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture texture) {
        // No-op: nothing to do per frame.
    }
};
/**
 * An {@link AutoFitTextureView} for camera preview.
 */
private AutoFitTextureView mTextureView;
/**
 * An additional thread for running tasks that shouldn't block the UI. This is used for all
 * callbacks from the {@link CameraDevice} and {@link CameraCaptureSession}s.
 */
private HandlerThread mBackgroundThread;
/**
 * A counter for tracking corresponding {@link CaptureRequest}s and {@link CaptureResult}s
 * across the {@link CameraCaptureSession} capture callbacks.
 */
private final AtomicInteger mRequestCounter = new AtomicInteger();
/**
 * A {@link Semaphore} to prevent the app from exiting before closing the camera.
 */
private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
 * A lock protecting camera state.
 */
private final Object mCameraStateLock = new Object();
// *********************************************************************************************
// State protected by mCameraStateLock.
//
// The following state is used across both the UI and background threads. Methods with "Locked"
// in the name expect mCameraStateLock to be held while calling.
/**
 * ID of the current {@link CameraDevice}.
 */
private String mCameraId;
/**
 * A {@link CameraCaptureSession} for camera preview.
 */
private CameraCaptureSession mCaptureSession;
/**
 * A reference to the open {@link CameraDevice}.
 */
private CameraDevice mCameraDevice;
/**
 * The {@link Size} of camera preview.
 */
private Size mPreviewSize;
/**
 * The {@link CameraCharacteristics} for the currently configured camera device.
 */
private CameraCharacteristics mCharacteristics;
/**
 * A {@link Handler} for running tasks in the background.
 */
private Handler mBackgroundHandler;
/**
 * A reference counted holder wrapping the {@link ImageReader} that handles JPEG image
 * captures. This is used to allow us to clean up the {@link ImageReader} when all background
 * tasks using its {@link Image}s have completed.
 * (JPEG path currently disabled.)
 */
// private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
/**
 * A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
 * This is used to allow us to clean up the {@link ImageReader} when all background tasks using
 * its {@link Image}s have completed.
 */
private RefCountedAutoCloseable<ImageReader> mRawImageReader;
/**
 * Whether or not the currently configured camera device is fixed-focus.
 */
private boolean mNoAFRun = false;
/**
 * Number of pending user requests to capture a photo.
 */
private int mPendingUserCaptures = 0;
/**
 * Request ID to {@link ImageSaverBuilder} mapping for in-progress JPEG captures.
 */
private final TreeMap<Integer, ImageSaverBuilder> mJpegResultQueue = new TreeMap<>();
/**
 * Request ID to {@link ImageSaverBuilder} mapping for in-progress RAW captures.
 */
private final TreeMap<Integer, ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
/**
 * {@link CaptureRequest.Builder} for the camera preview
 */
private CaptureRequest.Builder mPreviewRequestBuilder;
/**
 * The state of the camera device.
 *
 * @see #mPreCaptureCallback
 */
private int mState = STATE_CLOSED;
/**
 * Timer to use with pre-capture sequence to ensure a timely capture if 3A convergence is
 * taking too long.
 */
private long mCaptureTimer;
//**********************************************************************************************
/**
 * {@link CameraDevice.StateCallback} is called when the currently active {@link CameraDevice}
 * changes its state.
 */
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice cameraDevice) {
        // This method is called when the camera is opened. We start camera preview here if
        // the TextureView displaying this has been set up.
        synchronized (mCameraStateLock) {
            mState = STATE_OPENED;
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            // Start the preview session if the TextureView has been set up already.
            if (mPreviewSize != null && mTextureView.isAvailable()) {
                createCameraPreviewSessionLocked();
            }
        }
    }
    @Override
    public void onDisconnected(CameraDevice cameraDevice) {
        // Device disconnected (e.g. claimed by another client): close and reset state.
        synchronized (mCameraStateLock) {
            mState = STATE_CLOSED;
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }
    }
    @Override
    public void onError(CameraDevice cameraDevice, int error) {
        // Fatal device error: close the camera and finish the hosting activity.
        Log.e(TAG, "Received camera device error: " + error);
        synchronized (mCameraStateLock) {
            mState = STATE_CLOSED;
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }
        Activity activity = getActivity();
        if (null != activity) {
            activity.finish();
        }
    }
};
/**
 * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
 * JPEG image is ready to be saved.
 */
private final ImageReader.OnImageAvailableListener mOnJpegImageAvailableListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // JPEG path is currently disabled; intentionally a no-op.
        // dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
    }
};
/**
 * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
 * RAW image is ready to be saved.
 */
private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // Hand the newly available RAW frame to the save pipeline keyed by request ID.
        dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
    }
};
/**
 * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
 * pre-capture sequence.
 */
private CameraCaptureSession.CaptureCallback mPreCaptureCallback = new CameraCaptureSession.CaptureCallback() {
    // Inspects each (partial or total) result; once 3A (AF/AE/AWB) has converged —
    // or the pre-capture timeout has hit — fires all pending still captures.
    private void process(CaptureResult result) {
        synchronized (mCameraStateLock) {
            switch (mState) {
                case STATE_PREVIEW: {
                    // We have nothing to do when the camera preview is running normally.
                    break;
                }
                case STATE_WAITING_FOR_3A_CONVERGENCE: {
                    boolean readyToCapture = true;
                    if (!mNoAFRun) {
                        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                        // Record the auto exposure time reported for this frame.
                        exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                        // Record the auto ISO (sensor sensitivity).
                        sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
                        if (afState == null) {
                            break;
                        }
                        readyToCapture = (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    }
                    if (!isLegacyLocked()) {
                        // On non-LEGACY hardware, also wait for AE and AWB convergence.
                        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                        Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                        if (aeState == null || awbState == null) {
                            break;
                        }
                        readyToCapture = readyToCapture && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED;
                    }
                    if (!readyToCapture && hitTimeoutLocked()) {
                        // Capture anyway rather than hanging if 3A never converges.
                        Log.w(TAG, "Timed out waiting for pre-capture sequence to complete.");
                        readyToCapture = true;
                    }
                    if (readyToCapture && mPendingUserCaptures > 0) {
                        // Issue one still capture per queued shutter press.
                        while (mPendingUserCaptures > 0) {
                            captureStillPictureLocked(exposetime, sensitivity);
                            mPendingUserCaptures--;
                        }
                        mState = STATE_PREVIEW;
                    }
                }
            }
        }
    }
    @Override
    public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
        process(partialResult);
    }
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
        process(result);
    }
};
/**
 * Capture callback for still (RAW) captures: pairs each request with its
 * {@link ImageSaverBuilder} via the request tag, records the target file at
 * capture start, and attaches the result metadata on completion.
 */
private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
        synchronized (mCameraStateLock) {
            String currentDateTime = generateTimestamp();
            File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
            File rawFile = new File(directory, "RAW_" + currentDateTime + ".dng");
            int requestId = (int) request.getTag();
            ImageSaverBuilder rawBuilder = mRawResultQueue.get(requestId);
            if (rawBuilder != null) rawBuilder.setFile(rawFile);
        }
    }
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
        int requestId = (int) request.getTag();
        StringBuilder sb = new StringBuilder();
        synchronized (mCameraStateLock) {
            ImageSaverBuilder rawBuilder = mRawResultQueue.get(requestId);
            if (rawBuilder != null) {
                rawBuilder.setResult(result);
                // Fix: the old code re-checked rawBuilder != null here (a leftover of
                // the removed JPEG path) and always prepended a stray ", " to the message.
                sb.append("Saving RAW as: ");
                sb.append(rawBuilder.getSaveLocation());
            }
            handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
            finishedCaptureLocked();
        }
        // showToast(sb.toString());
    }
    @Override
    public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
        // Drop the bookkeeping for the failed request so it cannot leak in the queues.
        int requestId = (int) request.getTag();
        synchronized (mCameraStateLock) {
            mJpegResultQueue.remove(requestId);
            mRawResultQueue.remove(requestId);
            finishedCaptureLocked();
        }
        // showToast("Capture failed!");
    }
};
/**
 * Main-thread handler that shows {@code msg.obj} (a String) as a toast.
 */
private final Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        Activity activity = getActivity();
        if (activity != null) {
            Toast.makeText(activity, (String) msg.obj, Toast.LENGTH_SHORT).show();
        }
    }
};
/**
 * Static factory for this fragment.
 *
 * @return a freshly constructed {@link Camera2RawFragment}.
 */
public static Camera2RawFragment newInstance() {
    Camera2RawFragment fragment = new Camera2RawFragment();
    return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    // Inflate the fragment layout without attaching it; the framework adds it.
    final View root = inflater.inflate(R.layout.fragment_camera2_raw, container, false);
    return root;
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
    mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
    // Read the HDR frame count passed in by the hosting activity, if any.
    Bundle arguments = getArguments();
    if (arguments != null) {
        pic1 = arguments.getInt(Contants.HDRNUM);
    }
    // Keep the preview transform in sync with device orientation changes.
    mOrientationListener = new OrientationEventListener(getActivity(), SensorManager.SENSOR_DELAY_NORMAL) {
        @Override
        public void onOrientationChanged(int orientation) {
            if (mTextureView != null && mTextureView.isAvailable()) {
                configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
            }
        }
    };
    takepic = view.findViewById(R.id.takepic);
    rorpic = view.findViewById(R.id.rorpic);
    takepic.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Swap the shutter button for a rotating indicator while capturing.
            takepic.setVisibility(View.INVISIBLE);
            rorpic.setVisibility(View.VISIBLE);
            Animation animation = AnimationUtils.loadAnimation(getContext(), R.anim.r);
            animation.setInterpolator(new LinearInterpolator());
            rorpic.startAnimation(animation);
            takePicture();
        }
    });
    executorService = Executors.newFixedThreadPool(2);
}
@Override
public void onResume() {
    super.onResume();
    startBackgroundThread();
    // NOTE(review): openCamera() is called before the SurfaceTexture listener is
    // attached; the preview session itself is started later from the texture/device
    // callbacks once both camera and surface are ready.
    openCamera();
    if (mTextureView.isAvailable()) {
        // Surface already exists (e.g. returning from pause): configure it now.
        configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
    } else {
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
    }
    if (mOrientationListener != null && mOrientationListener.canDetectOrientation()) {
        mOrientationListener.enable();
    }
}
@Override
public void onPause() {
    // Tear down in reverse order of onResume(): orientation listener, camera,
    // then the background thread.
    if (mOrientationListener != null) {
        mOrientationListener.disable();
    }
    closeCamera();
    stopBackgroundThread();
    super.onPause();
}
/**
 * Handles the camera/storage permission result; shows the missing-permission
 * error (and finishes the activity) if any permission was denied.
 */
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
        // An empty result means the request was interrupted/cancelled; per the
        // Android runtime-permission contract, treat that the same as a denial.
        if (grantResults.length == 0) {
            showMissingPermissionError();
            return;
        }
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) {
                showMissingPermissionError();
                return;
            }
        }
    } else {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
}
/**
 * Replaces this fragment with a fresh instance after a short delay.
 * <p>
 * Null guards added: by the time this runs the fragment may be detached
 * ({@code getActivity() == null}) or the background thread already stopped
 * ({@code mBackgroundHandler == null}); the old code would NPE in either case.
 * NOTE(review): the transaction is committed from the background thread —
 * confirm the FragmentManager in use tolerates off-main-thread commit.
 */
private void restartFragment() {
    final FragmentActivity activity = getActivity();
    final Handler handler = mBackgroundHandler;
    if (activity == null || handler == null) {
        return;
    }
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            if (!activity.isFinishing() && !activity.isDestroyed()) {
                activity.getSupportFragmentManager().beginTransaction().replace(R.id.container, Camera2RawFragment.newInstance()).commit();
            }
        }
    }, 100);
}
/**
 * Starts an AF/AE run metered over the entire preview area.
 */
public void doFocus() {
    int previewWidth = mTextureView.getWidth();
    int previewHeight = mTextureView.getHeight();
    RectF previewRect = new RectF(0, 0, previewWidth, previewHeight);
    // Map the view-space rect into camera sensor coordinates.
    CoordinateTransformer cf = new CoordinateTransformer(mCharacteristics, previewRect);
    RectF rect = cf.toCameraSpace(previewRect);
    MeteringRectangle mr = new MeteringRectangle(new Rect((int) rect.left, (int) rect.top, (int) rect.right, (int) rect.bottom), 1000);
    startControlAFRequest(mr, mPreCaptureCallback);
}
/**
 * Applies the given metering rectangle to both the AE and AF regions of the
 * preview request, then triggers a single autofocus run.
 *
 * @param rect            metering region in sensor coordinates.
 * @param captureCallback callback receiving the repeating and trigger results.
 */
public void startControlAFRequest(MeteringRectangle rect, CameraCaptureSession.CaptureCallback captureCallback) {
    MeteringRectangle[] rectangle = new MeteringRectangle[]{rect};
    // Focus Mode AUTO
    // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_AUTO);
    // AE metering region
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, rectangle);
    // AF region — the same rect is reused for AF and AE here; in practice a slightly
    // larger AE rectangle meters better.
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, rectangle);
    try {
        // The AE/AF regions are applied continuously via setRepeatingRequest.
        mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), captureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
    // Trigger autofocus.
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
    try {
        // The AF trigger is sent with a one-shot capture(): focus only needs to be
        // triggered once per user tap.
        mCaptureSession.capture(mPreviewRequestBuilder.build(), captureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}:
 * finds the first camera advertising the RAW capability, creates (or retains) the RAW
 * {@link ImageReader} sized to the largest RAW_SENSOR output, and records the camera
 * id/characteristics under {@link #mCameraStateLock}.
 *
 * @return true if a RAW-capable camera was configured, false otherwise.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    assert activity != null;
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            // For still image captures, we use the largest available RAW size.
            // (Dead code removed: the original also queried YUV_420_888 sizes and a
            // largest-JPEG size, neither of which was ever used.)
            Size[] outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
            Size largestRaw = Collections.max(Arrays.asList(outputSizes), new CompareSizesByArea());
            synchronized (mCameraStateLock) {
                // Place the ImageReader in a reference-counted wrapper so it is only
                // closed once all background tasks using its Images have finished.
                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler);
                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog");
    return false;
}
/**
 * Opens the camera specified by {@link #mCameraId}.
 */
@SuppressWarnings("MissingPermission")
private void openCamera() {
    if (!setUpCameraOutputs()) {
        return;
    }
    if (!hasAllPermissionsGranted()) {
        requestCameraPermissions();
        return;
    }
    CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
    try {
        // Wait for any in-flight open/close to finish; fail loudly on timeout.
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        Handler backgroundHandler;
        synchronized (mCameraStateLock) {
            backgroundHandler = mBackgroundHandler;
        }
        // The semaphore is released again in mStateCallback once the device settles.
        manager.openCamera(mCameraId, mStateCallback, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
}
/**
 * Requests permissions necessary to use camera and save pictures.
 * NOTE(review): currently an empty stub — no permission request is actually made.
 */
private void requestCameraPermissions() {
}
/**
 * Tells whether all the necessary permissions are granted to this app.
 * NOTE(review): stubbed to always return true, so runtime permission checks
 * are effectively bypassed.
 *
 * @return True if all the required permissions are granted.
 */
private boolean hasAllPermissionsGranted() {
    return true;
}
/**
 * Gets whether you should show UI with rationale for requesting the permissions.
 * NOTE(review): stubbed to always return false.
 *
 * @return True if the UI should be shown.
 */
private boolean shouldShowRationale() {
    return false;
}
/**
 * Informs the user that required permissions are missing, then closes the
 * hosting activity. Safe to call when the fragment is detached.
 */
private void showMissingPermissionError() {
    final Activity activity = getActivity();
    if (activity == null) {
        return;
    }
    Toast.makeText(activity, R.string.request_permission, Toast.LENGTH_SHORT).show();
    activity.finish();
}
/**
 * Closes the current {@link CameraDevice}.
 */
private void closeCamera() {
    try {
        mCameraOpenCloseLock.acquire();
        synchronized (mCameraStateLock) {
            // Reset state and clean up resources used by the camera.
            // Note: After calling this, the ImageReaders will be closed after any background
            // tasks saving Images from these readers have been completed.
            mPendingUserCaptures = 0;
            mState = STATE_CLOSED;
            if (null != mCaptureSession) {
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            // if (null != mJpegImageReader) {
            // mJpegImageReader.close();
            // mJpegImageReader = null;
            // }
            if (null != mRawImageReader) {
                // Releases this fragment's reference; the reader actually closes once
                // all in-flight Image consumers drop theirs (reference-counted wrapper).
                mRawImageReader.close();
                mRawImageReader = null;
            }
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
        mCameraOpenCloseLock.release();
    }
}
/**
 * Starts a background thread and its {@link Handler}.
 */
private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    synchronized (mCameraStateLock) {
        // Publish the handler under the state lock so camera callbacks see it.
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }
}
/**
 * Stops the background thread and its {@link Handler}, blocking until the
 * thread exits.
 */
private void stopBackgroundThread() {
    // Guard against being called when the thread was never started.
    if (mBackgroundThread == null) {
        return;
    }
    mBackgroundThread.quitSafely();
    try {
        mBackgroundThread.join();
        mBackgroundThread = null;
        synchronized (mCameraStateLock) {
            mBackgroundHandler = null;
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag instead of swallowing it so callers further
        // up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
}
/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 * <p/>
 * Call this only with {@link #mCameraStateLock} held.
 */
private void createCameraPreviewSessionLocked() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);
        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);
        // Create the session with both the preview surface and the RAW reader's
        // surface so still captures can target the reader later.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession cameraCaptureSession) {
                synchronized (mCameraStateLock) {
                    // The camera is already closed
                    if (null == mCameraDevice) {
                        return;
                    }
                    try {
                        setup3AControlsLocked(mPreviewRequestBuilder);
                        // Apply the user-selected exposure compensation, if any.
                        if (mExposureComp != 0) {
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, mExposureComp);
                        }
                        // Finally, we start displaying the camera preview.
                        cameraCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
                        mState = STATE_PREVIEW;
                    } catch (CameraAccessException | IllegalStateException e) {
                        e.printStackTrace();
                        return;
                    }
                    // When the session is ready, we start displaying the preview.
                    mCaptureSession = cameraCaptureSession;
                }
            }
            @Override
            public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
                // showToast("Failed to configure camera.");
            }
        }, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
 * Configure the given {@link CaptureRequest.Builder} to use auto-focus, auto-exposure, and
 * auto-white-balance controls if available.
 * <p/>
 * Call this only with {@link #mCameraStateLock} held.
 *
 * @param builder the builder to configure.
 */
private void setup3AControlsLocked(CaptureRequest.Builder builder) {
    // NOTE(review): fpsRanges is queried but never consulted; the target FPS range
    // below is hard-coded to 5-15 — confirm this range is supported on target devices.
    Range<Integer>[] fpsRanges = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    Range<Integer> fpsRange = new Range<>(5, 15);
    builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
    // Enable auto-magical 3A run by camera device
    builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
    Float minFocusDist = mCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
    // If MINIMUM_FOCUS_DISTANCE is 0, lens is fixed-focus and we need to skip the AF run.
    mNoAFRun = (minFocusDist == null || minFocusDist == 0);
    if (!mNoAFRun) {
        // If there is a "continuous picture" mode available, use it, otherwise default to AUTO.
        if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES), CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        } else {
            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
        }
        // (Region-metering variants kept for reference.)
        // MeteringRectangle mr = new MeteringRectangle(rect, 1000);
        // builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] {mr});
        // builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {mr});
        // builder.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(MeteringRectangle(rect, 1000)))
        // set(CaptureRequest.CONTROL_AE_REGIONS, arrayOf(MeteringRectangle(rect, 1000)))
    }
    // If there is an auto-magical flash control mode available, use it, otherwise default to
    // the "on" mode, which is guaranteed to always be available.
    if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES), CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)) {
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
    } else {
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    }
    // Prefer DAYLIGHT white balance when the device supports it, else fall back to AUTO.
    if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES), CaptureRequest.CONTROL_AWB_MODE_DAYLIGHT)) {
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_DAYLIGHT);
    } else if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES), CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
    }
}
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
synchronized (mCameraStateLock) {
if (null == mTextureView || null == activity) {
return;
}
StreamConfigurationMap map = null;
if (map == null) {
try {
map = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
} catch (Exception ex) {
ex.printStackTrace();
}
}
// For still image captures, we always use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
// Find the rotation of the device relative to the native device orientation.
int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
// Find the rotation of the device relative to the camera sensor's orientation.
int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);
// Swap the view dimensions for calculation as needed if they are rotated relative to
// the sensor.
boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
int rotatedViewWidth = viewWidth;
int rotatedViewHeight = viewHeight;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedViewWidth = viewHeight;
rotatedViewHeight = viewWidth;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
// Preview should not be larger than display size and 1080p.
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
// Find the best preview size for these view dimensions and configured JPEG size.
// Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
Size previewSize = new Size(3840, 2160);
if (swappedDimensions) {
mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
} else {
mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
}
// Find rotation of device in degrees (reverse device orientation for front-facing
// cameras).
int rotation = (mCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) ? (360 + ORIENTATIONS.get(deviceRotation)) % 360 : (360 - ORIENTATIONS.get(deviceRotation)) % 360;
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
// if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
// bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
// matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
// float scale = Math.max((float) viewHeight / previewSize.getHeight(), (float) viewWidth / previewSize.getWidth());
// matrix.postScale(scale, scale, centerX, centerY);
//
// }
if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
if (rotation == 0) {
matrix.postScale(1, 1);
} else if (rotation == 90) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scaleh = (float) viewHeight / previewSize.getHeight();
float scalew = (float) viewWidth / previewSize.getWidth();
matrix.postScale(scalew, scaleh, centerX, centerY);
} else if (rotation == 180) {
matrix.postScale(1, 1);
} else if (rotation == 270) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scaleh = (float) viewHeight / previewSize.getHeight();
float scalew = (float) viewWidth / previewSize.getWidth();
matrix.postScale(scaleh, scalew, centerX, centerY);
}
}
matrix.postRotate(rotation, centerX, centerY);
mTextureView.setTransform(matrix);
// Start or restart the active capture session if the preview was initialized or
// if its aspect ratio changed significantly.
if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
mPreviewSize = previewSize;
if (mState != STATE_CLOSED) {
createCameraPreviewSessionLocked();
}
}
}
}
    /**
     * Initiate a still image capture.
     * <p/>
     * This function sends a capture request that triggers the auto-focus and auto-exposure
     * pre-capture sequences, then waits (via the pre-capture callback) for 3A to converge
     * before taking the actual picture.
     */
    public void takePicture() {
        synchronized (mCameraStateLock) {
            // Count this request even if we cannot start it yet; pending captures are
            // drained once 3A converges.
            mPendingUserCaptures++;
            // If we already triggered a capture sequence, or the camera device is not ready,
            // do nothing more — the pending count above ensures the shot is not lost.
            if (mState != STATE_PREVIEW) {
                return;
            }
            try {
                // Trigger an auto-exposure precapture metering sequence. LEGACY-level devices
                // do not support the precapture trigger, so it is skipped for them.
                if (!isLegacyLocked()) {
                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                }
                // Trigger an auto-focus scan only if AF is actually running on this device.
                if (!mNoAFRun) {
                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                }
                // From here on, the pre-capture callback watches for 3A convergence.
                mState = STATE_WAITING_FOR_3A_CONVERGENCE;
                // Start a timer so we can fall back to capturing anyway if 3A never converges.
                startTimerLocked();
                // Issue a single request carrying the triggers; the repeating preview request
                // continues unchanged underneath it.
                mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }
private void captureStillPictureLocked(long exposureTime, int sensitivity) {
try {
final Activity activity = getActivity();
if (null == activity || null == mCameraDevice) {
return;
}
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface());
// Use the same AE and AF modes as the preview.
setup3AControlsLocked(captureBuilder);
if (mExposureComp != 0) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, mExposureComp);
}
// Set orientation.
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, sensorToDeviceRotation(mCharacteristics, rotation));
Range<Integer> range = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational rational = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
double step = rational.doubleValue();
captureBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
if (pic1 < 21) {
// mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
ArrayList<PngPhotoBean> mlist = new ArrayList<>();
List<CaptureRequest> requests = new ArrayList<>();
long v = 0;
ImageSaver.ImagePair imagePair = new ImageSaver.ImagePair(2);
ImageSaver.ImagePairRunnable runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override
public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages();
final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() {
@Override
public void run() {
if (images.size() != 2) {
return;
}
ImageSaver.ImageInfo img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1);
Log.d("开始Hdr处理", "strat");
String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
img1.bitmap.recycle();
img2.bitmap.recycle();
img1 = null;
img2 = null;
images.clear();
Log.d("结束Hdr处理", "end");
if (b) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// 在主线程中执行UI更新
// ...
takepic.setVisibility(View.VISIBLE);
rorpic.clearAnimation();
rorpic.setVisibility(View.GONE);
showToast("HDR拍摄成功");
}
});
}
}
}).start();
}
};
imagePair.setRunnable(runnable);
for (int idx = 0; idx < 2; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
if (idx == 0) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// 设置曝光时间例如设置为1000微秒
// long exposureTime = 1000 000000L; // 1000微秒
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
v = exposureTime;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
if (idx == 1) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
if (pic1 <= 0) {
v = exposureTime * 7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
} else {
v = exposureTime * pic1;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
}
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
}
}
CaptureRequest request = captureBuilder.build();
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数
rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
rawBuilder.setList(mlist);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
requests.add(request);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
} else {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
// captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 100);
captureBuilder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR);
CaptureRequest request = captureBuilder.build();
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
// ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void finishedCaptureLocked() {
try {
if (!mNoAFRun) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
synchronized (mCameraStateLock) {
Map.Entry<Integer, ImageSaverBuilder> entry = null;
if (pendingQueue != null) {
for (Map.Entry<Integer, ImageSaverBuilder> item : pendingQueue.entrySet()) {
ImageSaverBuilder value = item.getValue();
if (value.mImage == null) {
entry = item;
break;
}
}
}
if (entry == null) {
return;
}
ImageSaverBuilder builder = entry.getValue();
if (reader == null || reader.getAndRetain() == null) {
Log.e(TAG, "Paused the activity before we could save the image," + " ImageReader already closed.");
pendingQueue.remove(entry.getKey());
return;
}
Image image;
try {
image = reader.get().acquireNextImage();
} catch (IllegalStateException e) {
Log.e(TAG, "Too many images queued for saving, dropping image for request: " + entry.getKey());
pendingQueue.remove(entry.getKey());
return;
}
builder.setRefCountedReader(reader).setImage(image);
handleCompletionLocked(entry.getKey(), builder, pendingQueue);
}
}
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
/**
* A wrapper for an {@link AutoCloseable} object that implements reference counting to allow
* for resource management.
*/
public static class RefCountedAutoCloseable<T extends AutoCloseable> implements AutoCloseable {
private T mObject;
private long mRefCount = 0;
/**
* Wrap the given object.
*
* @param object an object to wrap.
*/
public RefCountedAutoCloseable(T object) {
if (object == null) throw new NullPointerException();
mObject = object;
}
/**
* Increment the reference count and return the wrapped object.
*
* @return the wrapped object, or null if the object has been released.
*/
public synchronized T getAndRetain() {
if (mRefCount < 0) {
return null;
}
mRefCount++;
return mObject;
}
/**
* Return the wrapped object.
*
* @return the wrapped object, or null if the object has been released.
*/
public synchronized T get() {
return mObject;
}
/**
* Decrement the reference count and release the wrapped object if there are no other
* users retaining this object.
*/
@Override
public synchronized void close() {
if (mRefCount >= 0) {
mRefCount--;
if (mRefCount < 0) {
try {
mObject.close();
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
mObject = null;
}
}
}
}
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
* is at least as large as the respective texture view size, and that is at most as large as the
* respective max size, and whose aspect ratio matches with the specified value. If such size
* doesn't exist, choose the largest one that is at most as large as the respective max size,
* and whose aspect ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output
* class
* @param textureViewWidth The width of the texture view relative to sensor coordinate
* @param textureViewHeight The height of the texture view relative to sensor coordinate
* @param maxWidth The maximum width that can be chosen
* @param maxHeight The maximum height that can be chosen
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
// Collect the supported resolutions that are smaller than the preview Surface
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight && option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is no one big enough, pick the
// largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
private static boolean contains(int[] modes, int mode) {
if (modes == null) {
return false;
}
for (int i : modes) {
if (i == mode) {
return true;
}
}
return false;
}
private static boolean checkAspectsEqual(Size a, Size b) {
double aAspect = a.getWidth() / (double) a.getHeight();
double bAspect = b.getWidth() / (double) b.getHeight();
return Math.abs(aAspect - bAspect) <= ASPECT_RATIO_TOLERANCE;
}
/**
* Rotation need to transform from the camera sensor orientation to the device's current
* orientation.
*
* @param c the {@link CameraCharacteristics} to query for the camera sensor
* orientation.
* @param deviceOrientation the current device orientation relative to the native device
* orientation.
* @return the total rotation from the sensor orientation to the current device orientation.
*/
private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Get device orientation in degrees
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
// Reverse device orientation for front-facing cameras
if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
deviceOrientation = -deviceOrientation;
}
// Calculate desired JPEG orientation relative to camera orientation to make
// the image upright relative to the device orientation
return (sensorOrientation - deviceOrientation + 360) % 360;
}
/**
* Shows a {@link Toast} on the UI thread.
*
* @param text The message to show.
*/
public void showToast(String text) {
// We show a Toast by sending request message to mMessageHandler. This makes sure that the
// Toast is shown on the UI thread.
Message message = Message.obtain();
message.obj = text;
mMessageHandler.sendMessage(message);
}
/**
* If the given request has been completed, remove it from the queue of active requests and
* send an {@link ImageSaver} with the results from this request to a background thread to
* save a file.
* <p/>
* Call this only with {@link #mCameraStateLock} held.
*
* @param requestId the ID of the {@link CaptureRequest} to handle.
* @param queue the queue to remove this request from, if completed.
*/
private void handleCompletionLocked(int requestId, ImageSaverBuilder builder, TreeMap<Integer, ImageSaverBuilder> queue) {
if (builder == null) {
return;
}
ImageSaver saver = builder.buildIfComplete();
System.out.println();
if (saver != null) {
queue.remove(requestId);
// AsyncTaskWithCustomThreadPool.THREAD_POOL_EXECUTOR.execute(saver);
executorService.execute(saver);
}
}
/**
* Check if we are using a device that only supports the LEGACY hardware level.
* <p/>
* Call this only with {@link #mCameraStateLock} held.
*
* @return true if this is a legacy device.
*/
private boolean isLegacyLocked() {
return mCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
}
    /**
     * Start the timer for the pre-capture sequence.
     * <p/>
     * Call this only with {@link #mCameraStateLock} held.
     */
    private void startTimerLocked() {
        // Record the monotonic start time; hitTimeoutLocked() compares against it later.
        mCaptureTimer = SystemClock.elapsedRealtime();
    }
/**
* Check if the timer for the pre-capture sequence has been hit.
* <p/>
* Call this only with {@link #mCameraStateLock} held.
*
* @return true if the timeout occurred.
*/
private boolean hitTimeoutLocked() {
return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
}
private static int execHdr(Context context, long exposureTime1, String path1, long exposureTime2, String path2, String outputPath, String tmpFilePath) {
ApplicationInfo applicationInfo = null;
try {
applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
} catch (Exception ex) {
}
String exeFilePath = applicationInfo.nativeLibraryDir + '/' + "libhdr.so";
File hdrpFile = new File(exeFilePath);
if (!hdrpFile.exists()) {
return -1;
}
String cmd = exeFilePath + " " + outputPath + " ";
cmd += tmpFilePath + " ";
cmd += Long.toString(exposureTime1) + " ";
cmd += path1 + " ";
cmd += Long.toString(exposureTime2) + " ";
cmd += path2 + " ";
String[] params = new String[]{""};
File workDir = context.getFilesDir();
int exitCode = 0;
try {
Process process = Runtime.getRuntime().exec(cmd, params, workDir.getAbsoluteFile());
// Intrinsics.checkNotNullExpressionValue(process, "process");
InputStream inputStream = process.getInputStream();
BufferedReader reader = new BufferedReader((Reader)(new InputStreamReader(inputStream)));
// StringBuilder stringBuilder = new StringBuilder();
while(true) {
String line = reader.readLine();
if (line == null) {
exitCode = process.exitValue();
reader.close();
process.destroy();
break;
}
if (line != null) {
// this.outputCallback.invoke(var5);
Log.d("HDRPlus", line);
// stringBuilder.append(line);
// stringBuilder.append("\r\n");
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
return exitCode;
}
}