package com.xypower.mppreview;
|
|
|
|
|
|
|
|
|
|
import static java.lang.System.loadLibrary;
|
|
|
|
|
|
|
|
|
|
import android.Manifest;
|
|
|
|
|
import android.app.Activity;
|
|
|
|
|
import android.app.Dialog;
|
|
|
|
|
import android.content.Context;
|
|
|
|
|
import android.content.DialogInterface;
|
|
|
|
|
import android.content.pm.PackageManager;
|
|
|
|
|
import android.graphics.ImageFormat;
|
|
|
|
|
import android.graphics.Matrix;
|
|
|
|
|
import android.graphics.Point;
|
|
|
|
|
import android.graphics.Rect;
|
|
|
|
|
import android.graphics.RectF;
|
|
|
|
|
import android.graphics.SurfaceTexture;
|
|
|
|
|
import android.hardware.SensorManager;
|
|
|
|
|
import android.hardware.camera2.CameraAccessException;
|
|
|
|
|
import android.hardware.camera2.CameraCaptureSession;
|
|
|
|
|
import android.hardware.camera2.CameraCharacteristics;
|
|
|
|
|
import android.hardware.camera2.CameraDevice;
|
|
|
|
|
import android.hardware.camera2.CameraManager;
|
|
|
|
|
import android.hardware.camera2.CameraMetadata;
|
|
|
|
|
import android.hardware.camera2.CaptureFailure;
|
|
|
|
|
import android.hardware.camera2.CaptureRequest;
|
|
|
|
|
import android.hardware.camera2.CaptureResult;
|
|
|
|
|
import android.hardware.camera2.DngCreator;
|
|
|
|
|
import android.hardware.camera2.TotalCaptureResult;
|
|
|
|
|
import android.hardware.camera2.params.MeteringRectangle;
|
|
|
|
|
import android.hardware.camera2.params.StreamConfigurationMap;
|
|
|
|
|
import android.media.Image;
|
|
|
|
|
import android.media.ImageReader;
|
|
|
|
|
import android.os.AsyncTask;
|
|
|
|
|
import android.os.Bundle;
|
|
|
|
|
|
|
|
|
|
import androidx.appcompat.app.AlertDialog;
|
|
|
|
|
import androidx.fragment.app.DialogFragment;
|
|
|
|
|
import androidx.fragment.app.Fragment;
|
|
|
|
|
import androidx.fragment.app.FragmentActivity;
|
|
|
|
|
|
|
|
|
|
import android.os.Environment;
|
|
|
|
|
import android.os.Handler;
|
|
|
|
|
import android.os.HandlerThread;
|
|
|
|
|
import android.os.Looper;
|
|
|
|
|
import android.os.Message;
|
|
|
|
|
import android.os.SystemClock;
|
|
|
|
|
import android.util.Log;
|
|
|
|
|
import android.util.Range;
|
|
|
|
|
import android.util.Rational;
|
|
|
|
|
import android.util.Size;
|
|
|
|
|
import android.util.SparseIntArray;
|
|
|
|
|
import android.view.LayoutInflater;
|
|
|
|
|
import android.view.OrientationEventListener;
|
|
|
|
|
import android.view.Surface;
|
|
|
|
|
import android.view.TextureView;
|
|
|
|
|
import android.view.View;
|
|
|
|
|
import android.view.ViewGroup;
|
|
|
|
|
import android.widget.Button;
|
|
|
|
|
import android.widget.EditText;
|
|
|
|
|
import android.widget.Toast;
|
|
|
|
|
|
|
|
|
|
import java.io.File;
|
|
|
|
|
import java.io.FileOutputStream;
|
|
|
|
|
import java.io.IOException;
|
|
|
|
|
import java.io.OutputStream;
|
|
|
|
|
import java.nio.ByteBuffer;
|
|
|
|
|
import java.text.SimpleDateFormat;
|
|
|
|
|
import java.util.ArrayList;
|
|
|
|
|
import java.util.Arrays;
|
|
|
|
|
import java.util.Collections;
|
|
|
|
|
import java.util.Comparator;
|
|
|
|
|
import java.util.Date;
|
|
|
|
|
import java.util.List;
|
|
|
|
|
import java.util.Locale;
|
|
|
|
|
import java.util.Map;
|
|
|
|
|
import java.util.TreeMap;
|
|
|
|
|
import java.util.concurrent.Semaphore;
|
|
|
|
|
import java.util.concurrent.TimeUnit;
|
|
|
|
|
import java.util.concurrent.atomic.AtomicInteger;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A simple {@link Fragment} subclass.
|
|
|
|
|
* Use the {@link Camera2RawFragment#newInstance} factory method to
|
|
|
|
|
* create an instance of this fragment.
|
|
|
|
|
*/
|
|
|
|
|
public class Camera2RawFragment extends Fragment {
|
|
|
|
|
|
|
|
|
|
private static int covertNum;
|
|
|
|
|
|
|
|
|
|
static {
|
|
|
|
|
loadLibrary("mppreview");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Conversion from screen rotation to JPEG orientation.
|
|
|
|
|
*/
|
|
|
|
|
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
static {
|
|
|
|
|
ORIENTATIONS.append(Surface.ROTATION_0, 0);
|
|
|
|
|
ORIENTATIONS.append(Surface.ROTATION_90, 90);
|
|
|
|
|
ORIENTATIONS.append(Surface.ROTATION_180, 180);
|
|
|
|
|
ORIENTATIONS.append(Surface.ROTATION_270, 270);
|
|
|
|
|
}
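// Example: ORIENTATIONS.get(Surface.ROTATION_90) returns 90. configureTransform() below uses this
// mapping when turning the current display rotation into the rotation applied to the preview.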
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Request code for camera permissions.
|
|
|
|
|
*/
|
|
|
|
|
private static final int REQUEST_CAMERA_PERMISSIONS = 1;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Permissions required to take a picture.
|
|
|
|
|
*/
|
|
|
|
|
private static final String[] CAMERA_PERMISSIONS = {Manifest.permission.CAMERA, Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,};
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Timeout for the pre-capture sequence.
|
|
|
|
|
*/
|
|
|
|
|
private static final long PRECAPTURE_TIMEOUT_MS = 1000;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Tolerance when comparing aspect ratios.
|
|
|
|
|
*/
|
|
|
|
|
private static final double ASPECT_RATIO_TOLERANCE = 0.005;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Max preview width that is guaranteed by Camera2 API
|
|
|
|
|
*/
|
|
|
|
|
private static final int MAX_PREVIEW_WIDTH = 1920;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Max preview height that is guaranteed by Camera2 API
|
|
|
|
|
*/
|
|
|
|
|
private static final int MAX_PREVIEW_HEIGHT = 1080;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Tag for the {@link Log}.
|
|
|
|
|
*/
|
|
|
|
|
private static final String TAG = "Camera2RawFragment";
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Camera state: Device is closed.
|
|
|
|
|
*/
|
|
|
|
|
private static final int STATE_CLOSED = 0;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Camera state: Device is opened, but is not capturing.
|
|
|
|
|
*/
|
|
|
|
|
private static final int STATE_OPENED = 1;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Camera state: Showing camera preview.
|
|
|
|
|
*/
|
|
|
|
|
private static final int STATE_PREVIEW = 2;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Camera state: Waiting for 3A convergence before capturing a photo.
|
|
|
|
|
*/
|
|
|
|
|
private static final int STATE_WAITING_FOR_3A_CONVERGENCE = 3;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* An {@link OrientationEventListener} used to determine when device rotation has occurred.
|
|
|
|
|
* This is mainly necessary when the device is rotated by 180 degrees, in which case
* onCreate or onConfigurationChanged is not called because the view dimensions remain the same,
* but the orientation of the device has changed, so the preview rotation must be updated.
|
|
|
|
|
*/
|
|
|
|
|
private OrientationEventListener mOrientationListener;
|
|
|
|
|
private static ArrayList<PngPhotoBean> list = new ArrayList<>(); // Stores the file names of the photos that have already been captured.
|
|
|
|
|
|
|
|
|
|
public static native boolean makeHdr(long exposureTime1, String path1, long exposureTime2, String path2, String outputPath);
|
|
|
|
|
|
|
|
|
|
public static native boolean makeHdr2(long exposureTime1, String path1, long exposureTime2, String path2, long exposureTime3, String path3, String outputPath);
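// A minimal usage sketch for the native HDR helpers above (the file paths are hypothetical, and the
// exposure times are presumably the SENSOR_EXPOSURE_TIME values used for each frame):
//
//   boolean ok = makeHdr(exposureTime1, "/sdcard/DCIM/raw_short.dng",
//                        exposureTime2, "/sdcard/DCIM/raw_long.dng",
//                        "/sdcard/DCIM/hdr_output.jpg");
//
// makeHdr2() works the same way but merges three differently exposed frames.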
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
private int mExposureComp = MainActivity.ExposureComp;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
|
|
|
|
|
* {@link TextureView}.
|
|
|
|
|
*/
|
|
|
|
|
private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
|
|
|
|
|
configureTransform(width, height);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
|
|
|
|
|
configureTransform(width, height);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mPreviewSize = null;
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* An {@link AutoFitTextureView} for camera preview.
|
|
|
|
|
*/
|
|
|
|
|
private AutoFitTextureView mTextureView;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* An additional thread for running tasks that shouldn't block the UI. This is used for all
|
|
|
|
|
* callbacks from the {@link CameraDevice} and {@link CameraCaptureSession}s.
|
|
|
|
|
*/
|
|
|
|
|
private HandlerThread mBackgroundThread;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A counter for tracking corresponding {@link CaptureRequest}s and {@link CaptureResult}s
|
|
|
|
|
* across the {@link CameraCaptureSession} capture callbacks.
|
|
|
|
|
*/
|
|
|
|
|
private final AtomicInteger mRequestCounter = new AtomicInteger();
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
|
|
|
|
|
*/
|
|
|
|
|
private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A lock protecting camera state.
|
|
|
|
|
*/
|
|
|
|
|
private final Object mCameraStateLock = new Object();
|
|
|
|
|
|
|
|
|
|
// *********************************************************************************************
|
|
|
|
|
// State protected by mCameraStateLock.
|
|
|
|
|
//
|
|
|
|
|
// The following state is used across both the UI and background threads. Methods with "Locked"
|
|
|
|
|
// in the name expect mCameraStateLock to be held while calling.
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* ID of the current {@link CameraDevice}.
|
|
|
|
|
*/
|
|
|
|
|
private String mCameraId;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A {@link CameraCaptureSession } for camera preview.
|
|
|
|
|
*/
|
|
|
|
|
private CameraCaptureSession mCaptureSession;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A reference to the open {@link CameraDevice}.
|
|
|
|
|
*/
|
|
|
|
|
private CameraDevice mCameraDevice;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The {@link Size} of camera preview.
|
|
|
|
|
*/
|
|
|
|
|
private Size mPreviewSize;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The {@link CameraCharacteristics} for the currently configured camera device.
|
|
|
|
|
*/
|
|
|
|
|
private CameraCharacteristics mCharacteristics;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A {@link Handler} for running tasks in the background.
|
|
|
|
|
*/
|
|
|
|
|
private Handler mBackgroundHandler;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A reference counted holder wrapping the {@link ImageReader} that handles JPEG image
|
|
|
|
|
* captures. This is used to allow us to clean up the {@link ImageReader} when all background
|
|
|
|
|
* tasks using its {@link Image}s have completed.
|
|
|
|
|
*/
|
|
|
|
|
// private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
|
|
|
|
|
* This is used to allow us to clean up the {@link ImageReader} when all background tasks using
|
|
|
|
|
* its {@link Image}s have completed.
|
|
|
|
|
*/
|
|
|
|
|
private RefCountedAutoCloseable<ImageReader> mRawImageReader;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Whether or not the currently configured camera device is fixed-focus.
|
|
|
|
|
*/
|
|
|
|
|
private boolean mNoAFRun = false;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Number of pending user requests to capture a photo.
|
|
|
|
|
*/
|
|
|
|
|
private int mPendingUserCaptures = 0;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress JPEG captures.
|
|
|
|
|
*/
|
|
|
|
|
private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mJpegResultQueue = new TreeMap<>();
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress RAW captures.
|
|
|
|
|
*/
|
|
|
|
|
private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* {@link CaptureRequest.Builder} for the camera preview
|
|
|
|
|
*/
|
|
|
|
|
private CaptureRequest.Builder mPreviewRequestBuilder;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The state of the camera device.
|
|
|
|
|
*
|
|
|
|
|
* @see #mPreCaptureCallback
|
|
|
|
|
*/
|
|
|
|
|
private int mState = STATE_CLOSED;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Timer to use with pre-capture sequence to ensure a timely capture if 3A convergence is
|
|
|
|
|
* taking too long.
|
|
|
|
|
*/
|
|
|
|
|
private long mCaptureTimer;
|
|
|
|
|
|
|
|
|
|
//**********************************************************************************************
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* {@link CameraDevice.StateCallback} is called when the currently active {@link CameraDevice}
|
|
|
|
|
* changes its state.
|
|
|
|
|
*/
|
|
|
|
|
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onOpened(CameraDevice cameraDevice) {
|
|
|
|
|
// This method is called when the camera is opened. We start camera preview here if
|
|
|
|
|
// the TextureView displaying this has been set up.
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mState = STATE_OPENED;
|
|
|
|
|
mCameraOpenCloseLock.release();
|
|
|
|
|
mCameraDevice = cameraDevice;
|
|
|
|
|
|
|
|
|
|
// Start the preview session if the TextureView has been set up already.
|
|
|
|
|
if (mPreviewSize != null && mTextureView.isAvailable()) {
|
|
|
|
|
createCameraPreviewSessionLocked();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onDisconnected(CameraDevice cameraDevice) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mState = STATE_CLOSED;
|
|
|
|
|
mCameraOpenCloseLock.release();
|
|
|
|
|
cameraDevice.close();
|
|
|
|
|
mCameraDevice = null;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onError(CameraDevice cameraDevice, int error) {
|
|
|
|
|
Log.e(TAG, "Received camera device error: " + error);
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mState = STATE_CLOSED;
|
|
|
|
|
mCameraOpenCloseLock.release();
|
|
|
|
|
cameraDevice.close();
|
|
|
|
|
mCameraDevice = null;
|
|
|
|
|
}
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
if (null != activity) {
|
|
|
|
|
activity.finish();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
* JPEG image is ready to be saved.
|
|
|
|
|
*/
|
|
|
|
|
private final ImageReader.OnImageAvailableListener mOnJpegImageAvailableListener = new ImageReader.OnImageAvailableListener() {
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onImageAvailable(ImageReader reader) {
|
|
|
|
|
// dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
* RAW image is ready to be saved.
|
|
|
|
|
*/
|
|
|
|
|
private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener = new ImageReader.OnImageAvailableListener() {
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onImageAvailable(ImageReader reader) {
|
|
|
|
|
dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
private Long exposetime;
|
|
|
|
|
private Integer sensitivity;
|
|
|
|
|
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
* pre-capture sequence.
*/
|
|
|
|
|
private CameraCaptureSession.CaptureCallback mPreCaptureCallback = new CameraCaptureSession.CaptureCallback() {
|
|
|
|
|
|
|
|
|
|
private void process(CaptureResult result) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
switch (mState) {
|
|
|
|
|
case STATE_PREVIEW: {
|
|
|
|
|
// We have nothing to do when the camera preview is running normally.
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case STATE_WAITING_FOR_3A_CONVERGENCE: {
|
|
|
|
|
boolean readyToCapture = true;
|
|
|
|
|
if (!mNoAFRun) {
|
|
|
|
|
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
|
|
|
|
|
// Read the auto-exposure time reported by the camera.
|
|
|
|
|
exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
|
|
|
|
|
// Read the auto ISO (sensor sensitivity).
|
|
|
|
|
sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
|
|
|
|
|
if (afState == null) {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If auto-focus has reached locked state, we are ready to capture
|
|
|
|
|
readyToCapture = (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If we are running on a non-legacy device, we should also wait until
|
|
|
|
|
// auto-exposure and auto-white-balance have converged as well before
|
|
|
|
|
// taking a picture.
|
|
|
|
|
if (!isLegacyLocked()) {
|
|
|
|
|
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
|
|
|
|
|
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
|
|
|
|
|
if (aeState == null || awbState == null) {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
readyToCapture = readyToCapture && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If we haven't finished the pre-capture sequence but have hit our maximum
|
|
|
|
|
// wait timeout, too bad! Begin capture anyway.
|
|
|
|
|
if (!readyToCapture && hitTimeoutLocked()) {
|
|
|
|
|
Log.w(TAG, "Timed out waiting for pre-capture sequence to complete.");
|
|
|
|
|
readyToCapture = true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (readyToCapture && mPendingUserCaptures > 0) {
|
|
|
|
|
// Capture once for each user tap of the "Picture" button.
|
|
|
|
|
while (mPendingUserCaptures > 0) {
|
|
|
|
|
captureStillPictureLocked(exposetime, sensitivity);
|
|
|
|
|
mPendingUserCaptures--;
|
|
|
|
|
}
|
|
|
|
|
// After this, the camera will go back to the normal state of preview.
|
|
|
|
|
mState = STATE_PREVIEW;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
|
|
|
|
|
process(partialResult);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
|
|
|
|
|
process(result);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
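// Reading aid (no new behavior): takePicture() moves mState from STATE_PREVIEW to
// STATE_WAITING_FOR_3A_CONVERGENCE and fires the AF/AE triggers; process() above then watches
// partial and total results until AF is locked and, on non-legacy devices, AE/AWB have converged,
// or until PRECAPTURE_TIMEOUT_MS expires, at which point it calls captureStillPictureLocked() once
// per pending user capture and returns to STATE_PREVIEW.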
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A {@link CameraCaptureSession.CaptureCallback} that handles the still JPEG and RAW capture
|
|
|
|
|
* request.
|
|
|
|
|
*/
|
|
|
|
|
private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
String currentDateTime = generateTimestamp();
|
|
|
|
|
File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
|
|
|
|
|
// File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_a.dng");
|
|
|
|
|
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
|
|
|
|
|
|
|
|
|
|
// Look up the ImageSaverBuilder for this request and update it with the file name
|
|
|
|
|
// based on the capture start time.
|
|
|
|
|
ImageSaver.ImageSaverBuilder jpegBuilder;
|
|
|
|
|
ImageSaver.ImageSaverBuilder rawBuilder;
|
|
|
|
|
int requestId = (int) request.getTag();
|
|
|
|
|
jpegBuilder = mJpegResultQueue.get(requestId);
|
|
|
|
|
rawBuilder = mRawResultQueue.get(requestId);
|
|
|
|
|
|
|
|
|
|
if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
|
|
|
|
|
if (rawBuilder != null) rawBuilder.setFile(rawFile);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
|
|
|
|
|
int requestId = (int) request.getTag();
|
|
|
|
|
ImageSaver.ImageSaverBuilder jpegBuilder;
|
|
|
|
|
ImageSaver.ImageSaverBuilder rawBuilder;
|
|
|
|
|
StringBuilder sb = new StringBuilder();
|
|
|
|
|
|
|
|
|
|
// Look up the ImageSaverBuilder for this request and update it with the CaptureResult
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
jpegBuilder = mJpegResultQueue.get(requestId);
|
|
|
|
|
rawBuilder = mRawResultQueue.get(requestId);
|
|
|
|
|
|
|
|
|
|
if (jpegBuilder != null) {
|
|
|
|
|
jpegBuilder.setResult(result);
|
|
|
|
|
sb.append("Saving JPEG as: ");
|
|
|
|
|
sb.append(jpegBuilder.getSaveLocation());
|
|
|
|
|
}
|
|
|
|
|
if (rawBuilder != null) {
|
|
|
|
|
rawBuilder.setResult(result);
|
|
|
|
|
if (jpegBuilder != null) sb.append(", ");
|
|
|
|
|
sb.append("Saving RAW as: ");
|
|
|
|
|
sb.append(rawBuilder.getSaveLocation());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If we have all the results necessary, save the image to a file in the background.
|
|
|
|
|
handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
|
|
|
|
|
handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
|
|
|
|
|
|
|
|
|
|
finishedCaptureLocked();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
showToast(sb.toString());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
|
|
|
|
|
int requestId = (int) request.getTag();
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mJpegResultQueue.remove(requestId);
|
|
|
|
|
mRawResultQueue.remove(requestId);
|
|
|
|
|
finishedCaptureLocked();
|
|
|
|
|
}
|
|
|
|
|
showToast("Capture failed!");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A {@link Handler} for showing {@link Toast}s on the UI thread.
|
|
|
|
|
*/
|
|
|
|
|
private final Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
|
|
|
|
|
@Override
|
|
|
|
|
public void handleMessage(Message msg) {
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
if (activity != null) {
|
|
|
|
|
Toast.makeText(activity, (String) msg.obj, Toast.LENGTH_SHORT).show();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
public boolean isHandTakePic = true;
|
|
|
|
|
private double pic1 = 1;
|
|
|
|
|
private double pic2 = 1;
|
|
|
|
|
// private long exposureTime = 0;
|
|
|
|
|
// private int sensitivity = 0;
|
|
|
|
|
|
|
|
|
|
public static Camera2RawFragment newInstance() {
|
|
|
|
|
return new Camera2RawFragment();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
|
|
|
|
|
return inflater.inflate(R.layout.fragment_camera2_raw, container, false);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onViewCreated(final View view, Bundle savedInstanceState) {
|
|
|
|
|
|
|
|
|
|
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Setup a new OrientationEventListener. This is used to handle rotation events like a
|
|
|
|
|
// 180 degree rotation that does not normally trigger a call to onCreate to do view re-layout
|
|
|
|
|
// or otherwise cause the preview TextureView's size to change.
|
|
|
|
|
mOrientationListener = new OrientationEventListener(getActivity(), SensorManager.SENSOR_DELAY_NORMAL) {
|
|
|
|
|
@Override
|
|
|
|
|
public void onOrientationChanged(int orientation) {
|
|
|
|
|
if (mTextureView != null && mTextureView.isAvailable()) {
|
|
|
|
|
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
Button takepic = view.findViewById(R.id.takepic);
|
|
|
|
|
takepic.setOnClickListener(new View.OnClickListener() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onClick(View view) {
|
|
|
|
|
isHandTakePic = true;
|
|
|
|
|
takePicture();
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onResume() {
|
|
|
|
|
super.onResume();
|
|
|
|
|
startBackgroundThread();
|
|
|
|
|
openCamera();
|
|
|
|
|
|
|
|
|
|
// When the screen is turned off and turned back on, the SurfaceTexture is already
|
|
|
|
|
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we should
|
|
|
|
|
// configure the preview bounds here (otherwise, we wait until the surface is ready in
|
|
|
|
|
// the SurfaceTextureListener).
|
|
|
|
|
if (mTextureView.isAvailable()) {
|
|
|
|
|
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
|
|
|
|
|
} else {
|
|
|
|
|
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
|
|
|
|
|
}
|
|
|
|
|
if (mOrientationListener != null && mOrientationListener.canDetectOrientation()) {
|
|
|
|
|
mOrientationListener.enable();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onPause() {
|
|
|
|
|
if (mOrientationListener != null) {
|
|
|
|
|
mOrientationListener.disable();
|
|
|
|
|
}
|
|
|
|
|
closeCamera();
|
|
|
|
|
stopBackgroundThread();
|
|
|
|
|
super.onPause();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
|
|
|
|
|
if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
|
|
|
|
|
for (int result : grantResults) {
|
|
|
|
|
if (result != PackageManager.PERMISSION_GRANTED) {
|
|
|
|
|
showMissingPermissionError();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private void restartFragment() {
|
|
|
|
|
final FragmentActivity activity = getActivity();
|
|
|
|
|
|
|
|
|
|
mBackgroundHandler.postDelayed(new Runnable() {
|
|
|
|
|
@Override
|
|
|
|
|
public void run() {
|
|
|
|
|
activity.getSupportFragmentManager().beginTransaction().replace(R.id.container, Camera2RawFragment.newInstance()).commit();
|
|
|
|
|
}
|
|
|
|
|
}, 100);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void doFocus() {
|
|
|
|
|
|
|
|
|
|
int previewWidth = mTextureView.getWidth();
|
|
|
|
|
int previewHeight = mTextureView.getHeight();
|
|
|
|
|
RectF previewRect = new RectF(0, 0, previewWidth, previewHeight);
|
|
|
|
|
|
|
|
|
|
CoordinateTransformer cf = new CoordinateTransformer(mCharacteristics, previewRect);
|
|
|
|
|
|
|
|
|
|
RectF rect = cf.toCameraSpace(previewRect);
|
|
|
|
|
|
|
|
|
|
MeteringRectangle mr = new MeteringRectangle(new Rect((int) rect.left, (int) rect.top, (int) rect.right, (int) rect.bottom), 1000);
|
|
|
|
|
startControlAFRequest(mr, mPreCaptureCallback);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// mCameraDevice.;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void startControlAFRequest(MeteringRectangle rect, CameraCaptureSession.CaptureCallback captureCallback) {
|
|
|
|
|
|
|
|
|
|
MeteringRectangle[] rectangle = new MeteringRectangle[]{rect};
|
|
|
|
|
// Focus Mode AUTO
|
|
|
|
|
// mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_AUTO);
|
|
|
|
|
// AE metering region.
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, rectangle);
|
|
|
|
|
// AF region. The same rect is used for both AF and AE here; in practice the AE rectangle is made slightly larger than the AF one, which gives better metering.
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, rectangle);
|
|
|
|
|
try {
|
|
|
|
|
// The AE/AF regions are applied by continuously sending requests via setRepeatingRequest.
|
|
|
|
|
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), captureCallback, mBackgroundHandler);
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
// Trigger auto-focus.
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
|
|
|
|
|
try {
|
|
|
|
|
// The focus trigger is sent as a single capture() request, because focus only needs to be triggered once after the user taps the screen.
|
|
|
|
|
mCaptureSession.capture(mPreviewRequestBuilder.build(), captureCallback, mBackgroundHandler);
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Sets up state related to camera that is needed before opening a {@link CameraDevice}.
|
|
|
|
|
*/
|
|
|
|
|
private boolean setUpCameraOutputs() {
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
|
|
|
|
|
if (manager == null) {
|
|
|
|
|
ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog");
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
try {
|
|
|
|
|
// Find a CameraDevice that supports RAW captures, and configure state.
|
|
|
|
|
for (String cameraId : manager.getCameraIdList()) {
|
|
|
|
|
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
|
|
|
|
|
|
|
|
|
|
// We only use a camera that supports RAW in this sample.
|
|
|
|
|
if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
|
|
|
|
|
|
|
|
|
|
// For still image captures, we use the largest available size.
|
|
|
|
|
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
|
|
|
|
|
|
|
|
|
|
Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(rawSizes), new CompareSizesByArea());
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
// Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
|
|
|
|
|
// counted wrapper to ensure they are only closed when all background tasks
|
|
|
|
|
// using them are finished.
|
|
|
|
|
// if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
|
|
|
|
|
// mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
|
|
|
|
|
// }
|
|
|
|
|
// mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler);
|
|
|
|
|
|
|
|
|
|
if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
|
|
|
|
|
mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
|
|
|
|
|
}
|
|
|
|
|
mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler);
|
|
|
|
|
|
|
|
|
|
mCharacteristics = characteristics;
|
|
|
|
|
mCameraId = cameraId;
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If we found no suitable cameras for capturing RAW, warn the user.
|
|
|
|
|
ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog");
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Opens the camera specified by {@link #mCameraId}.
|
|
|
|
|
*/
|
|
|
|
|
@SuppressWarnings("MissingPermission")
|
|
|
|
|
private void openCamera() {
|
|
|
|
|
if (!setUpCameraOutputs()) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
if (!hasAllPermissionsGranted()) {
|
|
|
|
|
requestCameraPermissions();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
|
|
|
|
|
try {
|
|
|
|
|
// Wait for any previously running session to finish.
|
|
|
|
|
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
|
|
|
|
|
throw new RuntimeException("Time out waiting to lock camera opening.");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
String cameraId;
|
|
|
|
|
Handler backgroundHandler;
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
cameraId = mCameraId;
|
|
|
|
|
backgroundHandler = mBackgroundHandler;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Attempt to open the camera. mStateCallback will be called on the background handler's
|
|
|
|
|
// thread when this succeeds or fails.
|
|
|
|
|
manager.openCamera(cameraId, mStateCallback, backgroundHandler);
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
} catch (InterruptedException e) {
|
|
|
|
|
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Requests permissions necessary to use camera and save pictures.
|
|
|
|
|
*/
|
|
|
|
|
private void requestCameraPermissions() {
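// Intentionally a no-op in this version; runtime permission requests are presumably handled
// elsewhere (for example by the hosting activity) before this fragment is used.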
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Tells whether all the necessary permissions are granted to this app.
|
|
|
|
|
*
|
|
|
|
|
* @return True if all the required permissions are granted.
|
|
|
|
|
*/
|
|
|
|
|
private boolean hasAllPermissionsGranted() {
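// Always returns true here; the CAMERA_PERMISSIONS declared above are assumed to have been
// granted before this fragment is created.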
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Gets whether you should show UI with rationale for requesting the permissions.
|
|
|
|
|
*
|
|
|
|
|
* @return True if the UI should be shown.
|
|
|
|
|
*/
|
|
|
|
|
private boolean shouldShowRationale() {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Shows an error explaining that the permission is required, then finishes the activity.
|
|
|
|
|
*/
|
|
|
|
|
private void showMissingPermissionError() {
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
if (activity != null) {
|
|
|
|
|
Toast.makeText(activity, R.string.request_permission, Toast.LENGTH_SHORT).show();
|
|
|
|
|
activity.finish();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Closes the current {@link CameraDevice}.
|
|
|
|
|
*/
|
|
|
|
|
private void closeCamera() {
|
|
|
|
|
try {
|
|
|
|
|
mCameraOpenCloseLock.acquire();
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
|
|
|
|
|
// Reset state and clean up resources used by the camera.
|
|
|
|
|
// Note: After calling this, the ImageReaders will be closed after any background
|
|
|
|
|
// tasks saving Images from these readers have been completed.
|
|
|
|
|
mPendingUserCaptures = 0;
|
|
|
|
|
mState = STATE_CLOSED;
|
|
|
|
|
if (null != mCaptureSession) {
|
|
|
|
|
mCaptureSession.close();
|
|
|
|
|
mCaptureSession = null;
|
|
|
|
|
}
|
|
|
|
|
if (null != mCameraDevice) {
|
|
|
|
|
mCameraDevice.close();
|
|
|
|
|
mCameraDevice = null;
|
|
|
|
|
}
|
|
|
|
|
// if (null != mJpegImageReader) {
|
|
|
|
|
// mJpegImageReader.close();
|
|
|
|
|
// mJpegImageReader = null;
|
|
|
|
|
// }
|
|
|
|
|
if (null != mRawImageReader) {
|
|
|
|
|
mRawImageReader.close();
|
|
|
|
|
mRawImageReader = null;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} catch (InterruptedException e) {
|
|
|
|
|
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
|
|
|
|
|
} finally {
|
|
|
|
|
mCameraOpenCloseLock.release();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Starts a background thread and its {@link Handler}.
|
|
|
|
|
*/
|
|
|
|
|
private void startBackgroundThread() {
|
|
|
|
|
mBackgroundThread = new HandlerThread("CameraBackground");
|
|
|
|
|
mBackgroundThread.start();
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Stops the background thread and its {@link Handler}.
|
|
|
|
|
*/
|
|
|
|
|
private void stopBackgroundThread() {
|
|
|
|
|
mBackgroundThread.quitSafely();
|
|
|
|
|
try {
|
|
|
|
|
mBackgroundThread.join();
|
|
|
|
|
mBackgroundThread = null;
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mBackgroundHandler = null;
|
|
|
|
|
}
|
|
|
|
|
} catch (InterruptedException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Creates a new {@link CameraCaptureSession} for camera preview.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*/
|
|
|
|
|
private void createCameraPreviewSessionLocked() {
|
|
|
|
|
try {
|
|
|
|
|
SurfaceTexture texture = mTextureView.getSurfaceTexture();
|
|
|
|
|
// We configure the size of default buffer to be the size of camera preview we want.
|
|
|
|
|
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
|
|
|
|
|
|
|
|
|
|
// This is the output Surface we need to start preview.
|
|
|
|
|
Surface surface = new Surface(texture);
|
|
|
|
|
|
|
|
|
|
// We set up a CaptureRequest.Builder with the output Surface.
|
|
|
|
|
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
|
|
|
|
|
mPreviewRequestBuilder.addTarget(surface);
|
|
|
|
|
|
|
|
|
|
// Here, we create a CameraCaptureSession for camera preview.
|
|
|
|
|
mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
// The camera is already closed
|
|
|
|
|
if (null == mCameraDevice) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
setup3AControlsLocked(mPreviewRequestBuilder);
|
|
|
|
|
|
|
|
|
|
if (mExposureComp != 0) {
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, mExposureComp);
|
|
|
|
|
}
|
|
|
|
|
// Finally, we start displaying the camera preview.
|
|
|
|
|
cameraCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
|
|
|
|
|
mState = STATE_PREVIEW;
|
|
|
|
|
} catch (CameraAccessException | IllegalStateException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
// When the session is ready, we start displaying the preview.
|
|
|
|
|
mCaptureSession = cameraCaptureSession;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
|
|
|
|
|
showToast("Failed to configure camera.");
|
|
|
|
|
}
|
|
|
|
|
}, mBackgroundHandler);
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Configure the given {@link CaptureRequest.Builder} to use auto-focus, auto-exposure, and
|
|
|
|
|
* auto-white-balance controls if available.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*
|
|
|
|
|
* @param builder the builder to configure.
|
|
|
|
|
*/
|
|
|
|
|
private void setup3AControlsLocked(CaptureRequest.Builder builder) {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// parameters.setPreviewFpsRange(10,30);
|
|
|
|
|
// parameters.setPreviewFrameRate(10);
|
|
|
|
|
|
|
|
|
|
Range<Integer>[] fpsRanges = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
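// Note: the fixed 5-30 fps target range requested below is not validated against the supported
// ranges in fpsRanges; some devices may require choosing one of the advertised ranges instead.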
|
|
|
|
|
Range<Integer> fpsRange = new Range<>(5, 30);
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
|
|
|
|
|
|
|
|
|
|
// Enable auto-magical 3A run by camera device
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
|
|
|
|
|
|
|
|
|
|
Float minFocusDist = mCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
|
|
|
|
|
|
|
|
|
|
// If MINIMUM_FOCUS_DISTANCE is 0, lens is fixed-focus and we need to skip the AF run.
|
|
|
|
|
mNoAFRun = (minFocusDist == null || minFocusDist == 0);
|
|
|
|
|
|
|
|
|
|
if (!mNoAFRun) {
|
|
|
|
|
// If there is a "continuous picture" mode available, use it, otherwise default to AUTO.
|
|
|
|
|
if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES), CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
|
|
|
|
|
} else {
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// MeteringRectangle mr = new MeteringRectangle(rect, 1000);
|
|
|
|
|
// builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] {mr});
|
|
|
|
|
// builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {mr});
|
|
|
|
|
|
|
|
|
|
// builder.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(MeteringRectangle(rect, 1000)))
|
|
|
|
|
|
|
|
|
|
// set(CaptureRequest.CONTROL_AE_REGIONS, arrayOf(MeteringRectangle(rect, 1000)))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If there is an auto-magical flash control mode available, use it, otherwise default to
|
|
|
|
|
// the "on" mode, which is guaranteed to always be available.
|
|
|
|
|
if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES), CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)) {
|
|
|
|
|
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
|
|
|
|
|
} else {
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If there is an auto-magical white balance control mode available, use it.
|
|
|
|
|
if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES), CaptureRequest.CONTROL_AWB_MODE_DAYLIGHT)) {
|
|
|
|
|
// Prefer the fixed DAYLIGHT white-balance preset when the device supports it.
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_DAYLIGHT);
|
|
|
|
|
} else if (contains(mCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES), CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
|
|
|
|
|
// Allow AWB to run auto-magically if this device supports this
|
|
|
|
|
builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Configure the necessary {@link android.graphics.Matrix} transformation to `mTextureView`,
|
|
|
|
|
* and start/restart the preview capture session if necessary.
|
|
|
|
|
* <p/>
|
|
|
|
|
* This method should be called after the camera state has been initialized in
|
|
|
|
|
* setUpCameraOutputs.
|
|
|
|
|
*
|
|
|
|
|
* @param viewWidth The width of `mTextureView`
|
|
|
|
|
* @param viewHeight The height of `mTextureView`
|
|
|
|
|
*/
|
|
|
|
|
private void configureTransform(int viewWidth, int viewHeight) {
|
|
|
|
|
Activity activity = getActivity();
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
if (null == mTextureView || null == activity) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
StreamConfigurationMap map = null;
try {
map = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
} catch (Exception ex) {
ex.printStackTrace();
}
|
|
|
|
|
// For still image captures, we always use the largest available size.
|
|
|
|
|
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
|
|
|
|
|
|
|
|
|
|
// Find the rotation of the device relative to the native device orientation.
|
|
|
|
|
int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
|
|
|
|
|
Point displaySize = new Point();
|
|
|
|
|
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
|
|
|
|
|
|
|
|
|
|
// Find the rotation of the device relative to the camera sensor's orientation.
|
|
|
|
|
int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);
|
|
|
|
|
|
|
|
|
|
// Swap the view dimensions for calculation as needed if they are rotated relative to
|
|
|
|
|
// the sensor.
|
|
|
|
|
boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
|
|
|
|
|
int rotatedViewWidth = viewWidth;
|
|
|
|
|
int rotatedViewHeight = viewHeight;
|
|
|
|
|
int maxPreviewWidth = displaySize.x;
|
|
|
|
|
int maxPreviewHeight = displaySize.y;
|
|
|
|
|
|
|
|
|
|
if (swappedDimensions) {
|
|
|
|
|
rotatedViewWidth = viewHeight;
|
|
|
|
|
rotatedViewHeight = viewWidth;
|
|
|
|
|
maxPreviewWidth = displaySize.y;
|
|
|
|
|
maxPreviewHeight = displaySize.x;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Preview should not be larger than display size and 1080p.
|
|
|
|
|
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
|
|
|
|
|
maxPreviewWidth = MAX_PREVIEW_WIDTH;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
|
|
|
|
|
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Find the best preview size for these view dimensions and configured JPEG size.
|
|
|
|
|
Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
|
|
|
|
|
// Size previewSize = new Size(3840, 2160);
|
|
|
|
|
|
|
|
|
|
if (swappedDimensions) {
|
|
|
|
|
mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
|
|
|
|
|
} else {
|
|
|
|
|
mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Find rotation of device in degrees (reverse device orientation for front-facing
|
|
|
|
|
// cameras).
|
|
|
|
|
int rotation = (mCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) ? (360 + ORIENTATIONS.get(deviceRotation)) % 360 : (360 - ORIENTATIONS.get(deviceRotation)) % 360;
|
|
|
|
|
|
|
|
|
|
Matrix matrix = new Matrix();
|
|
|
|
|
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
|
|
|
|
|
RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
|
|
|
|
|
float centerX = viewRect.centerX();
|
|
|
|
|
float centerY = viewRect.centerY();
|
|
|
|
|
|
|
|
|
|
// if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
|
|
|
|
|
// bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
|
|
|
|
|
// matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
|
|
|
|
|
// float scale = Math.max((float) viewHeight / previewSize.getHeight(), (float) viewWidth / previewSize.getWidth());
|
|
|
|
|
// matrix.postScale(scale, scale, centerX, centerY);
|
|
|
|
|
//
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
|
|
|
|
|
if (rotation == 0) {
|
|
|
|
|
matrix.postScale(1, 1);
|
|
|
|
|
} else if (rotation == 90) {
|
|
|
|
|
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
|
|
|
|
|
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
|
|
|
|
|
float scaleh = (float) viewHeight / previewSize.getHeight();
|
|
|
|
|
float scalew = (float) viewWidth / previewSize.getWidth();
|
|
|
|
|
matrix.postScale(scalew, scaleh, centerX, centerY);
|
|
|
|
|
} else if (rotation == 180) {
|
|
|
|
|
matrix.postScale(1, 1);
|
|
|
|
|
} else if (rotation == 270) {
|
|
|
|
|
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
|
|
|
|
|
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
|
|
|
|
|
float scaleh = (float) viewHeight / previewSize.getHeight();
|
|
|
|
|
float scalew = (float) viewWidth / previewSize.getWidth();
|
|
|
|
|
matrix.postScale(scaleh, scalew, centerX, centerY);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
matrix.postRotate(rotation, centerX, centerY);
|
|
|
|
|
|
|
|
|
|
mTextureView.setTransform(matrix);
|
|
|
|
|
|
|
|
|
|
// Start or restart the active capture session if the preview was initialized or
|
|
|
|
|
// if its aspect ratio changed significantly.
|
|
|
|
|
if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
|
|
|
|
|
mPreviewSize = previewSize;
|
|
|
|
|
if (mState != STATE_CLOSED) {
|
|
|
|
|
createCameraPreviewSessionLocked();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Initiate a still image capture.
|
|
|
|
|
* <p/>
|
|
|
|
|
* This function sends a capture request that initiates a pre-capture sequence in our state
|
|
|
|
|
* machine that waits for auto-focus to finish, ending in a "locked" state where the lens is no
|
|
|
|
|
* longer moving, waits for auto-exposure to choose a good exposure value, and waits for
|
|
|
|
|
* auto-white-balance to converge.
|
|
|
|
|
*/
|
|
|
|
|
public void takePicture() {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
mPendingUserCaptures++;
|
|
|
|
|
|
|
|
|
|
// If we already triggered a pre-capture sequence, or are in a state where we cannot
|
|
|
|
|
// do this, return immediately.
|
|
|
|
|
if (mState != STATE_PREVIEW) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
// If this is not a legacy device, we can also trigger an auto-exposure metering
|
|
|
|
|
// run.
|
|
|
|
|
if (!isLegacyLocked()) {
|
|
|
|
|
// Start an auto-exposure precapture metering sequence.
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Trigger an auto-focus run if camera is capable. If the camera is already focused,
|
|
|
|
|
// this should do nothing.
|
|
|
|
|
if (!mNoAFRun) {
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Update state machine to wait for auto-focus, auto-exposure, and
|
|
|
|
|
// auto-white-balance (aka. "3A") to converge.
|
|
|
|
|
mState = STATE_WAITING_FOR_3A_CONVERGENCE;
|
|
|
|
|
|
|
|
|
|
// Start a timer for the pre-capture sequence.
|
|
|
|
|
startTimerLocked();
|
|
|
|
|
|
|
|
|
|
// Replace the existing repeating request with one with updated 3A triggers.
|
|
|
|
|
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Send a capture request to the camera device that initiates a capture targeting the JPEG and
|
|
|
|
|
* RAW outputs.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*/
|
|
|
|
|
private void captureStillPictureLocked(long exposureTime, int sensitivity) {
|
|
|
|
|
try {
|
|
|
|
|
final Activity activity = getActivity();
|
|
|
|
|
if (null == activity || null == mCameraDevice) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
// This is the CaptureRequest.Builder that we use to take a picture.
|
|
|
|
|
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
|
|
|
|
|
|
|
|
|
|
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
|
|
|
|
|
captureBuilder.addTarget(mRawImageReader.get().getSurface());
|
|
|
|
|
|
|
|
|
|
// Use the same AE and AF modes as the preview.
|
|
|
|
|
setup3AControlsLocked(captureBuilder);
|
|
|
|
|
|
|
|
|
|
if (mExposureComp != 0) {
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, mExposureComp);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Set orientation.
|
|
|
|
|
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
|
|
|
|
|
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, sensorToDeviceRotation(mCharacteristics, rotation));
|
|
|
|
|
|
|
|
|
|
Range<Integer> range = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
|
|
|
|
|
Rational rational = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
|
|
|
|
|
double step = rational.doubleValue();
|
|
|
|
|
|
|
|
|
|
// mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
|
|
|
|
|
List<CaptureRequest> requests = new ArrayList<>();
|
|
|
|
|
double v = 0;
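// Two bracketed frames are queued below. When isHandTakePic is set, frame 0 disables auto-exposure
// and uses the metered exposure time scaled by pic1 at the metered ISO, while frame 1 uses a longer
// (7x) exposure at ISO 100; the resulting files are presumably merged later via makeHdr()/makeHdr2().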
|
|
|
|
|
for (int idx = 0; idx < 2; idx++) {
|
|
|
|
|
// Set request tag to easily track results in callbacks.
|
|
|
|
|
captureBuilder.setTag(mRequestCounter.getAndIncrement());
|
|
|
|
|
|
|
|
|
|
if (idx == 0) {
|
|
|
|
|
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
|
|
|
|
|
// Set the exposure time manually, e.g. 1000 microseconds:
// long exposureTime = 1000000L; // 1000 microseconds; SENSOR_EXPOSURE_TIME is specified in nanoseconds
|
|
|
|
|
if (isHandTakePic) {
|
|
|
|
|
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
|
|
|
|
|
if (exposureTime > 0) {
|
|
|
|
|
v = exposureTime * pic1;
|
|
|
|
|
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
|
|
|
|
|
}
|
|
|
|
|
if (sensitivity > 0) {
|
|
|
|
|
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (idx == 1) {
|
|
|
|
|
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
|
|
|
|
|
// Set the exposure time manually, e.g. 1000 microseconds:
// long exposureTime = 1000000L; // 1000 microseconds; SENSOR_EXPOSURE_TIME is specified in nanoseconds
|
|
|
|
|
if (isHandTakePic) {
|
|
|
|
|
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
|
|
|
|
|
if (exposureTime > 0) {
|
|
|
|
|
v = exposureTime * 7;
|
|
|
|
|
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
|
|
|
|
|
}
|
|
|
|
|
if (sensitivity > 0) {
|
|
|
|
|
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CaptureRequest request = captureBuilder.build();
|
|
|
|
|
|
|
|
|
|
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
|
|
|
|
|
// of active requests.
|
|
|
|
|
ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
|
|
|
|
|
ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
|
|
|
|
|
|
|
|
|
|
rawBuilder.setExposetime((long) v);
|
|
|
|
|
|
|
|
|
|
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
|
|
|
|
|
mRawResultQueue.put((int) request.getTag(), rawBuilder);
|
|
|
|
|
|
|
|
|
|
requests.add(request);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
|
|
|
|
|
//
|
|
|
|
|
// // Set request tag to easily track results in callbacks.
|
|
|
|
|
// captureBuilder.setTag(mRequestCounter.getAndIncrement());
|
|
|
|
|
//
|
|
|
|
|
// CaptureRequest request = captureBuilder.build();
|
|
|
|
|
//
|
|
|
|
|
// // Create an ImageSaverBuilder in which to collect results, and add it to the queue
|
|
|
|
|
// // of active requests.
|
|
|
|
|
// ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
|
|
|
|
|
// ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
|
|
|
|
|
//
|
|
|
|
|
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
|
|
|
|
|
// mRawResultQueue.put((int) request.getTag(), rawBuilder);
|
|
|
|
|
//
|
|
|
|
|
// mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Called after a RAW/JPEG capture has completed; resets the AF trigger state for the
|
|
|
|
|
* pre-capture sequence.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*/
|
|
|
|
|
private void finishedCaptureLocked() {
|
|
|
|
|
try {
|
|
|
|
|
// Reset the auto-focus trigger in case AF didn't run quickly enough.
|
|
|
|
|
if (!mNoAFRun) {
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
|
|
|
|
|
|
|
|
|
|
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
|
|
|
|
|
|
|
|
|
|
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
|
|
|
|
|
}
|
|
|
|
|
} catch (CameraAccessException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
|
|
|
|
|
* that {@link ImageReader} until that {@link Image} is no longer in use, and set this
|
|
|
|
|
* {@link Image} as the result for the next request in the queue of pending requests. If
|
|
|
|
|
* all necessary information is available, begin saving the image to a file in a background
|
|
|
|
|
* thread.
|
|
|
|
|
*
|
|
|
|
|
* @param pendingQueue the currently active requests.
|
|
|
|
|
* @param reader a reference counted wrapper containing an {@link ImageReader} from which
|
|
|
|
|
* to acquire an image.
|
|
|
|
|
*/
|
|
|
|
|
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
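// Capture results and Images can arrive in either order, so pick the first pending request that
// has not yet been given an Image instead of always taking the queue's first entry.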
|
|
|
|
|
if (pendingQueue != null) {
|
|
|
|
|
for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> item : pendingQueue.entrySet()) {
|
|
|
|
|
ImageSaver.ImageSaverBuilder value = item.getValue();
|
|
|
|
|
if (value.mImage == null) {
|
|
|
|
|
entry = item;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (entry == null) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
ImageSaver.ImageSaverBuilder builder = entry.getValue();
|
|
|
|
|
|
|
|
|
|
// Increment reference count to prevent ImageReader from being closed while we
|
|
|
|
|
// are saving its Images in a background thread (otherwise their resources may
|
|
|
|
|
// be freed while we are writing to a file).
|
|
|
|
|
if (reader == null || reader.getAndRetain() == null) {
|
|
|
|
|
Log.e(TAG, "Paused the activity before we could save the image," + " ImageReader already closed.");
|
|
|
|
|
pendingQueue.remove(entry.getKey());
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Image image;
|
|
|
|
|
try {
|
|
|
|
|
image = reader.get().acquireNextImage();
|
|
|
|
|
} catch (IllegalStateException e) {
|
|
|
|
|
Log.e(TAG, "Too many images queued for saving, dropping image for request: " + entry.getKey());
|
|
|
|
|
pendingQueue.remove(entry.getKey());
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
builder.setRefCountedReader(reader).setImage(image);
|
|
|
|
|
handleCompletionLocked(entry.getKey(), builder, pendingQueue);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private static class ImageSaver implements Runnable {
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The image to save.
|
|
|
|
|
*/
|
|
|
|
|
private final Image mImage;
|
|
|
|
|
/**
|
|
|
|
|
* The file we save the image into.
|
|
|
|
|
*/
|
|
|
|
|
private final File mFile;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The CaptureResult for this image capture.
|
|
|
|
|
*/
|
|
|
|
|
private final CaptureResult mCaptureResult;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The CameraCharacteristics for this camera device.
|
|
|
|
|
*/
|
|
|
|
|
private final CameraCharacteristics mCharacteristics;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* The Context to use when updating MediaStore with the saved images.
|
|
|
|
|
*/
|
|
|
|
|
private final Context mContext;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A reference counted wrapper for the ImageReader that owns the given image.
|
|
|
|
|
*/
|
|
|
|
|
private final RefCountedAutoCloseable<ImageReader> mReader;
|
|
|
|
|
private final long mExpostime;
|
|
|
|
|
|
|
|
|
|
private ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics, Context context, RefCountedAutoCloseable<ImageReader> reader, long mexpostime) {
|
|
|
|
|
mImage = image;
|
|
|
|
|
mFile = file;
|
|
|
|
|
mCaptureResult = result;
|
|
|
|
|
mCharacteristics = characteristics;
|
|
|
|
|
mContext = context;
|
|
|
|
|
mReader = reader;
|
|
|
|
|
mExpostime = mexpostime;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public void run() {
|
|
|
|
|
boolean success = false;
|
|
|
|
|
int format = mImage.getFormat();
|
|
|
|
|
switch (format) {
|
|
|
|
|
case ImageFormat.JPEG: {
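// A JPEG Image has a single plane whose ByteBuffer already holds the complete compressed file,
// so the bytes can be written straight to disk.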
|
|
|
|
|
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
|
|
|
|
|
byte[] bytes = new byte[buffer.remaining()];
|
|
|
|
|
buffer.get(bytes);
|
|
|
|
|
FileOutputStream output = null;
|
|
|
|
|
try {
|
|
|
|
|
output = new FileOutputStream(mFile);
|
|
|
|
|
output.write(bytes);
|
|
|
|
|
success = true;
|
|
|
|
|
} catch (IOException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
} finally {
|
|
|
|
|
mImage.close();
|
|
|
|
|
closeOutput(output);
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case ImageFormat.RAW_SENSOR: {
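// DngCreator wraps the RAW_SENSOR data in a DNG container, embedding the camera
// characteristics and this capture's metadata alongside the pixel data.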
|
|
|
|
|
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
|
|
|
|
|
FileOutputStream output = null;
|
|
|
|
|
try {
|
|
|
|
|
output = new FileOutputStream(mFile);
|
|
|
|
|
dngCreator.writeImage(output, mImage);
|
|
|
|
|
success = true;
|
|
|
|
|
} catch (IOException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
} finally {
|
|
|
|
|
mImage.close();
|
|
|
|
|
closeOutput(output);
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
default: {
|
|
|
|
|
Log.e(TAG, "Cannot save image, unexpected image format:" + format);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
mReader.close();
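// App-specific post-processing: convert the saved file to a PNG named with its exposure time,
// and once two exposures have been collected merge them into an HDR output via makeHdr().
// Both `list` and makeHdr() are assumed to be declared elsewhere in this class (makeHdr
// presumably being a native HDR-fusion routine).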
|
|
|
|
|
if (success) {
|
|
|
|
|
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
|
|
|
|
|
String directoryPath = directory.getPath();
|
|
|
|
|
File file = new File(directory, "create_" + mExpostime + "_" + generateTimestamp() + ".png");
|
|
|
|
|
String path = file.getPath();
|
|
|
|
|
try {
|
|
|
|
|
RawToJpgConverter.convertRawToJpg(mFile.getPath(), path);
|
|
|
|
|
} catch (IOException e) {
|
|
|
|
|
throw new RuntimeException(e);
|
|
|
|
|
}
|
|
|
|
|
PngPhotoBean bean = new PngPhotoBean();
|
|
|
|
|
bean.setEtime(mExpostime);
|
|
|
|
|
bean.setPath(path);
|
|
|
|
|
list.add(bean);
|
|
|
|
|
if (list.size() == 2) {
|
|
|
|
|
PngPhotoBean bean1 = list.get(0);
|
|
|
|
|
PngPhotoBean bean2 = list.get(1);
|
|
|
|
|
makeHdr(bean1.getEtime(), bean1.getPath(), bean2.getEtime(), bean2.getPath(), directoryPath + "/" + "hdr_" + generateTimestamp() + ".png");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Builder class for constructing {@link ImageSaver}s.
|
|
|
|
|
* <p/>
|
|
|
|
|
* This class is thread safe.
|
|
|
|
|
*/
|
|
|
|
|
public static class ImageSaverBuilder {
|
|
|
|
|
private Image mImage;
|
|
|
|
|
private File mFile;
|
|
|
|
|
private CaptureResult mCaptureResult;
|
|
|
|
|
private CameraCharacteristics mCharacteristics;
|
|
|
|
|
private Context mContext;
|
|
|
|
|
|
|
|
|
|
private long mexpostime;
|
|
|
|
|
private RefCountedAutoCloseable<ImageReader> mReader;
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Construct a new ImageSaverBuilder using the given {@link Context}.
|
|
|
|
|
*
|
|
|
|
|
* @param context a {@link Context} to use for accessing the
|
|
|
|
|
* {@link android.provider.MediaStore}.
|
|
|
|
|
*/
|
|
|
|
|
public ImageSaverBuilder(final Context context) {
|
|
|
|
|
mContext = context;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setExposetime(long time) {
|
|
|
|
|
|
|
|
|
|
mexpostime = time;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setRefCountedReader(RefCountedAutoCloseable<ImageReader> reader) {
|
|
|
|
|
if (reader == null) throw new NullPointerException();
|
|
|
|
|
|
|
|
|
|
mReader = reader;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setImage(final Image image) {
|
|
|
|
|
if (image == null) throw new NullPointerException();
|
|
|
|
|
mImage = image;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setFile(final File file) {
|
|
|
|
|
if (file == null) throw new NullPointerException();
|
|
|
|
|
mFile = file;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
|
|
|
|
|
if (result == null) throw new NullPointerException();
|
|
|
|
|
mCaptureResult = result;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaverBuilder setCharacteristics(final CameraCharacteristics characteristics) {
|
|
|
|
|
if (characteristics == null) throw new NullPointerException();
|
|
|
|
|
mCharacteristics = characteristics;
|
|
|
|
|
return this;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized ImageSaver buildIfComplete() {
|
|
|
|
|
if (!isComplete()) {
|
|
|
|
|
return null;
|
|
|
|
|
}
|
|
|
|
|
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader, mexpostime);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public synchronized String getSaveLocation() {
|
|
|
|
|
return (mFile == null) ? "Unknown" : mFile.toString();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private boolean isComplete() {
|
|
|
|
|
return mImage != null && mFile != null && mCaptureResult != null && mCharacteristics != null;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Utility classes and methods:
|
|
|
|
|
// *********************************************************************************************
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Comparator based on area of the given {@link Size} objects.
|
|
|
|
|
*/
|
|
|
|
|
static class CompareSizesByArea implements Comparator<Size> {
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public int compare(Size lhs, Size rhs) {
|
|
|
|
|
// We cast here to ensure the multiplications won't overflow
|
|
|
|
|
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A dialog fragment for displaying non-recoverable errors; this {@link Activity} will be
|
|
|
|
|
* finished once the dialog has been acknowledged by the user.
|
|
|
|
|
*/
|
|
|
|
|
public static class ErrorDialog extends DialogFragment {
|
|
|
|
|
|
|
|
|
|
private String mErrorMessage;
|
|
|
|
|
|
|
|
|
|
public ErrorDialog() {
|
|
|
|
|
mErrorMessage = "Unknown error occurred!";
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Build a dialog with a custom message (Fragments require default constructor).
|
|
|
|
|
public static ErrorDialog buildErrorDialog(String errorMessage) {
|
|
|
|
|
ErrorDialog dialog = new ErrorDialog();
|
|
|
|
|
dialog.mErrorMessage = errorMessage;
|
|
|
|
|
return dialog;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public Dialog onCreateDialog(Bundle savedInstanceState) {
|
|
|
|
|
final Activity activity = getActivity();
|
|
|
|
|
return new AlertDialog.Builder(activity).setMessage(mErrorMessage).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onClick(DialogInterface dialogInterface, int i) {
|
|
|
|
|
activity.finish();
|
|
|
|
|
}
|
|
|
|
|
}).create();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A wrapper for an {@link AutoCloseable} object that implements reference counting to allow
|
|
|
|
|
* for resource management.
|
|
|
|
|
*/
|
|
|
|
|
public static class RefCountedAutoCloseable<T extends AutoCloseable> implements AutoCloseable {
|
|
|
|
|
private T mObject;
|
|
|
|
|
private long mRefCount = 0;
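// A count of 0 represents the creator's own reference: getAndRetain() increments the count,
// close() decrements it, and the wrapped object is closed only once the count drops below zero.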
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Wrap the given object.
|
|
|
|
|
*
|
|
|
|
|
* @param object an object to wrap.
|
|
|
|
|
*/
|
|
|
|
|
public RefCountedAutoCloseable(T object) {
|
|
|
|
|
if (object == null) throw new NullPointerException();
|
|
|
|
|
mObject = object;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Increment the reference count and return the wrapped object.
|
|
|
|
|
*
|
|
|
|
|
* @return the wrapped object, or null if the object has been released.
|
|
|
|
|
*/
|
|
|
|
|
public synchronized T getAndRetain() {
|
|
|
|
|
if (mRefCount < 0) {
|
|
|
|
|
return null;
|
|
|
|
|
}
|
|
|
|
|
mRefCount++;
|
|
|
|
|
return mObject;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Return the wrapped object.
|
|
|
|
|
*
|
|
|
|
|
* @return the wrapped object, or null if the object has been released.
|
|
|
|
|
*/
|
|
|
|
|
public synchronized T get() {
|
|
|
|
|
return mObject;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Decrement the reference count and release the wrapped object if there are no other
|
|
|
|
|
* users retaining this object.
|
|
|
|
|
*/
|
|
|
|
|
@Override
|
|
|
|
|
public synchronized void close() {
|
|
|
|
|
if (mRefCount >= 0) {
|
|
|
|
|
mRefCount--;
|
|
|
|
|
if (mRefCount < 0) {
|
|
|
|
|
try {
|
|
|
|
|
mObject.close();
|
|
|
|
|
} catch (Exception e) {
|
|
|
|
|
throw new RuntimeException(e);
|
|
|
|
|
} finally {
|
|
|
|
|
mObject = null;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
* is at least as large as the respective texture view size, and at most as large as the
* respective max size, and whose aspect ratio matches the specified value. If no such size
* exists, choose the largest one that is at most as large as the respective max size and
* whose aspect ratio matches the specified value.
|
|
|
|
|
*
|
|
|
|
|
* @param choices The list of sizes that the camera supports for the intended output
|
|
|
|
|
* class
|
|
|
|
|
* @param textureViewWidth The width of the texture view relative to sensor coordinate
|
|
|
|
|
* @param textureViewHeight The height of the texture view relative to sensor coordinate
|
|
|
|
|
* @param maxWidth The maximum width that can be chosen
|
|
|
|
|
* @param maxHeight The maximum height that can be chosen
|
|
|
|
|
* @param aspectRatio The aspect ratio
|
|
|
|
|
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
|
|
|
|
|
*/
|
|
|
|
|
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
|
|
|
|
|
// Collect the supported resolutions that are at least as big as the preview Surface
|
|
|
|
|
List<Size> bigEnough = new ArrayList<>();
|
|
|
|
|
// Collect the supported resolutions that are smaller than the preview Surface
|
|
|
|
|
List<Size> notBigEnough = new ArrayList<>();
|
|
|
|
|
int w = aspectRatio.getWidth();
|
|
|
|
|
int h = aspectRatio.getHeight();
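// Note: the aspect-ratio test below uses integer arithmetic, so only sizes matching
// width * h / w exactly are considered.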
|
|
|
|
|
for (Size option : choices) {
|
|
|
|
|
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight && option.getHeight() == option.getWidth() * h / w) {
|
|
|
|
|
if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
|
|
|
|
|
bigEnough.add(option);
|
|
|
|
|
} else {
|
|
|
|
|
notBigEnough.add(option);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Pick the smallest of those big enough. If there is no one big enough, pick the
|
|
|
|
|
// largest of those not big enough.
|
|
|
|
|
if (bigEnough.size() > 0) {
|
|
|
|
|
return Collections.min(bigEnough, new CompareSizesByArea());
|
|
|
|
|
} else if (notBigEnough.size() > 0) {
|
|
|
|
|
return Collections.max(notBigEnough, new CompareSizesByArea());
|
|
|
|
|
} else {
|
|
|
|
|
Log.e(TAG, "Couldn't find any suitable preview size");
|
|
|
|
|
return choices[0];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Generate a string containing a formatted timestamp with the current date and time.
|
|
|
|
|
*
|
|
|
|
|
* @return a {@link String} representing a time.
|
|
|
|
|
*/
|
|
|
|
|
private static String generateTimestamp() {
|
|
|
|
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US);
|
|
|
|
|
return sdf.format(new Date());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Clean up the given {@link OutputStream}.
|
|
|
|
|
*
|
|
|
|
|
* @param outputStream the stream to close.
|
|
|
|
|
*/
|
|
|
|
|
private static void closeOutput(OutputStream outputStream) {
|
|
|
|
|
if (null != outputStream) {
|
|
|
|
|
try {
|
|
|
|
|
outputStream.close();
|
|
|
|
|
} catch (IOException e) {
|
|
|
|
|
e.printStackTrace();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Return true if the given array contains the given integer.
|
|
|
|
|
*
|
|
|
|
|
* @param modes array to check.
|
|
|
|
|
* @param mode integer to check for.
|
|
|
|
|
* @return true if the array contains the given integer, otherwise false.
|
|
|
|
|
*/
|
|
|
|
|
private static boolean contains(int[] modes, int mode) {
|
|
|
|
|
if (modes == null) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
for (int i : modes) {
|
|
|
|
|
if (i == mode) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Return true if the two given {@link Size}s have the same aspect ratio.
|
|
|
|
|
*
|
|
|
|
|
* @param a first {@link Size} to compare.
|
|
|
|
|
* @param b second {@link Size} to compare.
|
|
|
|
|
* @return true if the sizes have the same aspect ratio, otherwise false.
|
|
|
|
|
*/
|
|
|
|
|
private static boolean checkAspectsEqual(Size a, Size b) {
|
|
|
|
|
double aAspect = a.getWidth() / (double) a.getHeight();
|
|
|
|
|
double bAspect = b.getWidth() / (double) b.getHeight();
|
|
|
|
|
return Math.abs(aAspect - bAspect) <= ASPECT_RATIO_TOLERANCE;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Rotation needed to transform from the camera sensor orientation to the device's current
|
|
|
|
|
* orientation.
|
|
|
|
|
*
|
|
|
|
|
* @param c the {@link CameraCharacteristics} to query for the camera sensor
|
|
|
|
|
* orientation.
|
|
|
|
|
* @param deviceOrientation the current device orientation relative to the native device
|
|
|
|
|
* orientation.
|
|
|
|
|
* @return the total rotation from the sensor orientation to the current device orientation.
|
|
|
|
|
*/
|
|
|
|
|
private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
|
|
|
|
|
int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
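// SENSOR_ORIENTATION is documented as available on all camera2 devices, so the unboxing above is
// safe. ORIENTATIONS is assumed to map Surface.ROTATION_* constants to degrees, as in the
// standard Camera2Raw sample.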
|
|
|
|
|
|
|
|
|
|
// Get device orientation in degrees
|
|
|
|
|
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
|
|
|
|
|
|
|
|
|
|
// Reverse device orientation for front-facing cameras
|
|
|
|
|
if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
|
|
|
|
|
deviceOrientation = -deviceOrientation;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Calculate desired JPEG orientation relative to camera orientation to make
|
|
|
|
|
// the image upright relative to the device orientation
|
|
|
|
|
return (sensorOrientation - deviceOrientation + 360) % 360;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Shows a {@link Toast} on the UI thread.
|
|
|
|
|
*
|
|
|
|
|
* @param text The message to show.
|
|
|
|
|
*/
|
|
|
|
|
private void showToast(String text) {
|
|
|
|
|
// We show a Toast by sending a request message to mMessageHandler. This makes sure that the
|
|
|
|
|
// Toast is shown on the UI thread.
|
|
|
|
|
Message message = Message.obtain();
|
|
|
|
|
message.obj = text;
|
|
|
|
|
mMessageHandler.sendMessage(message);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* If the given request has been completed, remove it from the queue of active requests and
|
|
|
|
|
* send an {@link ImageSaver} with the results from this request to a background thread to
|
|
|
|
|
* save a file.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*
|
|
|
|
|
* @param requestId the ID of the {@link CaptureRequest} to handle.
|
|
|
|
|
* @param builder the {@link ImageSaver.ImageSaverBuilder} for this request.
|
|
|
|
|
* @param queue the queue to remove this request from, if completed.
|
|
|
|
|
*/
|
|
|
|
|
private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder, TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
|
|
|
|
|
if (builder == null) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
ImageSaver saver = builder.buildIfComplete();
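// buildIfComplete() returns null until the builder has its Image, output File, CaptureResult and
// CameraCharacteristics, so the request stays queued until every piece has arrived.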
|
|
|
|
|
|
|
|
|
|
if (saver != null) {
|
|
|
|
|
queue.remove(requestId);
|
|
|
|
|
AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Check if we are using a device that only supports the LEGACY hardware level.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*
|
|
|
|
|
* @return true if this is a legacy device.
|
|
|
|
|
*/
|
|
|
|
|
private boolean isLegacyLocked() {
|
|
|
|
|
return mCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Start the timer for the pre-capture sequence.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*/
|
|
|
|
|
private void startTimerLocked() {
|
|
|
|
|
mCaptureTimer = SystemClock.elapsedRealtime();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Check if the timer for the pre-capture sequence has been hit.
|
|
|
|
|
* <p/>
|
|
|
|
|
* Call this only with {@link #mCameraStateLock} held.
|
|
|
|
|
*
|
|
|
|
|
* @return true if the timeout occurred.
|
|
|
|
|
*/
|
|
|
|
|
private boolean hitTimeoutLocked() {
|
|
|
|
|
return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* A dialog that explains why the requested permissions are necessary.
|
|
|
|
|
*/
|
|
|
|
|
public static class PermissionConfirmationDialog extends DialogFragment {
|
|
|
|
|
|
|
|
|
|
public static PermissionConfirmationDialog newInstance() {
|
|
|
|
|
return new PermissionConfirmationDialog();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@Override
|
|
|
|
|
public Dialog onCreateDialog(Bundle savedInstanceState) {
|
|
|
|
|
final Fragment parent = getParentFragment();
|
|
|
|
|
return new AlertDialog.Builder(getActivity()).setMessage(R.string.request_permission).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onClick(DialogInterface dialog, int which) {
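// No permission request is issued here even though `parent` is obtained above; the hosting
// code is assumed to request the required permissions elsewhere.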
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
|
|
|
|
|
@Override
|
|
|
|
|
public void onClick(DialogInterface dialog, int which) {
|
|
|
|
|
getActivity().finish();
|
|
|
|
|
}
|
|
|
|
|
}).create();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
}
|