Add exposure multiplier on the home page

mem
liuguijing 6 months ago
parent 07bc04003d
commit 7ee7f834ae

@ -1,5 +1,6 @@
package com.xypower.mppreview;
import static com.xypower.mppreview.HdrUtil.generateTimestamp;
import static java.lang.System.loadLibrary;
import android.Manifest;
@ -24,7 +25,6 @@ import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
@ -56,48 +56,31 @@ import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.xypower.mppreview.widget.ErrorDialog;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A simple {@link Fragment} subclass.
* Use the {@link Camera2RawFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class Camera2RawFragment extends Fragment {
private static int covertNum;
static {
loadLibrary("mppreview");
}
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
@ -130,44 +113,20 @@ public class Camera2RawFragment extends Fragment {
*/
private static final int MAX_PREVIEW_WIDTH = 1920;
/**
* Max preview height that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_HEIGHT = 1080;
/**
* Tag for the {@link Log}.
*/
private static final String TAG = "Camera2RawFragment";
/**
* Camera state: Device is closed.
*/
private static final int STATE_CLOSED = 0;
/**
* Camera state: Device is opened, but is not capturing.
*/
private static final int STATE_OPENED = 1;
/**
* Camera state: Showing camera preview.
*/
private static final int STATE_PREVIEW = 2;
/**
* Camera state: Waiting for 3A convergence before capturing a photo.
*/
private static final int STATE_WAITING_FOR_3A_CONVERGENCE = 3;
/**
* An {@link OrientationEventListener} used to determine when device rotation has occurred.
* This is mainly necessary for when the device is rotated by 180 degrees, in which case
* onCreate or onConfigurationChanged is not called as the view dimensions remain the same,
but the orientation of the screen has changed, and thus the preview rotation must be updated.
*/
private OrientationEventListener mOrientationListener;
private static ArrayList<PngPhotoBean> list = new ArrayList<>(); // Stores the photos that have already been captured
public static native boolean makeHdr(long exposureTime1, String path1, long exposureTime2, String path2, String outputPath);
@ -176,6 +135,13 @@ public class Camera2RawFragment extends Fragment {
private int mExposureComp = MainActivity.ExposureComp;
private Long exposetime;
private Integer sensitivity;
public boolean isHandTakePic = true;
private double pic1 = 1;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
* {@link TextureView}.
@ -294,14 +260,14 @@ public class Camera2RawFragment extends Fragment {
private int mPendingUserCaptures = 0;
/**
* Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress JPEG captures.
* Request ID to {@link ImageSaverBuilder} mapping for in-progress JPEG captures.
*/
private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mJpegResultQueue = new TreeMap<>();
private final TreeMap<Integer, ImageSaverBuilder> mJpegResultQueue = new TreeMap<>();
/**
* Request ID to {@link ImageSaver.ImageSaverBuilder} mapping for in-progress RAW captures.
* Request ID to {@link ImageSaverBuilder} mapping for in-progress RAW captures.
*/
private final TreeMap<Integer, ImageSaver.ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
private final TreeMap<Integer, ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
/**
* {@link CaptureRequest.Builder} for the camera preview
@ -390,16 +356,13 @@ public class Camera2RawFragment extends Fragment {
* RAW image is ready to be saved.
*/
private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
}
};
private Long exposetime;
private Integer sensitivity;
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
@ -425,14 +388,9 @@ public class Camera2RawFragment extends Fragment {
if (afState == null) {
break;
}
// If auto-focus has reached locked state, we are ready to capture
readyToCapture = (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
}
// If we are running on a non-legacy device, we should also wait until
// auto-exposure and auto-white-balance have converged as well before
// taking a picture.
if (!isLegacyLocked()) {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
@ -443,20 +401,16 @@ public class Camera2RawFragment extends Fragment {
readyToCapture = readyToCapture && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED;
}
// If we haven't finished the pre-capture sequence but have hit our maximum
// wait timeout, too bad! Begin capture anyway.
if (!readyToCapture && hitTimeoutLocked()) {
Log.w(TAG, "Timed out waiting for pre-capture sequence to complete.");
readyToCapture = true;
}
if (readyToCapture && mPendingUserCaptures > 0) {
// Capture once for each user tap of the "Picture" button.
while (mPendingUserCaptures > 0) {
captureStillPictureLocked(exposetime, sensitivity);
mPendingUserCaptures--;
}
// After this, the camera will go back to the normal state of preview.
mState = STATE_PREVIEW;
}
}
@ -476,28 +430,22 @@ public class Camera2RawFragment extends Fragment {
};
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles the still JPEG and RAW capture
* request.
*/
private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
synchronized (mCameraStateLock) {
String currentDateTime = generateTimestamp();
File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
// File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_a.dng");
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
// Look up the ImageSaverBuilder for this request and update it with the file name
// based on the capture start time.
ImageSaver.ImageSaverBuilder jpegBuilder;
ImageSaver.ImageSaverBuilder rawBuilder;
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
File rawFile = new File(directory, "RAW_" + currentDateTime + ".dng");
// File jpegFile = new File(directory, "JPEG_" + currentDateTime + ".png");
// ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder;
int requestId = (int) request.getTag();
jpegBuilder = mJpegResultQueue.get(requestId);
// jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
// if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
if (rawBuilder != null) rawBuilder.setFile(rawFile);
}
}
@ -505,20 +453,19 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
int requestId = (int) request.getTag();
ImageSaver.ImageSaverBuilder jpegBuilder;
ImageSaver.ImageSaverBuilder rawBuilder;
ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder;
StringBuilder sb = new StringBuilder();
// Look up the ImageSaverBuilder for this request and update it with the CaptureResult
synchronized (mCameraStateLock) {
jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
if (jpegBuilder != null) {
jpegBuilder.setResult(result);
sb.append("Saving JPEG as: ");
sb.append(jpegBuilder.getSaveLocation());
}
// if (jpegBuilder != null) {
// jpegBuilder.setResult(result);
// sb.append("Saving JPEG as: ");
// sb.append(jpegBuilder.getSaveLocation());
// }
if (rawBuilder != null) {
rawBuilder.setResult(result);
if (rawBuilder != null) sb.append(", ");
@ -526,7 +473,6 @@ public class Camera2RawFragment extends Fragment {
sb.append(rawBuilder.getSaveLocation());
}
// If we have all the results necessary, save the image to a file in the background.
handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
@ -549,9 +495,6 @@ public class Camera2RawFragment extends Fragment {
};
/**
* A {@link Handler} for showing {@link Toast}s on the UI thread.
*/
private final Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
@Override
public void handleMessage(Message msg) {
@ -561,11 +504,7 @@ public class Camera2RawFragment extends Fragment {
}
}
};
public boolean isHandTakePic = true;
private double pic1 = 1;
private double pic2 = 1;
// private long exposureTime = 0;
// private int sensitivity = 0;
public static Camera2RawFragment newInstance() {
return new Camera2RawFragment();
@ -581,10 +520,6 @@ public class Camera2RawFragment extends Fragment {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
// Setup a new OrientationEventListener. This is used to handle rotation events like a
// 180 degree rotation that do not normally trigger a call to onCreate to do view re-layout
// or otherwise cause the preview TextureView's size to change.
mOrientationListener = new OrientationEventListener(getActivity(), SensorManager.SENSOR_DELAY_NORMAL) {
@Override
public void onOrientationChanged(int orientation) {
@ -609,11 +544,6 @@ public class Camera2RawFragment extends Fragment {
super.onResume();
startBackgroundThread();
openCamera();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we should
// configure the preview bounds here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
} else {
@ -671,9 +601,6 @@ public class Camera2RawFragment extends Fragment {
MeteringRectangle mr = new MeteringRectangle(new Rect((int) rect.left, (int) rect.top, (int) rect.right, (int) rect.bottom), 1000);
startControlAFRequest(mr, mPreCaptureCallback);
// mCameraDevice.;
}
public void startControlAFRequest(MeteringRectangle rect, CameraCaptureSession.CaptureCallback captureCallback) {
@ -706,6 +633,7 @@ public class Camera2RawFragment extends Fragment {
*/
private boolean setUpCameraOutputs() {
Activity activity = getActivity();
assert activity != null;
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
if (manager == null) {
ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog");
@ -720,16 +648,12 @@ public class Camera2RawFragment extends Fragment {
if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
continue;
}
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// For still image captures, we use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
synchronized (mCameraStateLock) {
// Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
// counted wrapper to ensure they are only closed when all background tasks
@ -743,7 +667,6 @@ public class Camera2RawFragment extends Fragment {
mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
}
mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler);
mCharacteristics = characteristics;
mCameraId = cameraId;
}
@ -770,25 +693,16 @@ public class Camera2RawFragment extends Fragment {
requestCameraPermissions();
return;
}
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
try {
// Wait for any previously running session to finish.
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId;
Handler backgroundHandler;
synchronized (mCameraStateLock) {
cameraId = mCameraId;
backgroundHandler = mBackgroundHandler;
}
// Attempt to open the camera. mStateCallback will be called on the background handler's
// thread when this succeeds or fails.
manager.openCamera(cameraId, mStateCallback, backgroundHandler);
manager.openCamera(mCameraId, mStateCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
@ -1014,16 +928,6 @@ public class Camera2RawFragment extends Fragment {
}
}
/**
* Configure the necessary {@link android.graphics.Matrix} transformation to `mTextureView`,
* and start/restart the preview capture session if necessary.
* <p/>
* This method should be called after the camera state has been initialized in
* setUpCameraOutputs.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
synchronized (mCameraStateLock) {
@ -1137,46 +1041,21 @@ public class Camera2RawFragment extends Fragment {
}
}
/**
* Initiate a still image capture.
* <p/>
* This function sends a capture request that initiates a pre-capture sequence in our state
* machine that waits for auto-focus to finish, ending in a "locked" state where the lens is no
* longer moving, waits for auto-exposure to choose a good exposure value, and waits for
* auto-white-balance to converge.
*/
public void takePicture() {
synchronized (mCameraStateLock) {
mPendingUserCaptures++;
// If we already triggered a pre-capture sequence, or are in a state where we cannot
// do this, return immediately.
if (mState != STATE_PREVIEW) {
return;
}
try {
// If this is not a legacy device, we can also trigger an auto-exposure metering
// run.
if (!isLegacyLocked()) {
// Tell the camera to lock focus.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
}
// Trigger an auto-focus run if camera is capable. If the camera is already focused,
// this should do nothing.
if (!mNoAFRun) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
}
// Update state machine to wait for auto-focus, auto-exposure, and
// auto-white-balance (aka. "3A") to converge.
mState = STATE_WAITING_FOR_3A_CONVERGENCE;
// Start a timer for the pre-capture sequence.
startTimerLocked();
// Replace the existing repeating request with one with updated 3A triggers.
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
@ -1184,12 +1063,6 @@ public class Camera2RawFragment extends Fragment {
}
}
/**
* Send a capture request to the camera device that initiates a capture targeting the JPEG and
* RAW outputs.
* <p/>
* Call this only with {@link #mCameraStateLock} held.
*/
private void captureStillPictureLocked(long exposureTime, int sensitivity) {
try {
final Activity activity = getActivity();
@ -1255,22 +1128,14 @@ public class Camera2RawFragment extends Fragment {
}
}
}
CaptureRequest request = captureBuilder.build();
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
rawBuilder.setExposetime((long) v);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // Saves the capture parameters
rawBuilder.setExposetime((long) v); // Saves the exposure time
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
requests.add(request);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
//
// // Set request tag to easily track results in callbacks.
@ -1294,20 +1159,11 @@ public class Camera2RawFragment extends Fragment {
}
}
/**
* Called after a RAW/JPEG capture has completed; resets the AF trigger state for the
* pre-capture sequence.
* <p/>
* Call this only with {@link #mCameraStateLock} held.
*/
private void finishedCaptureLocked() {
try {
// Reset the auto-focus trigger in case AF didn't run quickly enough.
if (!mNoAFRun) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
}
} catch (CameraAccessException e) {
@ -1315,46 +1171,27 @@ public class Camera2RawFragment extends Fragment {
}
}
/**
* Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
* that {@link ImageReader} until that {@link Image} is no longer in use, and set this
* {@link Image} as the result for the next request in the queue of pending requests. If
* all necessary information is available, begin saving the image to a file in a background
* thread.
*
* @param pendingQueue the currently active requests.
* @param reader a reference counted wrapper containing an {@link ImageReader} from which
* to acquire an image.
*/
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
synchronized (mCameraStateLock) {
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
Map.Entry<Integer, ImageSaverBuilder> entry = null;
if (pendingQueue != null) {
for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> item : pendingQueue.entrySet()) {
ImageSaver.ImageSaverBuilder value = item.getValue();
for (Map.Entry<Integer, ImageSaverBuilder> item : pendingQueue.entrySet()) {
ImageSaverBuilder value = item.getValue();
if (value.mImage == null) {
entry = item;
break;
}
}
}
// Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
// ImageSaver.ImageSaverBuilder builder = entry.getValue();
if (entry == null) {
return;
}
ImageSaver.ImageSaverBuilder builder = entry.getValue();
// Increment reference count to prevent ImageReader from being closed while we
// are saving its Images in a background thread (otherwise their resources may
// be freed while we are writing to a file).
ImageSaverBuilder builder = entry.getValue();
if (reader == null || reader.getAndRetain() == null) {
Log.e(TAG, "Paused the activity before we could save the image," + " ImageReader already closed.");
pendingQueue.remove(entry.getKey());
return;
}
Image image;
try {
image = reader.get().acquireNextImage();
@ -1368,198 +1205,6 @@ public class Camera2RawFragment extends Fragment {
}
}
private static class ImageSaver implements Runnable {
/**
* The image to save.
*/
private final Image mImage;
/**
* The file we save the image into.
*/
private final File mFile;
/**
* The CaptureResult for this image capture.
*/
private final CaptureResult mCaptureResult;
/**
* The CameraCharacteristics for this camera device.
*/
private final CameraCharacteristics mCharacteristics;
/**
* The Context to use when updating MediaStore with the saved images.
*/
private final Context mContext;
/**
* A reference counted wrapper for the ImageReader that owns the given image.
*/
private final RefCountedAutoCloseable<ImageReader> mReader;
private final long mExpostime;
private ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics, Context context, RefCountedAutoCloseable<ImageReader> reader, long mexpostime) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
mExpostime = mexpostime;
}
@Override
public void run() {
boolean success = false;
int format = mImage.getFormat();
switch (format) {
case ImageFormat.JPEG: {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
break;
}
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
dngCreator.writeImage(output, mImage);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
break;
}
default: {
Log.e(TAG, "Cannot save image, unexpected image format:" + format);
break;
}
}
mReader.close();
if (success) {
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
String directoryPath = directory.getPath();
File file = new File(directory, "create_" + mExpostime + "_" + generateTimestamp() + ".png");
String path = file.getPath();
try {
RawToJpgConverter.convertRawToJpg(mFile.getPath(), path);
} catch (IOException e) {
throw new RuntimeException(e);
}
PngPhotoBean bean = new PngPhotoBean();
bean.setEtime(mExpostime);
bean.setPath(path);
list.add(bean);
if (list.size() == 2) {
PngPhotoBean bean1 = list.get(0);
PngPhotoBean bean2 = list.get(1);
makeHdr(bean1.getEtime(), bean1.getPath(), bean2.getEtime(), bean2.getPath(), directoryPath + "/" + "hdr_" + generateTimestamp() + ".png");
}
}
}
/**
* Builder class for constructing {@link ImageSaver}s.
* <p/>
* This class is thread safe.
*/
public static class ImageSaverBuilder {
private Image mImage;
private File mFile;
private CaptureResult mCaptureResult;
private CameraCharacteristics mCharacteristics;
private Context mContext;
private long mexpostime;
private RefCountedAutoCloseable<ImageReader> mReader;
/**
* Construct a new ImageSaverBuilder using the given {@link Context}.
*
* @param context a {@link Context} to use for accessing the
* {@link android.provider.MediaStore}.
*/
public ImageSaverBuilder(final Context context) {
mContext = context;
}
public synchronized ImageSaverBuilder setExposetime(long time) {
mexpostime = time;
return this;
}
public synchronized ImageSaverBuilder setRefCountedReader(RefCountedAutoCloseable<ImageReader> reader) {
if (reader == null) throw new NullPointerException();
mReader = reader;
return this;
}
public synchronized ImageSaverBuilder setImage(final Image image) {
if (image == null) throw new NullPointerException();
mImage = image;
return this;
}
public synchronized ImageSaverBuilder setFile(final File file) {
if (file == null) throw new NullPointerException();
mFile = file;
return this;
}
public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
if (result == null) throw new NullPointerException();
mCaptureResult = result;
return this;
}
public synchronized ImageSaverBuilder setCharacteristics(final CameraCharacteristics characteristics) {
if (characteristics == null) throw new NullPointerException();
mCharacteristics = characteristics;
return this;
}
public synchronized ImageSaver buildIfComplete() {
if (!isComplete()) {
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader, mexpostime);
}
public synchronized String getSaveLocation() {
return (mFile == null) ? "Unknown" : mFile.toString();
}
private boolean isComplete() {
return mImage != null && mFile != null && mCaptureResult != null && mCharacteristics != null;
}
}
}
// Utility classes and methods:
// *********************************************************************************************
/**
* Comparator based on area of the given {@link Size} objects.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
@ -1570,36 +1215,6 @@ public class Camera2RawFragment extends Fragment {
}
/**
* A dialog fragment for displaying non-recoverable errors; this {@link Activity} will be
* finished once the dialog has been acknowledged by the user.
*/
public static class ErrorDialog extends DialogFragment {
private String mErrorMessage;
public ErrorDialog() {
mErrorMessage = "Unknown error occurred!";
}
// Build a dialog with a custom message (Fragments require default constructor).
public static ErrorDialog buildErrorDialog(String errorMessage) {
ErrorDialog dialog = new ErrorDialog();
dialog.mErrorMessage = errorMessage;
return dialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity).setMessage(mErrorMessage).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
}).create();
}
}
/**
* A wrapper for an {@link AutoCloseable} object that implements reference counting to allow
@ -1712,25 +1327,14 @@ public class Camera2RawFragment extends Fragment {
*
* @return a {@link String} representing a time.
*/
private static String generateTimestamp() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US);
return sdf.format(new Date());
}
/**
* Cleanup the given {@link OutputStream}.
*
* @param outputStream the stream to close.
*/
private static void closeOutput(OutputStream outputStream) {
if (null != outputStream) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Return true if the given array contains the given integer.
@ -1811,10 +1415,9 @@ public class Camera2RawFragment extends Fragment {
* Call this only with {@link #mCameraStateLock} held.
*
* @param requestId the ID of the {@link CaptureRequest} to handle.
* @param builder the {@link ImageSaver.ImageSaverBuilder} for this request.
* @param queue the queue to remove this request from, if completed.
*/
private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder, TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
private void handleCompletionLocked(int requestId, ImageSaverBuilder builder, TreeMap<Integer, ImageSaverBuilder> queue) {
if (builder == null) {
return;
}
@ -1857,32 +1460,5 @@ public class Camera2RawFragment extends Fragment {
return (SystemClock.elapsedRealtime() - mCaptureTimer) > PRECAPTURE_TIMEOUT_MS;
}
/**
* A dialog that explains about the necessary permissions.
*/
public static class PermissionConfirmationDialog extends DialogFragment {
public static PermissionConfirmationDialog newInstance() {
return new PermissionConfirmationDialog();
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity()).setMessage(R.string.request_permission).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
}).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
getActivity().finish();
}
}).create();
}
}
}
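How the new exposure-multiplier path fits together: captureStillPictureLocked computes a per-frame exposure value v, records it on each ImageSaverBuilder via setExposetime, and submits the requests with captureBurst, while pic1/pic2 and MainActivity.ExposureComp carry the factors entered on the home page. The snippet below is a minimal, hedged sketch of exposure bracketing along those lines; the class name and the baseExposureNs/baseIso/multipliers parameters are illustrative assumptions, not identifiers from this commit.

// Hedged sketch of manual exposure bracketing with Camera2. The names
// baseExposureNs, baseIso and multipliers are illustrative only.
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.view.Surface;
import java.util.ArrayList;
import java.util.List;

class BracketingSketch {
    static List<CaptureRequest> buildBurst(CameraDevice camera, Surface rawTarget,
                                           long baseExposureNs, int baseIso,
                                           double[] multipliers) throws CameraAccessException {
        List<CaptureRequest> requests = new ArrayList<>();
        for (double m : multipliers) { // e.g. the pic1/pic2 factors from the home page
            CaptureRequest.Builder b = camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            b.addTarget(rawTarget);
            // Disable auto-exposure so the scaled exposure time actually takes effect.
            b.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
            b.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) (baseExposureNs * m));
            b.set(CaptureRequest.SENSOR_SENSITIVITY, baseIso);
            requests.add(b.build());
        }
        return requests; // hand the list to CameraCaptureSession.captureBurst(...)
    }
}

The bracketed RAW frames are then converted to PNG and paired for the native makeHdr call, as the new ImageSaver.run() further down shows.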

@ -1,98 +0,0 @@
//package com.xypower.camera2raw;
//
//import android.hardware.camera2.params.TonemapCurve;
//
//import org.opencv.android.OpenCVLoader;
//import org.opencv.core.Core;
//import org.opencv.core.Mat;
//import org.opencv.imgcodecs.Imgcodecs;
//import org.opencv.photo.MergeDebevec;
//import org.opencv.photo.Photo;
//import org.opencv.photo.Tonemap;
//import org.opencv.photo.MergeExposures;
//
//public class HdrMergeExample {
//
// static {
// if (!OpenCVLoader.initDebug()) {
// // Handle initialization error
// }
// }
//
//// public Mat createHdrImage(Mat[] mats) {
//// Mat hdrImage = new Mat();
//// Exposure.createHDRmergeMertens(mats, hdrImage);
//// return hdrImage;
//// }
////
//// public void saveHdrImage(Mat hdrImage, String filePath) {
//// Imgcodecs.imwrite(filePath, hdrImage);
//// }
////
//// // Usage example
//// public void processHdr() {
//// // Assume you have a Mat array containing several images to merge into an HDR image
//// Mat[] images = ...;
////
//// Mat hdrImage = createHdrImage(images);
////
//// // Save the HDR image to device storage
//// saveHdrImage(hdrImage, "/path/to/save/hdrImage.jpg");
////
//// // Release resources
//// hdrImage.release();
//// for (Mat mat : images) {
//// mat.release();
//// }
//// }
//
// public Mat sdrToHdr(String inputPaths) {
// Mat image = Imgcodecs.imread(inputPaths);
// Mat hdr = new Mat();
// MergeDebevec mergeDebevec = Photo.createMergeDebevec();
// mergeDebevec.process(image, hdr, image.ge);
//
// }
//
// public Mat mergeHdrImages(Mat[] hdrImages) {
// Mat mergedImage = new Mat();
// Core.merge(hdrImages, mergedImage);
// return mergedImage;
// }
//
// public Mat toneMap(Mat hdrImage) {
// Mat ldrImage = new Mat();
// TonemapCurve.CHANNEL_BLUE(2); // Use the Gamma Tone Mapping
// Tonemap.process(hdrImage, ldrImage);
// return ldrImage;
// }
//
// public void saveMergedImage(Mat mergedImage, String filePath) {
// boolean result = Imgcodecs.imwrite(filePath, mergedImage);
// if (result) {
// // Image saved successfully
// } else {
// // Handle save error
// }
// }
//
// // Example usage
// public void mergeHdrAndSave(String[] inputPaths, String outputPath) {
// Mat[] hdrImages = new Mat[inputPaths.length];
// for (int i = 0; i < inputPaths.length; i++) {
// hdrImages[i] = Imgcodecs.imread(inputPaths[i]);
// }
//
// Mat mergedImage = mergeHdrImages(hdrImages);
// Mat ldrImage = toneMap(mergedImage);
//
// saveMergedImage(ldrImage, outputPath);
//
// // Release the images to avoid memory leaks
// for (Mat image : hdrImages) {
// image.release();
// }
// mergedImage.release();
// ldrImage.release();
// }
//}
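The HdrMergeExample removed above never compiled (mergeDebevec.process(image, hdr, image.ge) is left unfinished and passes a single image where a set of exposures is required), and this commit relies on the native makeHdr instead. For reference, the block below is a minimal sketch of the OpenCV merge the file appeared to be attempting, assuming the OpenCV Java bindings are on the classpath; the class name, file paths, and exposure times are illustrative.

// Hedged sketch: Debevec merge plus tone mapping with the OpenCV Java API.
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.photo.MergeDebevec;
import org.opencv.photo.Photo;
import org.opencv.photo.Tonemap;

import java.util.ArrayList;
import java.util.List;

public class HdrMergeSketch {
    public static void merge(String[] inputPaths, float[] exposureSeconds, String outputPath) {
        List<Mat> images = new ArrayList<>();
        for (String path : inputPaths) {
            images.add(Imgcodecs.imread(path));
        }
        // MergeDebevec needs one exposure time (in seconds) per input frame.
        Mat times = new Mat(exposureSeconds.length, 1, CvType.CV_32F);
        for (int i = 0; i < exposureSeconds.length; i++) {
            times.put(i, 0, exposureSeconds[i]);
        }
        Mat hdr = new Mat();
        MergeDebevec mergeDebevec = Photo.createMergeDebevec();
        mergeDebevec.process(images, hdr, times);
        // Tone-map the 32-bit HDR result back into a displayable range.
        Mat ldr = new Mat();
        Tonemap tonemap = Photo.createTonemap(2.2f);
        tonemap.process(hdr, ldr);
        // Scale the [0,1] float output to 8-bit before writing.
        Mat out = new Mat();
        ldr.convertTo(out, CvType.CV_8UC3, 255);
        Imgcodecs.imwrite(outputPath, out);
    }
}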

@ -2,7 +2,22 @@ package com.xypower.mppreview;
import static java.lang.System.loadLibrary;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
public class HdrUtil {
// static {
// loadLibrary("mppreview");
// }
public static String generateTimestamp() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US);
return sdf.format(new Date());
}
// public static native boolean makeHdr(long exposureTime1, String path1, long exposureTime2, String path2, String outputPath);
//
// public static native boolean makeHdr2(long exposureTime1, String path1, long exposureTime2, String path2, long exposureTime3, String path3, String outputPath);
}

@ -0,0 +1,133 @@
package com.xypower.mppreview;
import static com.xypower.mppreview.Camera2RawFragment.makeHdr;
import static com.xypower.mppreview.HdrUtil.generateTimestamp;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import android.media.ImageReader;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
public class ImageSaver implements Runnable {
private final Image mImage;
private final File mFile;
private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics;
private final Context mContext;
private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
private final long mExpostime;
private static ArrayList<PngPhotoBean> list = new ArrayList<>(); // Stores the photos that have already been captured
public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics, Context context, Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader, long mexpostime) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
mExpostime = mexpostime;
}
@Override
public void run() {
boolean success = false;
int format = mImage.getFormat();
switch (format) {
case ImageFormat.JPEG: {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
break;
}
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
dngCreator.writeImage(output, mImage);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
break;
}
default: {
break;
}
}
mReader.close();
if (success) {
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
String directoryPath = directory.getPath();
File file = new File(directory, "create_" + mExpostime + "_" + generateTimestamp() + ".png");
String path = file.getPath();
try {
RawToJpgConverter.convertRawToJpg(mFile.getPath(), path);
} catch (IOException e) {
throw new RuntimeException(e);
}
PngPhotoBean bean = new PngPhotoBean();
bean.setEtime(mExpostime);
bean.setPath(path);
list.add(bean);
if (list.size() == 2) {
PngPhotoBean bean1 = list.get(0);
PngPhotoBean bean2 = list.get(1);
makeHdr(bean1.getEtime(), bean1.getPath(), bean2.getEtime(), bean2.getPath(), directoryPath + "/" + "hdr_" + generateTimestamp() + ".png");
}
}
}
private static void closeOutput(OutputStream outputStream) {
if (null != outputStream) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
// private void saveJpeg(Image image,String name) {
// Image.Plane[] planes = image.getPlanes();
// ByteBuffer buffer = planes[0].getBuffer();
// int pixelStride = planes[0].getPixelStride();
// int rowStride = planes[0].getRowStride();
// int rowPadding = rowStride - pixelStride * mWidth;
//
// Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
// bitmap.copyPixelsFromBuffer(buffer);
// //bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
// ImageSaveUtil.saveBitmap2file(bitmap,getApplicationContext(),name);
//
// }
}

@ -0,0 +1,82 @@
package com.xypower.mppreview;
import android.content.Context;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.media.Image;
import android.media.ImageReader;
import java.io.File;
public class ImageSaverBuilder {
public Image mImage;
public File mFile;
public CaptureResult mCaptureResult;
public CameraCharacteristics mCharacteristics;
public Context mContext;
public long mexpostime;
public Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
/**
* Construct a new ImageSaverBuilder using the given {@link Context}.
*
* @param context a {@link Context} to use for accessing the
* {@link android.provider.MediaStore}.
*/
public ImageSaverBuilder(final Context context) {
mContext = context;
}
public synchronized ImageSaverBuilder setExposetime(long time) {
mexpostime = time;
return this;
}
public synchronized ImageSaverBuilder setRefCountedReader(Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader) {
if (reader == null) throw new NullPointerException();
mReader = reader;
return this;
}
public synchronized ImageSaverBuilder setImage(final Image image) {
if (image == null) throw new NullPointerException();
mImage = image;
return this;
}
public synchronized ImageSaverBuilder setFile(final File file) {
if (file == null) throw new NullPointerException();
mFile = file;
return this;
}
public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
if (result == null) throw new NullPointerException();
mCaptureResult = result;
return this;
}
public synchronized ImageSaverBuilder setCharacteristics(final CameraCharacteristics characteristics) {
if (characteristics == null) throw new NullPointerException();
mCharacteristics = characteristics;
return this;
}
public synchronized ImageSaver buildIfComplete() {
if (!isComplete()) {
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader, mexpostime);
}
public synchronized String getSaveLocation() {
return (mFile == null) ? "Unknown" : mFile.toString();
}
private boolean isComplete() {
return mImage != null && mFile != null && mCaptureResult != null && mCharacteristics != null;
}
}
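A short usage sketch of how a builder is assumed to be completed and handed off once its image arrives, mirroring the fragment's dequeueAndSaveImage and handleCompletionLocked flow; the method name and the executor choice are illustrative assumptions (uses android.media.ImageReader and android.os.AsyncTask).

// Hedged sketch: the builder for this request already holds its File and
// CaptureResult from the capture callbacks; the incoming image completes it.
void onRawImageAvailable(ImageReader reader, ImageSaverBuilder builder,
                         Camera2RawFragment.RefCountedAutoCloseable<ImageReader> refCountedReader) {
    builder.setRefCountedReader(refCountedReader)
           .setImage(reader.acquireNextImage());
    // buildIfComplete() returns null until image, file, result and characteristics are all set.
    ImageSaver saver = builder.buildIfComplete();
    if (saver != null) {
        // Write the DNG/PNG off the main thread; the executor here is illustrative.
        AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
    }
}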

@ -0,0 +1,36 @@
package com.xypower.mppreview.widget;
import android.app.Activity;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
public class ErrorDialog extends DialogFragment {
private String mErrorMessage;
public ErrorDialog() {
mErrorMessage = "Unknown error occurred!";
}
// Build a dialog with a custom message (Fragments require default constructor).
public static ErrorDialog buildErrorDialog(String errorMessage) {
ErrorDialog dialog = new ErrorDialog();
dialog.mErrorMessage = errorMessage;
return dialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity).setMessage(mErrorMessage).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
}).create();
}
}

@ -0,0 +1,35 @@
package com.xypower.mppreview.widget;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import com.xypower.mppreview.R;
public class PermissionConfirmationDialog extends DialogFragment {
public static PermissionConfirmationDialog newInstance() {
return new PermissionConfirmationDialog();
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity()).setMessage(R.string.request_permission).setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
}).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
getActivity().finish();
}
}).create();
}
}

@ -9,15 +9,6 @@
android:paddingRight="15dp"
tools:context=".MainActivity">
<!-- <Switch-->
<!-- android:id="@+id/hdr"-->
<!-- android:layout_width="wrap_content"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:layout_marginLeft="15dp"-->
<!-- android:layout_marginTop="20dp"-->
<!-- app:layout_constraintLeft_toLeftOf="parent"-->
<!-- app:layout_constraintTop_toTopOf="parent" />-->
<TextView
android:id="@+id/hdrhint"
android:layout_width="wrap_content"
@ -26,6 +17,15 @@
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Spinner
android:id="@+id/spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintBottom_toBottomOf="@+id/hdrhint"
app:layout_constraintLeft_toRightOf="@+id/hdrhint"
app:layout_constraintTop_toTopOf="@+id/hdrhint" />
<EditText
android:id="@+id/hdr_num"
android:layout_width="50dp"
