Fix the bug where one RAW image was missing

Hdr_ds
liuguijing 6 months ago
parent 0508c9c7f5
commit 6e8a8b0209

@@ -59,6 +59,8 @@ import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import org.opencv.android.OpenCVLoader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -91,6 +93,7 @@ public class Camera2RawFragment extends Fragment {
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
@@ -386,6 +389,8 @@ public class Camera2RawFragment extends Fragment {
};
private Long exposetime;
private Integer sensitivity;
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
* pre-capture sequence.
@@ -403,8 +408,10 @@ public class Camera2RawFragment extends Fragment {
boolean readyToCapture = true;
if (!mNoAFRun) {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
Long exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); // get the auto exposure time
Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY); // get the auto ISO
// get the auto exposure time
exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
// get the auto ISO
sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
if (afState == null) {
break;
}
@@ -436,7 +443,7 @@ public class Camera2RawFragment extends Fragment {
if (readyToCapture && mPendingUserCaptures > 0) {
// Capture once for each user tap of the "Picture" button.
while (mPendingUserCaptures > 0) {
captureStillPictureLocked(exposureTime, sensitivity);
captureStillPictureLocked(exposetime, sensitivity);
mPendingUserCaptures--;
}
// After this, the camera will go back to the normal state of preview.
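Note: this hunk promotes exposetime and sensitivity from locals in the AF branch to fragment fields, so the metered values survive until captureStillPictureLocked fires. A minimal sketch of that pattern, with hypothetical field and callback names (the commit itself uses exposetime/sensitivity):

private volatile Long mLastExposureNs; // SENSOR_EXPOSURE_TIME, in nanoseconds
private volatile Integer mLastIso;     // SENSOR_SENSITIVITY

private final CameraCaptureSession.CaptureCallback mAeSnapshotCallback =
        new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        // Cache the metered values; either may be null before AE converges.
        mLastExposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        mLastIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
    }
};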
@@ -466,6 +473,7 @@ public class Camera2RawFragment extends Fragment {
private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
synchronized (mCameraStateLock) {
String currentDateTime = generateTimestamp();
File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
@@ -475,14 +483,13 @@ public class Camera2RawFragment extends Fragment {
ImageSaver.ImageSaverBuilder jpegBuilder;
ImageSaver.ImageSaverBuilder rawBuilder;
int requestId = (int) request.getTag();
synchronized (mCameraStateLock) {
jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
}
if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
if (rawBuilder != null) rawBuilder.setFile(rawFile);
}
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
@@ -503,7 +510,7 @@ public class Camera2RawFragment extends Fragment {
}
if (rawBuilder != null) {
rawBuilder.setResult(result);
if (jpegBuilder != null) sb.append(", ");
if (rawBuilder != null) sb.append(", ");
sb.append("Saving RAW as: ");
sb.append(rawBuilder.getSaveLocation());
}
@@ -512,7 +519,7 @@ public class Camera2RawFragment extends Fragment {
handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
finishedCaptureLocked();
// finishedCaptureLocked();
}
showToast(sb.toString());
@@ -544,8 +551,12 @@ public class Camera2RawFragment extends Fragment {
}
};
public boolean isHandTakePic = true;
private long exposureTime = 0;
private int sensitivity = 0;
private double pic1 = 1;
private double pic2 = 1;
private int i = 0;
private static int j = 0;
// private long exposureTime = 0;
// private int sensitivity = 0;
public static Camera2RawFragment newInstance() {
return new Camera2RawFragment();
@@ -577,12 +588,12 @@ public class Camera2RawFragment extends Fragment {
Button takepic = view.findViewById(R.id.takepic);
EditText baoguang = view.findViewById(R.id.baoguang);
EditText iso = view.findViewById(R.id.iso);
if (exposureTime > 0) {
baoguang.setText(exposureTime + "");
}
if (sensitivity > 0) {
iso.setText(sensitivity + "");
}
// if (exposureTime > 0) {
// baoguang.setText(exposureTime + "");
// }
// if (sensitivity > 0) {
// iso.setText(sensitivity + "");
// }
takepic.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
@@ -590,10 +601,11 @@ public class Camera2RawFragment extends Fragment {
String s1 = baoguang.getText().toString();
String s2 = iso.getText().toString();
if (s1 != null && !s1.equals("")) {
exposureTime = Long.parseLong(s1);
pic1 = Double.parseDouble(s1);
}
if (s2 != null && !s2.equals("")) {
sensitivity = Integer.parseInt(s2);
// sensitivity = Integer.parseInt(s2);
pic2 = Double.parseDouble(s2);
}
takePicture();
}
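Note: Double.parseDouble throws NumberFormatException on malformed input such as "1..5", which would crash this click handler. A defensive variant, a sketch only (parseMultiplier is not in the commit):

private static double parseMultiplier(String raw, double fallback) {
    if (raw == null || raw.trim().isEmpty()) {
        return fallback; // keep the previous multiplier on empty input
    }
    try {
        return Double.parseDouble(raw.trim());
    } catch (NumberFormatException e) {
        return fallback; // ignore malformed input instead of crashing
    }
}
// Usage: pic1 = parseMultiplier(baoguang.getText().toString(), pic1);
//        pic2 = parseMultiplier(iso.getText().toString(), pic2);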
@@ -749,6 +761,8 @@ public class Camera2RawFragment extends Fragment {
Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
// Size largestRaw = new Size(5760, 4312);
synchronized (mCameraStateLock) {
// Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
@@ -1032,20 +1046,6 @@ public class Camera2RawFragment extends Fragment {
// Allow AWB to run auto-magically if this device supports this
builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
}
// // Set the exposure time, e.g. to 1000 microseconds
//// long exposureTime = 1000 000000L; // 1000 microseconds
// if (isHandTakePic) {
// builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
// if (exposureTime > 0) {
// long value = exposureTime * 1000L;
// builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, value);
// }
// if (sensitivity > 0) {
// builder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
// }
// }
}
/**
@@ -1074,11 +1074,6 @@ public class Camera2RawFragment extends Fragment {
ex.printStackTrace();
}
}
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// StreamConfigurationMap streamConfigurationMap = mCharacteristics.get(
// CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
// System.out.println(streamConfigurationMap);
// }
// For still image captures, we always use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
@@ -1133,22 +1128,6 @@ public class Camera2RawFragment extends Fragment {
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
// Initially, output stream images from the Camera2 API will be rotated to the native
// device orientation from the sensor's orientation, and the TextureView will default to
// scaling these buffers to fill it's view bounds. If the aspect ratios and relative
// orientations are correct, this is fine.
//
// However, if the device orientation has been rotated relative to its native
// orientation so that the TextureView's dimensions are swapped relative to the
// native device orientation, we must do the following to ensure the output stream
// images are not incorrectly scaled by the TextureView:
// - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
// in the native device orientation) to the TextureView's dimension.
// - Apply a scale-to-fill from the output buffer's rotated dimensions
// (i.e. its dimensions in the current device orientation) to the TextureView's
// dimensions.
// - Apply the rotation from the native device orientation to the current device
// rotation.
if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
@@ -1258,20 +1237,37 @@ public class Camera2RawFragment extends Fragment {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
if (idx == 1) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// Set the exposure time, e.g. to 1000 microseconds
// long exposureTime = 1000 000000L; // 1000 microseconds
if (isHandTakePic) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureTime * 2);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
}
// if (idx == 0) {
//// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// // Set the exposure time, e.g. to 1000 microseconds
//// long exposureTime = 1000 000000L; // 1000 microseconds
// if (isHandTakePic) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
// if (exposureTime > 0) {
// double v = exposureTime * pic1;
// captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
// }
// if (sensitivity > 0) {
// captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
// }
// }
// }
//
// if (idx == 1) {
//// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// // Set the exposure time, e.g. to 1000 microseconds
//// long exposureTime = 1000 000000L; // 1000 microseconds
// if (isHandTakePic) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
// if (exposureTime > 0) {
// double v = exposureTime * pic2;
// captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
// }
// if (sensitivity > 0) {
// captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
// }
// }
// }
CaptureRequest request = captureBuilder.build();
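Note: the active idx == 1 branch turns AE off and doubles the metered exposure, i.e. a simple two-shot bracket (normal exposure plus one stop over). A minimal sketch of the whole burst under the Camera2Raw sample's field names (mCameraDevice, mJpegImageReader, mRawImageReader, mRequestCounter, mCaptureSession, mCaptureCallback, mBackgroundHandler); captureTwoShotBracketLocked is a hypothetical name:

private void captureTwoShotBracketLocked(Long aeExposureNs, Integer aeIso)
        throws CameraAccessException {
    List<CaptureRequest> requests = new ArrayList<>();
    for (int idx = 0; idx < 2; idx++) {
        CaptureRequest.Builder b = mCameraDevice.createCaptureRequest(
                CameraDevice.TEMPLATE_STILL_CAPTURE);
        b.addTarget(mJpegImageReader.get().getSurface());
        b.addTarget(mRawImageReader.get().getSurface());
        b.setTag(mRequestCounter.getAndIncrement());
        if (idx == 1 && aeExposureNs != null && aeExposureNs > 0) {
            // Second shot: manual exposure, one stop over the metered value.
            b.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
            b.set(CaptureRequest.SENSOR_EXPOSURE_TIME, aeExposureNs * 2);
            if (aeIso != null && aeIso > 0) {
                b.set(CaptureRequest.SENSOR_SENSITIVITY, aeIso);
            }
        }
        requests.add(b.build());
    }
    mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
}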
@@ -1288,7 +1284,7 @@ public class Camera2RawFragment extends Fragment {
requests.add(request);
}
// mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
//
// // Set request tag to easily track results in callbacks.
// captureBuilder.setTag(mRequestCounter.getAndIncrement());
@@ -1345,7 +1341,21 @@ public class Camera2RawFragment extends Fragment {
*/
private void dequeueAndSaveImage( TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
synchronized (mCameraStateLock) {
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
if (pendingQueue != null) {
for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> item : pendingQueue.entrySet()) {
ImageSaver.ImageSaverBuilder value = item.getValue();
if (value.mImage == null) {
entry = item;
}
}
}
// Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
// ImageSaver.ImageSaverBuilder builder = entry.getValue();
if (entry == null) {
return;
}
ImageSaver.ImageSaverBuilder builder = entry.getValue();
// Increment reference count to prevent ImageReader from being closed while we
@@ -1365,9 +1375,15 @@ public class Camera2RawFragment extends Fragment {
pendingQueue.remove(entry.getKey());
return;
}
// String currentDateTime = generateTimestamp();
// File file;
// if (type == 0) {
// file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
// } else {
// file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
// }
// builder.setFile(file);
builder.setRefCountedReader(reader).setImage(image);
handleCompletionLocked(entry.getKey(), builder, pendingQueue);
}
}
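Note: this hunk is the heart of the fix. dequeueAndSaveImage used to take pendingQueue.firstEntry() unconditionally, so during a two-request burst the second image could be handed to a builder that already held one while the truly pending builder starved, dropping a RAW frame. The replacement scans for a builder whose mImage is still unset. A condensed sketch of that lookup; note the committed loop keeps the last such entry, whereas returning on the first match (as below) picks the earliest request id, which TreeMap iteration order guarantees:

private static Map.Entry<Integer, ImageSaver.ImageSaverBuilder> nextPendingEntry(
        TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
    if (queue == null) {
        return null;
    }
    for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> e : queue.entrySet()) {
        if (e.getValue().mImage == null) {
            return e; // earliest request still waiting for an image
        }
    }
    return null; // nothing pending; the caller should just return
}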
@@ -1421,6 +1437,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void run() {
System.out.println("保存线程执行了" + (j++) + "次");
boolean success = false;
int format = mImage.getFormat();
switch (format) {
@@ -1539,6 +1556,7 @@ public class Camera2RawFragment extends Fragment {
public synchronized ImageSaver buildIfComplete() {
if (!isComplete()) {
Log.e("看看有没有问题", "问题");
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader);
@@ -1815,8 +1833,11 @@ public class Camera2RawFragment extends Fragment {
* @param queue the queue to remove this request from, if completed.
*/
private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder, TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
if (builder == null) return;
if (builder == null) {
return;
}
ImageSaver saver = builder.buildIfComplete();
System.out.println();
if (saver != null) {
queue.remove(requestId);
AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
@@ -1881,4 +1902,5 @@ public class Camera2RawFragment extends Fragment {
}
}

@@ -1,10 +1,15 @@
//package com.xypower.camera2raw;
//
//import android.hardware.camera2.params.TonemapCurve;
//
//import org.opencv.android.OpenCVLoader;
//import org.opencv.core.Core;
//import org.opencv.core.Mat;
//import org.opencv.imgcodecs.Imgcodecs;
//import org.opencv.photo.MergeDebevec;
//import org.opencv.photo.Photo;
//import org.opencv.photo.Tonemap;
//import org.opencv.photo.MergeExposures;
//
//public class HdrMergeExample {
//
@@ -14,6 +19,41 @@
// }
// }
//
//// public Mat createHdrImage(Mat[] mats) {
//// Mat hdrImage = new Mat();
//// Exposure.createHDRmergeMertens(mats, hdrImage);
//// return hdrImage;
//// }
////
//// public void saveHdrImage(Mat hdrImage, String filePath) {
//// Imgcodecs.imwrite(filePath, hdrImage);
//// }
////
//// // Usage example
//// public void processHdr() {
//// // Suppose you have a Mat array containing several images to merge into an HDR image
//// Mat[] images = ...;
////
//// Mat hdrImage = createHdrImage(images);
////
//// // Save the HDR image to device storage
//// saveHdrImage(hdrImage, "/path/to/save/hdrImage.jpg");
////
//// // Release resources
//// hdrImage.release();
//// for (Mat mat : images) {
//// mat.release();
//// }
//// }
//
// public Mat sdrToHdr(String inputPaths) {
// Mat image = Imgcodecs.imread(inputPaths);
// Mat hdr = new Mat();
// MergeDebevec mergeDebevec = Photo.createMergeDebevec();
// mergeDebevec.process(image, hdr, image.ge);
//
// }
//
// public Mat mergeHdrImages(Mat[] hdrImages) {
// Mat mergedImage = new Mat();
// Core.merge(hdrImages, mergedImage);
@@ -22,7 +62,7 @@
//
// public Mat toneMap(Mat hdrImage) {
// Mat ldrImage = new Mat();
// Tonemap.setTonemap(2); // Use the Gamma Tone Mapping
// TonemapCurve.CHANNEL_BLUE(2); // Use the Gamma Tone Mapping
// Tonemap.process(hdrImage, ldrImage);
// return ldrImage;
// }
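Note: the commented-out HdrMergeExample never compiled; sdrToHdr reads a single image, truncates at image.ge, and returns nothing, and Tonemap and Core.merge are misused. For reference, a minimal sketch of the OpenCV Java HDR pipeline it appears to be reaching for, assuming the caller supplies aligned exposures and their times; createMergeDebevec and createTonemap are the actual factory methods on org.opencv.photo.Photo:

import java.util.List;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.photo.MergeDebevec;
import org.opencv.photo.Photo;
import org.opencv.photo.Tonemap;

public class HdrSketch {
    // Merge several exposures into an HDR radiance map, then tone-map to LDR.
    public static Mat mergeAndTonemap(List<Mat> exposures, float[] timesSec) {
        Mat times = new Mat(timesSec.length, 1, CvType.CV_32F);
        for (int i = 0; i < timesSec.length; i++) {
            times.put(i, 0, timesSec[i]); // exposure time of frame i, seconds
        }
        MergeDebevec merge = Photo.createMergeDebevec();
        Mat hdr = new Mat();
        merge.process(exposures, hdr, times); // 32-bit float radiance map
        Tonemap tonemap = Photo.createTonemap(2.2f); // simple gamma tone mapping
        Mat ldr = new Mat();
        tonemap.process(hdr, ldr); // float output in [0, 1]
        Mat out = new Mat();
        ldr.convertTo(out, CvType.CV_8UC3, 255.0); // back to displayable 8-bit
        return out;
    }
}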

@@ -8,7 +8,7 @@ import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import org.opencv.android.OpenCVLoader;
//import org.opencv.android.OpenCVLoader;
public class MainActivity extends AppCompatActivity {
@@ -23,11 +23,11 @@ public class MainActivity extends AppCompatActivity {
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.d("dfsdfd", "success");
} else {
Log.d("dfsdfd", "failure");
}
// if (OpenCVLoader.initDebug()) {
// Log.d("dfsdfd", "success");
// } else {
// Log.d("dfsdfd", "failure");
// }
}
@Override

@@ -19,7 +19,6 @@
android:layout_height="30dp"
android:layout_marginLeft="20dp"
android:hint="曝光 单位 微秒"
android:inputType="number"
android:textColor="@color/black"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent" />
@@ -31,7 +30,6 @@
android:layout_height="30dp"
android:layout_marginLeft="20dp"
android:hint="ISO"
android:inputType="number"
android:textColor="@color/black"
app:layout_constraintLeft_toRightOf="@id/baoguang"
app:layout_constraintTop_toTopOf="parent" />
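Note: both layout hunks drop android:inputType="number", presumably because the fields now hold decimal multipliers read by Double.parseDouble. A sketch, not in the commit, of an alternative that keeps the numeric soft keyboard while permitting a decimal point, set from code in the fragment:

// Equivalent to android:inputType="numberDecimal" in the layout.
// Requires android.text.InputType and android.widget.EditText.
void allowDecimalInput(EditText field) {
    field.setInputType(InputType.TYPE_CLASS_NUMBER
            | InputType.TYPE_NUMBER_FLAG_DECIMAL);
}
// e.g. in onViewCreated: allowDecimalInput(baoguang); allowDecimalInput(iso);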
