From 6e8a8b020904760eaa0668ce014cc15575e2acb4 Mon Sep 17 00:00:00 2001
From: liuguijing <123456>
Date: Wed, 18 Dec 2024 16:52:31 +0800
Subject: [PATCH] Fix bug where one RAW image was missing
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../camera2raw/Camera2RawFragment.java        | 192 ++++++++++--------
 .../xypower/camera2raw/HdrMergeExample.java   |  42 +++-
 .../com/xypower/camera2raw/MainActivity.java  |  12 +-
 .../main/res/layout/fragment_camera2_raw.xml  |   2 -
 4 files changed, 154 insertions(+), 94 deletions(-)

diff --git a/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java b/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
index ba620db..68e7025 100644
--- a/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
+++ b/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
@@ -59,6 +59,8 @@ import android.widget.Button;
 import android.widget.EditText;
 import android.widget.Toast;
 
+import org.opencv.android.OpenCVLoader;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -91,6 +93,7 @@ public class Camera2RawFragment extends Fragment {
      */
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
 
+
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 0);
        ORIENTATIONS.append(Surface.ROTATION_90, 90);
@@ -368,7 +371,7 @@ public class Camera2RawFragment extends Fragment {
 
        @Override
        public void onImageAvailable(ImageReader reader) {
-            dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
+            dequeueAndSaveImage( mJpegResultQueue, mJpegImageReader);
        }
 
    };
@@ -381,11 +384,13 @@ public class Camera2RawFragment extends Fragment {
 
        @Override
        public void onImageAvailable(ImageReader reader) {
-            dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
+            dequeueAndSaveImage( mRawResultQueue, mRawImageReader);
        }
 
    };
 
+    private Long exposetime;
+    private Integer sensitivity;
    /**
     * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
     * pre-capture sequence.
@@ -403,8 +408,10 @@ public class Camera2RawFragment extends Fragment {
                    boolean readyToCapture = true;
                    if (!mNoAFRun) {
                        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
-                        Long exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);//获取自动曝光时间
-                        Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);//获取自动ISO
+                        //获取自动曝光时间
+                        exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+                        //获取自动ISO
+                        sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
                        if (afState == null) {
                            break;
                        }
@@ -436,7 +443,7 @@ public class Camera2RawFragment extends Fragment {
                    if (readyToCapture && mPendingUserCaptures > 0) {
                        // Capture once for each user tap of the "Picture" button.
                        while (mPendingUserCaptures > 0) {
-                            captureStillPictureLocked(exposureTime, sensitivity);
+                            captureStillPictureLocked(exposetime, sensitivity);
                            mPendingUserCaptures--;
                        }
                        // After this, the camera will go back to the normal state of preview.
@@ -466,22 +473,22 @@ public class Camera2RawFragment extends Fragment {
    private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
 
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
-            String currentDateTime = generateTimestamp();
-            File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
-            File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
-
-            // Look up the ImageSaverBuilder for this request and update it with the file name
-            // based on the capture start time.
-            ImageSaver.ImageSaverBuilder jpegBuilder;
-            ImageSaver.ImageSaverBuilder rawBuilder;
-            int requestId = (int) request.getTag();
            synchronized (mCameraStateLock) {
+                String currentDateTime = generateTimestamp();
+                File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
+                File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
+
+                // Look up the ImageSaverBuilder for this request and update it with the file name
+                // based on the capture start time.
+                ImageSaver.ImageSaverBuilder jpegBuilder;
+                ImageSaver.ImageSaverBuilder rawBuilder;
+                int requestId = (int) request.getTag();
                jpegBuilder = mJpegResultQueue.get(requestId);
                rawBuilder = mRawResultQueue.get(requestId);
-            }
-            if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
-            if (rawBuilder != null) rawBuilder.setFile(rawFile);
+                if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
+                if (rawBuilder != null) rawBuilder.setFile(rawFile);
+            }
        }
 
        @Override
@@ -503,7 +510,7 @@ public class Camera2RawFragment extends Fragment {
                }
                if (rawBuilder != null) {
                    rawBuilder.setResult(result);
-                    if (jpegBuilder != null) sb.append(", ");
+                    if (rawBuilder != null) sb.append(", ");
                    sb.append("Saving RAW as: ");
                    sb.append(rawBuilder.getSaveLocation());
                }
@@ -512,7 +519,7 @@ public class Camera2RawFragment extends Fragment {
                handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
                handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
 
-                finishedCaptureLocked();
+//                finishedCaptureLocked();
            }
 
            showToast(sb.toString());
@@ -544,8 +551,12 @@ public class Camera2RawFragment extends Fragment {
        }
    };
    public boolean isHandTakePic = true;
-    private long exposureTime = 0;
-    private int sensitivity = 0;
+    private double pic1 = 1;
+    private double pic2 = 1;
+    private int i = 0;
+    private static int j = 0;
+    // private long exposureTime = 0;
+//    private int sensitivity = 0;
    public static Camera2RawFragment newInstance() {
        return new Camera2RawFragment();
    }
@@ -577,12 +588,12 @@ public class Camera2RawFragment extends Fragment {
        Button takepic = view.findViewById(R.id.takepic);
        EditText baoguang = view.findViewById(R.id.baoguang);
        EditText iso = view.findViewById(R.id.iso);
-        if (exposureTime > 0) {
-            baoguang.setText(exposureTime + "");
-        }
-        if (sensitivity > 0) {
-            iso.setText(sensitivity + "");
-        }
+//        if (exposureTime > 0) {
+//            baoguang.setText(exposureTime + "");
+//        }
+//        if (sensitivity > 0) {
+//            iso.setText(sensitivity + "");
+//        }
        takepic.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                String s1 = baoguang.getText().toString();
                String s2 = iso.getText().toString();
                if (s1 != null && !s1.equals("")) {
-                    exposureTime = Long.parseLong(s1);
+                    pic1 = Double.parseDouble(s1);
                }
                if (s2 != null && !s2.equals("")) {
-                    sensitivity = Integer.parseInt(s2);
+//                    sensitivity = Integer.parseInt(s2);
+                    pic2 = Double.parseDouble(s2);
                }
                takePicture();
            }
@@ -749,6 +761,8 @@ public class Camera2RawFragment extends Fragment {
            Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
+//            Size largestRaw = new Size(5760, 4312);
+
 
            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
@@ -1032,20 +1046,6 @@ public class Camera2RawFragment extends Fragment {
            // Allow AWB to run auto-magically if this device supports this
            builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
        }
-
-//        // 设置曝光时间,例如设置为1000微秒
-////        long exposureTime = 1000 000000L; // 1000微秒
-//        if (isHandTakePic) {
-//            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
-//            if (exposureTime > 0) {
-//                long value = exposureTime * 1000L;
-//                builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, value);
-//            }
-//            if (sensitivity > 0) {
-//                builder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
-//            }
-//        }
-
    }
 
    /**
@@ -1074,11 +1074,6 @@ public class Camera2RawFragment extends Fragment {
                    ex.printStackTrace();
                }
            }
-//            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
-//                StreamConfigurationMap streamConfigurationMap = mCharacteristics.get(
-//                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
-//                System.out.println(streamConfigurationMap);
-//            }
 
            // For still image captures, we always use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
@@ -1133,22 +1128,6 @@ public class Camera2RawFragment extends Fragment {
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
 
-        // Initially, output stream images from the Camera2 API will be rotated to the native
-        // device orientation from the sensor's orientation, and the TextureView will default to
-        // scaling these buffers to fill it's view bounds.  If the aspect ratios and relative
-        // orientations are correct, this is fine.
-        //
-        // However, if the device orientation has been rotated relative to its native
-        // orientation so that the TextureView's dimensions are swapped relative to the
-        // native device orientation, we must do the following to ensure the output stream
-        // images are not incorrectly scaled by the TextureView:
-        //   - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
-        //     in the native device orientation) to the TextureView's dimension.
-        //   - Apply a scale-to-fill from the output buffer's rotated dimensions
-        //     (i.e. its dimensions in the current device orientation) to the TextureView's
-        //     dimensions.
-        //   - Apply the rotation from the native device orientation to the current device
-        //     rotation.
        if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
@@ -1254,24 +1233,41 @@ public class Camera2RawFragment extends Fragment {
 //            mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
 
            List requests = new ArrayList<>();
-            for (int idx = 0; idx <2; idx++) {
+            for (int idx = 0; idx < 2; idx++) {
                // Set request tag to easily track results in callbacks.
                captureBuilder.setTag(mRequestCounter.getAndIncrement());
 
-                if (idx == 1) {
-//                    captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
-                    // 设置曝光时间,例如设置为1000微秒
-//                    long exposureTime = 1000 000000L; // 1000微秒
-                    if (isHandTakePic) {
-                        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
-                        if (exposureTime > 0) {
-                            captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureTime * 2);
-                        }
-                        if (sensitivity > 0) {
-                            captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
-                        }
-                    }
-                }
+//                if (idx == 0) {
+////                    captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
+//                    // 设置曝光时间,例如设置为1000微秒
+////                    long exposureTime = 1000 000000L; // 1000微秒
+//                    if (isHandTakePic) {
+//                        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+//                        if (exposureTime > 0) {
+//                            double v = exposureTime * pic1;
+//                            captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
+//                        }
+//                        if (sensitivity > 0) {
+//                            captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
+//                        }
+//                    }
+//                }
+//
+//                if (idx == 1) {
+////                    captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
+//                    // 设置曝光时间,例如设置为1000微秒
+////                    long exposureTime = 1000 000000L; // 1000微秒
+//                    if (isHandTakePic) {
+//                        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+//                        if (exposureTime > 0) {
+//                            double v = exposureTime * pic2;
+//                            captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
+//                        }
+//                        if (sensitivity > 0) {
+//                            captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
+//                        }
+//                    }
+//                }
 
                CaptureRequest request = captureBuilder.build();
@@ -1288,7 +1284,7 @@ public class Camera2RawFragment extends Fragment {
                requests.add(request);
            }
 
-//            mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
+            mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
 //
 //            // Set request tag to easily track results in callbacks.
 //            captureBuilder.setTag(mRequestCounter.getAndIncrement());
@@ -1343,9 +1339,23 @@ public class Camera2RawFragment extends Fragment {
     * @param reader       a reference counted wrapper containing an {@link ImageReader} from which
     *                     to acquire an image.
     */
-    private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
+    private void dequeueAndSaveImage( TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
        synchronized (mCameraStateLock) {
-            Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
+            Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
+            if (pendingQueue != null) {
+                for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> item : pendingQueue.entrySet()) {
+                    ImageSaver.ImageSaverBuilder value = item.getValue();
+                    if (value.mImage == null) {
+                        entry = item;
+                    }
+                }
+            }
+//            Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
+
+//            ImageSaver.ImageSaverBuilder builder = entry.getValue();
+            if (entry == null) {
+                return;
+            }
            ImageSaver.ImageSaverBuilder builder = entry.getValue();
 
            // Increment reference count to prevent ImageReader from being closed while we
@@ -1365,9 +1375,15 @@ public class Camera2RawFragment extends Fragment {
                pendingQueue.remove(entry.getKey());
                return;
            }
-
+//            String currentDateTime = generateTimestamp();
+//            File file;
+//            if (type == 0) {
+//                file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
+//            } else {
+//                file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
+//            }
+//            builder.setFile(file);
            builder.setRefCountedReader(reader).setImage(image);
 
-            handleCompletionLocked(entry.getKey(), builder, pendingQueue);
        }
    }
@@ -1421,6 +1437,7 @@ public class Camera2RawFragment extends Fragment {
 
        @Override
        public void run() {
+            System.out.println("保存线程执行了" + (j++) + "次");
            boolean success = false;
            int format = mImage.getFormat();
            switch (format) {
@@ -1539,6 +1556,7 @@ public class Camera2RawFragment extends Fragment {
 
        public synchronized ImageSaver buildIfComplete() {
            if (!isComplete()) {
+                Log.e("看看有没有问题", "问题");
                return null;
            }
            return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader);
@@ -1815,8 +1833,11 @@ public class Camera2RawFragment extends Fragment {
     * @param queue     the queue to remove this request from, if completed.
     */
    private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder, TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
-        if (builder == null) return;
+        if (builder == null) {
+            return;
+        }
        ImageSaver saver = builder.buildIfComplete();
+        System.out.println();
        if (saver != null) {
            queue.remove(requestId);
            AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
        }
@@ -1881,4 +1902,5 @@ public class Camera2RawFragment extends Fragment {
    }
 
 
+
 }
\ No newline at end of file
diff --git a/app/src/main/java/com/xypower/camera2raw/HdrMergeExample.java b/app/src/main/java/com/xypower/camera2raw/HdrMergeExample.java
index 03e59e3..ac63746 100644
--- a/app/src/main/java/com/xypower/camera2raw/HdrMergeExample.java
+++ b/app/src/main/java/com/xypower/camera2raw/HdrMergeExample.java
@@ -1,10 +1,15 @@
 //package com.xypower.camera2raw;
 //
+//import android.hardware.camera2.params.TonemapCurve;
+//
 //import org.opencv.android.OpenCVLoader;
 //import org.opencv.core.Core;
 //import org.opencv.core.Mat;
 //import org.opencv.imgcodecs.Imgcodecs;
+//import org.opencv.photo.MergeDebevec;
+//import org.opencv.photo.Photo;
 //import org.opencv.photo.Tonemap;
+//import org.opencv.photo.MergeExposures;
 //
 //public class HdrMergeExample {
 //
@@ -14,6 +19,41 @@
 //        }
 //    }
 //
+////    public Mat createHdrImage(Mat[] mats) {
+////        Mat hdrImage = new Mat();
+////        Exposure.createHDRmergeMertens(mats, hdrImage);
+////        return hdrImage;
+////    }
+////
+////    public void saveHdrImage(Mat hdrImage, String filePath) {
+////        Imgcodecs.imwrite(filePath, hdrImage);
+////    }
+////
+////    // 使用示例
+////    public void processHdr() {
+////        // 假设你有一个Mat数组,包含了多张需要合成HDR的图片
+////        Mat[] images = ...;
+////
+////        Mat hdrImage = createHdrImage(images);
+////
+////        // 保存HDR图片到设备存储
+////        saveHdrImage(hdrImage, "/path/to/save/hdrImage.jpg");
+////
+////        // 释放资源
+////        hdrImage.release();
+////        for (Mat mat : images) {
+////            mat.release();
+////        }
+////    }
+//
+//    public Mat sdrToHdr(String inputPaths) {
+//        Mat image = Imgcodecs.imread(inputPaths);
+//        Mat hdr = new Mat();
+//        MergeDebevec mergeDebevec = Photo.createMergeDebevec();
+//        mergeDebevec.process(image, hdr, image.ge);
+//
+//    }
+//
 //    public Mat mergeHdrImages(Mat[] hdrImages) {
 //        Mat mergedImage = new Mat();
 //        Core.merge(hdrImages, mergedImage);
@@ -22,7 +62,7 @@
 //
 //    public Mat toneMap(Mat hdrImage) {
 //        Mat ldrImage = new Mat();
-//        Tonemap.setTonemap(2); // Use the Gamma Tone Mapping
+//        TonemapCurve.CHANNEL_BLUE(2); // Use the Gamma Tone Mapping
 //        Tonemap.process(hdrImage, ldrImage);
 //        return ldrImage;
 //    }
diff --git a/app/src/main/java/com/xypower/camera2raw/MainActivity.java b/app/src/main/java/com/xypower/camera2raw/MainActivity.java
index 4217480..a0f10d7 100644
--- a/app/src/main/java/com/xypower/camera2raw/MainActivity.java
+++ b/app/src/main/java/com/xypower/camera2raw/MainActivity.java
@@ -8,7 +8,7 @@ import android.util.Log;
 import android.view.Menu;
 import android.view.MenuItem;
 
-import org.opencv.android.OpenCVLoader;
+//import org.opencv.android.OpenCVLoader;
 
 public class MainActivity extends AppCompatActivity {
 
@@ -23,11 +23,11 @@ public class MainActivity extends AppCompatActivity {
    @Override
    protected void onResume() {
        super.onResume();
-        if (OpenCVLoader.initDebug()) {
-            Log.d("dfsdfd", "成功");
-        } else {
-            Log.d("dfsdfd", "失败");
-        }
+//        if (OpenCVLoader.initDebug()) {
+//            Log.d("dfsdfd", "成功");
+//        } else {
+//            Log.d("dfsdfd", "失败");
+//        }
    }
 
    @Override
diff --git a/app/src/main/res/layout/fragment_camera2_raw.xml b/app/src/main/res/layout/fragment_camera2_raw.xml
index 46e5c4b..516841e 100644
--- a/app/src/main/res/layout/fragment_camera2_raw.xml
+++ b/app/src/main/res/layout/fragment_camera2_raw.xml
@@ -19,7 +19,6 @@
        android:layout_height="30dp"
        android:layout_marginLeft="20dp"
        android:hint="曝光 单位 微秒"
-        android:inputType="number"
        android:textColor="@color/black"
        app:layout_constraintLeft_toLeftOf="parent"
        app:layout_constraintTop_toTopOf="parent" />
@@ -31,7 +30,6 @@
        android:layout_height="30dp"
        android:layout_marginLeft="20dp"
        android:hint="ISO"
-        android:inputType="number"
        android:textColor="@color/black"
        app:layout_constraintLeft_toRightOf="@id/baoguang"
        app:layout_constraintTop_toTopOf="parent" />
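
Note on the capture path this patch enables: captureStillPictureLocked() now feeds the auto-metered exposetime/sensitivity into a two-request burst submitted with mCaptureSession.captureBurst(), while the per-request exposure overrides (the idx == 0 / idx == 1 blocks scaling by pic1/pic2) remain commented out. The sketch below is not the patch's code; it is a minimal standalone illustration of that bracketing idea, where the class, parameter names, and the factors array are hypothetical and an already-configured CameraDevice, CameraCaptureSession, target Surfaces, and preview-metered exposure/ISO values are assumed.

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraDevice;
    import android.hardware.camera2.CaptureRequest;
    import android.os.Handler;
    import android.view.Surface;

    import java.util.ArrayList;
    import java.util.List;

    /** Hypothetical helper: submit a two-shot exposure bracket around a metered exposure. */
    final class ExposureBracketSketch {

        static void captureBracket(CameraDevice camera,
                                   CameraCaptureSession session,
                                   List<Surface> targets,
                                   long meteredExposureNs,   // SENSOR_EXPOSURE_TIME from the preview result
                                   int meteredIso,           // SENSOR_SENSITIVITY from the preview result
                                   double[] factors,         // e.g. {1.0, 2.0}, the role pic1/pic2 play
                                   CameraCaptureSession.CaptureCallback callback,
                                   Handler handler) throws CameraAccessException {
            List<CaptureRequest> burst = new ArrayList<>();
            for (double factor : factors) {
                CaptureRequest.Builder builder = camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
                for (Surface target : targets) {
                    builder.addTarget(target);    // e.g. the JPEG and RAW ImageReader surfaces
                }
                // Manual exposure: switch AE off and scale the metered exposure for this shot.
                builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
                builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) (meteredExposureNs * factor));
                builder.set(CaptureRequest.SENSOR_SENSITIVITY, meteredIso);
                burst.add(builder.build());
            }
            // The callback fires once per request; tagging each request (as the patch does with
            // mRequestCounter) lets onCaptureStarted/onCaptureCompleted match results to builders.
            session.captureBurst(burst, callback, handler);
        }
    }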
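The commented-out HdrMergeExample code is left unfinished: sdrToHdr() reads a single image, passes a truncated argument (image.ge) and never returns, and Exposure.createHDRmergeMertens / TonemapCurve.CHANNEL_BLUE(2) do not appear to be real OpenCV calls (the factory methods live on org.opencv.photo.Photo). A rough, hedged sketch of how the two bracketed frames could be merged with the OpenCV Java bindings follows; the file paths and exposure times are placeholders, and on Android it assumes the native library has been loaded first (the OpenCVLoader.initDebug() call that MainActivity currently comments out).

    import org.opencv.core.CvType;
    import org.opencv.core.Mat;
    import org.opencv.imgcodecs.Imgcodecs;
    import org.opencv.photo.MergeDebevec;
    import org.opencv.photo.Photo;
    import org.opencv.photo.Tonemap;

    import java.util.Arrays;
    import java.util.List;

    final class HdrMergeSketch {

        /** Merge exposure-bracketed LDR frames into one tone-mapped 8-bit image (Debevec). */
        static Mat mergeBracket(List<Mat> frames, float[] exposureTimesSec) {
            // Debevec merging needs each frame's exposure time (seconds) as a 1xN CV_32F Mat.
            Mat times = new Mat(1, exposureTimesSec.length, CvType.CV_32F);
            for (int i = 0; i < exposureTimesSec.length; i++) {
                times.put(0, i, exposureTimesSec[i]);
            }
            Mat hdr = new Mat();
            MergeDebevec merge = Photo.createMergeDebevec();
            merge.process(frames, hdr, times);

            // Tone-map the floating-point HDR result back to [0, 1], then scale to 8 bits for saving.
            Mat ldr = new Mat();
            Tonemap tonemap = Photo.createTonemap(2.2f);  // simple gamma tone mapping
            tonemap.process(hdr, ldr);
            Mat ldr8 = new Mat();
            ldr.convertTo(ldr8, CvType.CV_8UC3, 255.0);
            return ldr8;
        }

        /** Exposure-fusion variant (MergeMertens): no exposure times required. */
        static Mat fuseBracket(List<Mat> frames) {
            Mat fused = new Mat();
            Photo.createMergeMertens().process(frames, fused);
            Mat fused8 = new Mat();
            fused.convertTo(fused8, CvType.CV_8UC3, 255.0);
            return fused8;
        }

        // Illustrative usage with placeholder paths for the two bracketed JPEGs.
        public static void main(String[] args) {
            List<Mat> frames = Arrays.asList(Imgcodecs.imread("/sdcard/DCIM/short.jpg"),
                                             Imgcodecs.imread("/sdcard/DCIM/long.jpg"));
            Mat result = mergeBracket(frames, new float[]{1f / 100f, 1f / 25f});
            Imgcodecs.imwrite("/sdcard/DCIM/hdr_result.jpg", result);
        }
    }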