From af20d89452ed974e44118eda2643ecf60920852d Mon Sep 17 00:00:00 2001
From: liuguijing <123456>
Date: Wed, 18 Dec 2024 23:31:56 +0800
Subject: [PATCH] Ten-shot burst capture (十连拍)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../camera2raw/Camera2RawFragment.java | 248 ++++++++++++------
 1 file changed, 166 insertions(+), 82 deletions(-)

diff --git a/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java b/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
index 4d96e77..b430e2d 100644
--- a/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
+++ b/app/src/main/java/com/xypower/camera2raw/Camera2RawFragment.java
@@ -124,12 +124,12 @@ public class Camera2RawFragment extends Fragment {
     /**
      * Max preview width that is guaranteed by Camera2 API
      */
-    private static final int MAX_PREVIEW_WIDTH = 5760;
+    private static final int MAX_PREVIEW_WIDTH = 1920;
 
     /**
      * Max preview height that is guaranteed by Camera2 API
      */
-    private static final int MAX_PREVIEW_HEIGHT = 4312;
+    private static final int MAX_PREVIEW_HEIGHT = 1080;
 
     /**
      * Tag for the {@link Log}.
@@ -265,7 +265,7 @@ public class Camera2RawFragment extends Fragment {
      * captures. This is used to allow us to clean up the {@link ImageReader} when all background
      * tasks using its {@link Image}s have completed.
      */
-    private RefCountedAutoCloseable mJpegImageReader;
+//    private RefCountedAutoCloseable mJpegImageReader;
 
     /**
      * A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
@@ -371,7 +371,7 @@ public class Camera2RawFragment extends Fragment {
 
         @Override
         public void onImageAvailable(ImageReader reader) {
-            dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
+//            dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
         }
 
     };
@@ -391,6 +391,7 @@ public class Camera2RawFragment extends Fragment {
     private Long exposetime;
     private Integer sensitivity;
 
+    /**
     /**
      * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
      * pre-capture sequence.
@@ -619,7 +620,7 @@ public class Camera2RawFragment extends Fragment {
                 for (String item : strings) {
                     if (item.contains("RAW")) {
                         File file1 = new File(directory, item);
-                        File file2 = new File(directory, "create_"+generateTimestamp()+".jpg");
+                        File file2 = new File(directory, "create_" + item + ".jpg");
                         try {
                             RawToJpgConverter.convertRawToJpg(file1.getPath(), file2.getPath());
                         } catch (IOException e) {
@@ -757,51 +758,6 @@ public class Camera2RawFragment extends Fragment {
      * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
      */
     private boolean setUpCameraOutputs() {
-
-
-
-
-
-
-
-//        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
-//
-//        try {
-//            // 打开相机
-//            String cameraId = manager.getCameraIdList()[0];
-//            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
-//            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-//
-//            // 选择输出尺寸
-//            Size rawSize = map.getOutputSizes(ImageFormat.RAW_SENSOR)[0]; // 选择最小的尺寸
-//
-//            // 创建一个合适的CameraCaptureSession
-//            manager.openSession(/* ... 
*/); -// -// // 创建一个RequestBuilder来拍摄RAW图像 -// CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); -// builder.addTarget(rawReader.getSurface()); // 假设你已经设置了一个ImageReader来接收RAW数据 -// -// // 设置相机的输出尺寸 -// builder.set(CaptureRequest.SENSOR_ORIENTATION, getOrientation(characteristics)); -// builder.set(CaptureRequest.JPEG_QUALITY, 0); // 确保不会生成JPEG图像 -// -// // 开始捕获 -// cameraDevice.createCaptureSession(/* ... */); -// -// } catch (CameraAccessException e) { -// // 异常处理 -// } -// - - - - - - - - - Activity activity = getActivity(); CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); if (manager == null) { @@ -825,17 +781,16 @@ public class Camera2RawFragment extends Fragment { Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR); Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); -// Size largestRaw = rawSizes[0]; synchronized (mCameraStateLock) { // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference // counted wrapper to ensure they are only closed when all background tasks // using them are finished. - if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { - mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); - } - mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); +// if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { +// mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); +// } +// mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) { mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5)); @@ -951,10 +906,10 @@ public class Camera2RawFragment extends Fragment { mCameraDevice.close(); mCameraDevice = null; } - if (null != mJpegImageReader) { - mJpegImageReader.close(); - mJpegImageReader = null; - } +// if (null != mJpegImageReader) { +// mJpegImageReader.close(); +// mJpegImageReader = null; +// } if (null != mRawImageReader) { mRawImageReader.close(); mRawImageReader = null; @@ -1013,7 +968,7 @@ public class Camera2RawFragment extends Fragment { mPreviewRequestBuilder.addTarget(surface); // Here, we create a CameraCaptureSession for camera preview. - mCameraDevice.createCaptureSession(Arrays.asList(surface, mJpegImageReader.get().getSurface(), mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() { + mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(CameraCaptureSession cameraCaptureSession) { synchronized (mCameraStateLock) { @@ -1139,8 +1094,8 @@ public class Camera2RawFragment extends Fragment { } } // For still image captures, we always use the largest available size. 
-// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); - Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); + Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); + // Find the rotation of the device relative to the native device orientation. int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation(); Point displaySize = new Point(); @@ -1175,14 +1130,12 @@ public class Camera2RawFragment extends Fragment { // Find the best preview size for these view dimensions and configured JPEG size. // Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg); - Size previewSize = new Size(5760, 4312); + Size previewSize = new Size(3840,2160); if (swappedDimensions) { -// mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth()); - mTextureView.setAspectRatio(5376, 4312); + mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth()); } else { -// mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight()); - mTextureView.setAspectRatio(5376, 4312); + mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight()); } // Find rotation of device in degrees (reverse device orientation for front-facing @@ -1206,23 +1159,19 @@ public class Camera2RawFragment extends Fragment { if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) { if (rotation == 0) { matrix.postScale(1, 1); - } else if (rotation == 90){ + } else if (rotation == 90) { bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); - float scaleh = - (float) viewHeight / previewSize.getHeight(); - float scalew = - (float) viewWidth / previewSize.getWidth(); + float scaleh = (float) viewHeight / previewSize.getHeight(); + float scalew = (float) viewWidth / previewSize.getWidth(); matrix.postScale(scalew, scaleh, centerX, centerY); - } else if (rotation == 180){ + } else if (rotation == 180) { matrix.postScale(1, 1); - }else if (rotation == 270){ + } else if (rotation == 270) { bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); - float scaleh = - (float) viewHeight / previewSize.getHeight(); - float scalew = - (float) viewWidth / previewSize.getWidth(); + float scaleh = (float) viewHeight / previewSize.getHeight(); + float scalew = (float) viewWidth / previewSize.getWidth(); matrix.postScale(scaleh, scalew, centerX, centerY); } } @@ -1303,7 +1252,7 @@ public class Camera2RawFragment extends Fragment { // This is the CaptureRequest.Builder that we use to take a picture. final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); - captureBuilder.addTarget(mJpegImageReader.get().getSurface()); +// captureBuilder.addTarget(mJpegImageReader.get().getSurface()); captureBuilder.addTarget(mRawImageReader.get().getSurface()); // Use the same AE and AF modes as the preview. 
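One of the hunks above drops the chooseOptimalSize(...) call and hard-codes the preview size to new Size(3840, 2160). For reference, a minimal sketch of how such a size can instead be derived from the supported SurfaceTexture output sizes, bounded by MAX_PREVIEW_WIDTH/MAX_PREVIEW_HEIGHT, is shown below. The helper name choosePreviewSize is hypothetical; android.util.Size, java.util collections, and the CompareSizesByArea comparator are assumed to be available as in the stock Camera2Raw sample. This sketch is not part of the patch.

    // Sketch: pick the smallest supported preview size that matches the target aspect ratio,
    // is at least as large as the view, and stays within the guaranteed preview bounds.
    // Falls back to the largest size that fits, then to the first choice, if nothing qualifies.
    private static Size choosePreviewSize(Size[] choices, int viewWidth, int viewHeight,
                                          int maxWidth, int maxHeight, Size aspectRatio) {
        List<Size> bigEnough = new ArrayList<>();
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() > maxWidth || option.getHeight() > maxHeight) {
                continue; // exceeds the guaranteed preview bounds
            }
            if (option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= viewWidth && option.getHeight() >= viewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }
        if (!bigEnough.isEmpty()) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (!notBigEnough.isEmpty()) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        }
        return choices[0];
    }

    // Possible call site, mirroring the commented-out chooseOptimalSize(...) line:
    // Size previewSize = choosePreviewSize(map.getOutputSizes(SurfaceTexture.class),
    //         rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
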
@@ -1324,7 +1273,7 @@ public class Camera2RawFragment extends Fragment { // mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP) List requests = new ArrayList<>(); - for (int idx = 0; idx < 2; idx++) { + for (int idx = 0; idx < 10; idx++) { // Set request tag to easily track results in callbacks. captureBuilder.setTag(mRequestCounter.getAndIncrement()); @@ -1351,11 +1300,146 @@ public class Camera2RawFragment extends Fragment { if (isHandTakePic) { captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); if (exposureTime > 0) { - double v = exposureTime * pic2; + double v = exposureTime * 2; captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); } if (sensitivity > 0) { - captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 2) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 3; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 3) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 4; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 4) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 5; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 5) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 6; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 6) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 7; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 7) { +// 
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 8; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 8) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 9; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 9) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 10; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); + } + } + } + if (idx == 10) { +// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4)); + // 设置曝光时间,例如设置为1000微秒 +// long exposureTime = 1000 000000L; // 1000微秒 + if (isHandTakePic) { + captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); + if (exposureTime > 0) { + double v = exposureTime * 11; + captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v); + } + if (sensitivity > 0) { + captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100); } } }
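The ten near-identical if (idx == N) branches added above differ only in the exposure-time multiplier (2 through 11) and all pin the sensitivity to ISO 100; the final idx == 10 branch can never execute because the loop runs idx = 0..9. A compact sketch of the same bracketing behaviour follows for reference. It reuses identifiers from the patch (captureBuilder, mRequestCounter, isHandTakePic, exposureTime, sensitivity, requests), assumes the first manual branch corresponds to idx == 1, and is not part of the patch.

    // Sketch: linear exposure bracketing over a ten-shot burst.
    // For idx >= 1 in manual mode, the exposure time is scaled by (idx + 1) and ISO is fixed at 100.
    for (int idx = 0; idx < 10; idx++) {
        // Tag each request so results can be matched in the capture callbacks.
        captureBuilder.setTag(mRequestCounter.getAndIncrement());
        if (isHandTakePic && idx >= 1) {
            captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
            if (exposureTime > 0) {
                captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) (exposureTime * (idx + 1)));
            }
            if (sensitivity > 0) {
                captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
            }
        }
        requests.add(captureBuilder.build());
    }

Collapsing the branches this way keeps the per-frame behaviour identical while making the multiplier progression explicit and removing the unreachable idx == 10 case.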