Fix the bug where the RAW output was only a quarter of its full size

Hdr_ds
liuguijing 6 months ago
parent 6e8a8b0209
commit ed28180b5d

@@ -124,12 +124,12 @@ public class Camera2RawFragment extends Fragment {
/**
* Max preview width that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_WIDTH = 5760;
/**
* Max preview height that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_HEIGHT = 1080;
private static final int MAX_PREVIEW_HEIGHT = 4312;
/**
* Tag for the {@link Log}.
@@ -371,7 +371,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage( mJpegResultQueue, mJpegImageReader);
dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
}
};
@@ -384,7 +384,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage( mRawResultQueue, mRawImageReader);
dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
}
};
@@ -476,6 +476,7 @@ public class Camera2RawFragment extends Fragment {
synchronized (mCameraStateLock) {
String currentDateTime = generateTimestamp();
File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
// File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_a.dng");
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
// Look up the ImageSaverBuilder for this request and update it with the file name
@@ -553,8 +554,6 @@ public class Camera2RawFragment extends Fragment {
public boolean isHandTakePic = true;
private double pic1 = 1;
private double pic2 = 1;
private int i = 0;
private static int j = 0;
// private long exposureTime = 0;
// private int sensitivity = 0;
@@ -586,6 +585,7 @@ public class Camera2RawFragment extends Fragment {
};
Button takepic = view.findViewById(R.id.takepic);
Button button = view.findViewById(R.id.tojpg);
EditText baoguang = view.findViewById(R.id.baoguang);
EditText iso = view.findViewById(R.id.iso);
// if (exposureTime > 0) {
@@ -610,7 +610,26 @@ public class Camera2RawFragment extends Fragment {
takePicture();
}
});
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
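// Scan the public DCIM directory for RAW_*.dng files saved by this fragment and
// convert each one to a JPEG written alongside it.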
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
List<String> strings = RawToJpgConverter.listFiles(directory);
if (strings != null && strings.size() > 0) {
for (String item : strings) {
if (item.contains("RAW")) {
File file1 = new File(directory, item);
File file2 = new File(directory, "create_"+generateTimestamp()+".jpg");
try {
RawToJpgConverter.convertRawToJpg(file1.getPath(), file2.getPath());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
}
});
// mTextureView.setOnTouchListener(new View.OnTouchListener() {
// @Override
@@ -738,6 +757,51 @@ public class Camera2RawFragment extends Fragment {
* Sets up state related to camera that is needed before opening a {@link CameraDevice}.
*/
private boolean setUpCameraOutputs() {
// CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
//
// try {
// // Open the camera
// String cameraId = manager.getCameraIdList()[0];
// CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//
// // Choose an output size
// Size rawSize = map.getOutputSizes(ImageFormat.RAW_SENSOR)[0]; // pick the smallest size
//
// // Create a suitable CameraCaptureSession
// manager.openSession(/* ... */);
//
// // Create a request builder for capturing a RAW image
// CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// builder.addTarget(rawReader.getSurface()); // assumes an ImageReader has already been set up to receive RAW data
//
// // Set the camera output size
// builder.set(CaptureRequest.SENSOR_ORIENTATION, getOrientation(characteristics));
// builder.set(CaptureRequest.JPEG_QUALITY, 0); // make sure no JPEG image is generated
//
// // Start capturing
// cameraDevice.createCaptureSession(/* ... */);
//
// } catch (CameraAccessException e) {
// // Exception handling
// }
//
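// A minimal corrected sketch (not part of this commit) of what the block above seems to
// aim for: CameraManager has no openSession(); the capture session is created on the
// CameraDevice itself. Field names (mCameraDevice, mRawImageReader, mBackgroundHandler)
// are assumed to be the ones used elsewhere in this fragment.
//
// List<Surface> outputs = Arrays.asList(mRawImageReader.get().getSurface());
// mCameraDevice.createCaptureSession(outputs, new CameraCaptureSession.StateCallback() {
//     @Override
//     public void onConfigured(CameraCaptureSession session) {
//         try {
//             CaptureRequest.Builder builder =
//                     mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
//             builder.addTarget(mRawImageReader.get().getSurface());
//             session.capture(builder.build(), /*listener*/ null, mBackgroundHandler);
//         } catch (CameraAccessException e) {
//             Log.e(TAG, "RAW capture failed", e);
//         }
//     }
//
//     @Override
//     public void onConfigureFailed(CameraCaptureSession session) {
//         Log.e(TAG, "Failed to configure RAW capture session");
//     }
// }, mBackgroundHandler);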
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
if (manager == null) {
@@ -761,7 +825,7 @@ public class Camera2RawFragment extends Fragment {
Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
// Size largestRaw = new Size(5760, 4312);
// Size largestRaw = rawSizes[0];
synchronized (mCameraStateLock) {
@@ -769,7 +833,7 @@ public class Camera2RawFragment extends Fragment {
// counted wrapper to ensure they are only closed when all background tasks
// using them are finished.
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
}
mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler);
@@ -1075,8 +1139,8 @@ public class Camera2RawFragment extends Fragment {
}
}
// For still image captures, we always use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
// Find the rotation of the device relative to the native device orientation.
int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Point displaySize = new Point();
@@ -1110,12 +1174,15 @@ public class Camera2RawFragment extends Fragment {
}
// Find the best preview size for these view dimensions and configured JPEG size.
Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
// Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
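// Hard-code the preview to the sensor's full RAW resolution (5760x4312) rather than
// choosing a size from the SurfaceTexture outputs, so the preview matches the full-size RAW capture.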
Size previewSize = new Size(5760, 4312);
if (swappedDimensions) {
mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
// mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
mTextureView.setAspectRatio(5376, 4312);
} else {
mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
// mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
mTextureView.setAspectRatio(5376, 4312);
}
// Find rotation of device in degrees (reverse device orientation for front-facing
@@ -1128,12 +1195,36 @@ public class Camera2RawFragment extends Fragment {
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
// if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
// bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
// matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
// float scale = Math.max((float) viewHeight / previewSize.getHeight(), (float) viewWidth / previewSize.getWidth());
// matrix.postScale(scale, scale, centerX, centerY);
//
// }
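// For 90/270-degree device rotations, map the view rect onto the preview buffer rect and
// apply additional per-axis scaling so the buffer fills the TextureView; 0/180 degrees are left unscaled.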
if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
if (rotation == 0) {
matrix.postScale(1, 1);
} else if (rotation == 90) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) viewHeight / previewSize.getHeight(), (float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
float scaleh = (float) viewHeight / previewSize.getHeight();
float scalew = (float) viewWidth / previewSize.getWidth();
matrix.postScale(scalew, scaleh, centerX, centerY);
} else if (rotation == 180) {
matrix.postScale(1, 1);
} else if (rotation == 270) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scaleh = (float) viewHeight / previewSize.getHeight();
float scalew = (float) viewWidth / previewSize.getWidth();
matrix.postScale(scaleh, scalew, centerX, centerY);
}
}
matrix.postRotate(rotation, centerX, centerY);
@@ -1237,46 +1328,44 @@ public class Camera2RawFragment extends Fragment {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
// if (idx == 0) {
//// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// // Set the exposure time, e.g. to 1000 microseconds
//// long exposureTime = 1000 000000L; // 1000 microseconds
// if (isHandTakePic) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
// if (exposureTime > 0) {
// double v = exposureTime * pic1;
// captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
// }
// if (sensitivity > 0) {
// captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
// }
// }
// }
//
// if (idx == 1) {
//// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// // Set the exposure time, e.g. to 1000 microseconds
//// long exposureTime = 1000 000000L; // 1000 microseconds
// if (isHandTakePic) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
// if (exposureTime > 0) {
// double v = exposureTime * pic2;
// captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
// }
// if (sensitivity > 0) {
// captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
// }
// }
// }
if (idx == 0) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// Set the exposure time, e.g. to 1000 microseconds
// long exposureTime = 1000 000000L; // 1000 microseconds
if (isHandTakePic) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
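// With auto-exposure disabled, SENSOR_EXPOSURE_TIME is in nanoseconds and, together with
// SENSOR_SENSITIVITY, should stay within the ranges reported by
// CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE / SENSOR_INFO_SENSITIVITY_RANGE.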
if (exposureTime > 0) {
double v = exposureTime * pic1;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
}
if (idx == 1) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// Set the exposure time, e.g. to 1000 microseconds
// long exposureTime = 1000 000000L; // 1000 microseconds
if (isHandTakePic) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
double v = exposureTime * pic2;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
}
CaptureRequest request = captureBuilder.build();
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity)
.setCharacteristics(mCharacteristics);
ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity)
.setCharacteristics(mCharacteristics);
ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
@@ -1339,7 +1428,7 @@ public class Camera2RawFragment extends Fragment {
* @param reader a reference counted wrapper containing an {@link ImageReader} from which
* to acquire an image.
*/
private void dequeueAndSaveImage( TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
synchronized (mCameraStateLock) {
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
if (pendingQueue != null) {
@@ -1375,14 +1464,6 @@ public class Camera2RawFragment extends Fragment {
pendingQueue.remove(entry.getKey());
return;
}
// String currentDateTime = generateTimestamp();
// File file;
// if (type == 0) {
// file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
// } else {
// file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
// }
// builder.setFile(file);
builder.setRefCountedReader(reader).setImage(image);
handleCompletionLocked(entry.getKey(), builder, pendingQueue);
}
@@ -1437,7 +1518,6 @@ public class Camera2RawFragment extends Fragment {
@Override
public void run() {
System.out.println("保存线程执行了" + (j++) + "次");
boolean success = false;
int format = mImage.getFormat();
switch (format) {
@@ -1556,7 +1636,6 @@ public class Camera2RawFragment extends Fragment {
public synchronized ImageSaver buildIfComplete() {
if (!isComplete()) {
Log.e("看看有没有问题", "问题");
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader);

@@ -0,0 +1,73 @@
package com.xypower.camera2raw;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
public class RawToJpgConverter {
public static List<String> listFiles(File directory) {
List<String> list = new ArrayList<>();
File[] files = directory.listFiles();
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
// Recurse into subdirectories and keep their results (the original return value was discarded)
list.addAll(listFiles(file));
} else {
// Handle the file here; for now just collect its name
list.add(file.getName());
}
}
}
return list;
}
public static Bitmap convertRawToJpg(String path, String outpath) throws IOException {
// 1. Read the raw file into a byte array
InputStream inputStream = new FileInputStream(path);
byte[] rawData = inputStreamToByteArray(inputStream);
// 2. Decode the byte array into a Bitmap
Bitmap rawBitmap = BitmapFactory.decodeByteArray(rawData, 0, rawData.length);
if (rawBitmap == null) {
// Guard against decode failure, which would otherwise crash in compress() below
throw new IOException("Unable to decode " + path + " as a bitmap");
}
// 3. The Bitmap could be post-processed here if needed
// 4. Create the output streams for the JPG-encoded image data
FileOutputStream outputStream = new FileOutputStream(outpath);
ByteArrayOutputStream bytestream = new ByteArrayOutputStream();
// 5. Encode the Bitmap into the byte stream as JPEG
rawBitmap.compress(Bitmap.CompressFormat.JPEG, 100, bytestream);
// 6. Write the encoded bytes to the output file and close the stream
byte[] jpgData = bytestream.toByteArray();
outputStream.write(jpgData);
outputStream.close();
// 7. Return the JPG-format Bitmap
return BitmapFactory.decodeByteArray(jpgData, 0, jpgData.length);
}
private static byte[] inputStreamToByteArray(InputStream inputStream) throws IOException {
byte[] buffer = new byte[1024];
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteArrayOutputStream.write(buffer, 0, len);
}
byte[] data = byteArrayOutputStream.toByteArray();
byteArrayOutputStream.close();
inputStream.close();
return data;
}
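// Note (not part of this commit): BitmapFactory generally cannot decode the RAW sensor data
// inside a .dng, so decodeByteArray() above may return null on many devices. A hypothetical
// alternative sketch, assuming an API 28+ device whose ImageDecoder supports DNG input:
//
// public static Bitmap decodeDng(File dngFile) throws IOException {
//     android.graphics.ImageDecoder.Source source = android.graphics.ImageDecoder.createSource(dngFile);
//     return android.graphics.ImageDecoder.decodeBitmap(source);
// }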
}

@@ -43,4 +43,14 @@
android:text="拍照"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@id/iso" />
<Button
android:id="@+id/tojpg"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="20dp"
android:layout_marginTop="10dp"
android:text="转jpg"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@id/takepic" />
</androidx.constraintlayout.widget.ConstraintLayout>