HDR capture in YUV format

Hdr_ds
liuguijing 3 months ago
parent 50e32a1d84
commit 2eae434d19

@@ -0,0 +1,133 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xypower.camera2raw;
import java.io.OutputStream;
import java.nio.ByteBuffer;
/**
* A ByteBuffer-backed OutputStream that expands the internal ByteBuffer as required. Given this, the caller should
* always access the underlying ByteBuffer via the {@link #buffer()} method until all writes are completed.
*
* This class is typically used for 2 purposes:
*
* 1. Write to a ByteBuffer when there is a chance that we may need to expand it in order to fit all the desired data
* 2. Write to a ByteBuffer via methods that expect an OutputStream interface
*
* Hard-to-track bugs can happen when this class is used for the second purpose and unexpected buffer expansion happens.
* So, it's best to assume that buffer expansion can always happen. An improvement would be to create a separate class
* that throws an error if buffer expansion is required to avoid the issue altogether.
*/
public class ByteBufferOutputStream extends OutputStream {
private static final float REALLOCATION_FACTOR = 1.1f;
private final int initialCapacity;
private final int initialPosition;
private ByteBuffer buffer;
/**
* Creates an instance of this class that will write to the received `buffer` up to its `limit`. If necessary to
* satisfy `write` or `position` calls, larger buffers will be allocated so the {@link #buffer()} method may return
* a different buffer than the received `buffer` parameter.
*
* Prefer one of the constructors that allocate the internal buffer for clearer semantics.
*/
public ByteBufferOutputStream(ByteBuffer buffer) {
this.buffer = buffer;
this.initialPosition = buffer.position();
this.initialCapacity = buffer.capacity();
}
public ByteBufferOutputStream(int initialCapacity) {
this(initialCapacity, false);
}
public ByteBufferOutputStream(int initialCapacity, boolean directBuffer) {
this(directBuffer ? ByteBuffer.allocateDirect(initialCapacity) : ByteBuffer.allocate(initialCapacity));
}
public void write(int b) {
ensureRemaining(1);
buffer.put((byte) b);
}
public void write(byte[] bytes, int off, int len) {
ensureRemaining(len);
buffer.put(bytes, off, len);
}
public void write(ByteBuffer sourceBuffer) {
ensureRemaining(sourceBuffer.remaining());
buffer.put(sourceBuffer);
}
public ByteBuffer buffer() {
return buffer;
}
public int position() {
return buffer.position();
}
public int remaining() {
return buffer.remaining();
}
public int limit() {
return buffer.limit();
}
public void position(int position) {
ensureRemaining(position - buffer.position());
buffer.position(position);
}
/**
* The capacity of the first internal ByteBuffer used by this class. This is useful in cases where a pooled
* ByteBuffer was passed via the constructor and it needs to be returned to the pool.
*/
public int initialCapacity() {
return initialCapacity;
}
/**
* Ensure there is enough space to write some number of bytes, expanding the underlying buffer if necessary.
* This can be used to avoid incremental expansions through calls to {@link #write(int)} when you know how
* many total bytes are needed.
*
* @param remainingBytesRequired The number of bytes required
*/
public void ensureRemaining(int remainingBytesRequired) {
if (remainingBytesRequired > buffer.remaining())
expandBuffer(remainingBytesRequired);
}
private void expandBuffer(int remainingRequired) {
int expandSize = Math.max((int) (buffer.limit() * REALLOCATION_FACTOR), buffer.position() + remainingRequired);
ByteBuffer temp = ByteBuffer.allocate(expandSize);
int limit = limit();
buffer.flip();
temp.put(buffer);
buffer.limit(limit);
// reset the old buffer's position so that the partial data in the new buffer cannot be mistakenly consumed
// we should ideally only do this for the original buffer, but the additional complexity doesn't seem worth it
buffer.position(initialPosition);
buffer = temp;
}
}
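A minimal usage sketch for the class above (illustration only, not part of this commit): a write may reallocate the internal buffer, so the result must always be re-fetched via buffer() after writing.

import java.nio.ByteBuffer;

public class ByteBufferOutputStreamDemo {
    public static void main(String[] args) {
        ByteBufferOutputStream out = new ByteBufferOutputStream(4); // deliberately small
        out.write(new byte[]{1, 2, 3, 4, 5}, 0, 5); // exceeds capacity and triggers expansion
        ByteBuffer result = out.buffer(); // may be a different, larger buffer than the initial one
        result.flip(); // position -> 0, limit -> number of bytes written
        System.out.println("bytes written: " + result.remaining()); // prints: bytes written: 5
    }
}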

@@ -12,6 +12,7 @@ import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.SensorManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
@@ -259,7 +260,7 @@ public class Camera2RawFragment extends Fragment {
* captures. This is used to allow us to clean up the {@link ImageReader} when all background
* tasks using its {@link Image}s have completed.
*/
// private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
/**
* A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
@@ -365,7 +366,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onImageAvailable(ImageReader reader) {
// dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
}
};
@@ -378,7 +379,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
// dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
}
};
@@ -386,7 +387,7 @@ public class Camera2RawFragment extends Fragment {
private Long exposetime;
private Integer sensitivity;
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
* pre-capture sequence.
*/
@@ -472,7 +473,8 @@ public class Camera2RawFragment extends Fragment {
String currentDateTime = generateTimestamp();
File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
// File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_a.dng");
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
// File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "YUV_" + currentDateTime + ".bmp");
// Look up the ImageSaverBuilder for this request and update it with the file name
// based on the capture start time.
@@ -654,13 +656,9 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
startBackgroundThread(); // start the background thread
openCamera();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we should
// configure the preview bounds here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
} else {
@@ -771,7 +769,7 @@ public class Camera2RawFragment extends Fragment {
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// For still image captures, we use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
@@ -781,10 +779,10 @@ public class Camera2RawFragment extends Fragment {
// Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
// counted wrapper to ensure they are only closed when all background tasks
// using them are finished.
// if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
// mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
// }
// mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler);
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.YUV_420_888, /*maxImages*/5));
}
mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler);
if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
@@ -900,10 +898,10 @@ public class Camera2RawFragment extends Fragment {
mCameraDevice.close();
mCameraDevice = null;
}
// if (null != mJpegImageReader) {
// mJpegImageReader.close();
// mJpegImageReader = null;
// }
if (null != mJpegImageReader) {
mJpegImageReader.close();
mJpegImageReader = null;
}
if (null != mRawImageReader) {
mRawImageReader.close();
mRawImageReader = null;
@@ -917,7 +915,7 @@ public class Camera2RawFragment extends Fragment {
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
@@ -962,7 +960,7 @@ public class Camera2RawFragment extends Fragment {
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
mCameraDevice.createCaptureSession(Arrays.asList(surface, mJpegImageReader.get().getSurface(), mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
synchronized (mCameraStateLock) {
@@ -1088,7 +1086,7 @@ public class Camera2RawFragment extends Fragment {
}
}
// For still image captures, we always use the largest available size.
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
// Find the rotation of the device relative to the native device orientation.
int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
@@ -1123,8 +1121,8 @@ public class Camera2RawFragment extends Fragment {
}
// Find the best preview size for these view dimensions and configured JPEG size.
// Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
Size previewSize = new Size(3840,2160);
Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);
// Size previewSize = new Size(3840,2160);
if (swappedDimensions) {
mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
@@ -1246,7 +1244,7 @@ public class Camera2RawFragment extends Fragment {
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface());
// Use the same AE and AF modes as the preview.
@@ -1267,7 +1265,7 @@ public class Camera2RawFragment extends Fragment {
// mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
List<CaptureRequest> requests = new ArrayList<>();
for (int idx = 0; idx < 10; idx++) {
for (int idx = 0; idx < 2; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
@@ -1278,7 +1276,7 @@ public class Camera2RawFragment extends Fragment {
if (isHandTakePic) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
double v = exposureTime * pic1;
double v = exposureTime;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
@@ -1294,7 +1292,7 @@ public class Camera2RawFragment extends Fragment {
if (isHandTakePic) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
double v = exposureTime * 2;
double v = exposureTime * pic1;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
@@ -1548,6 +1546,115 @@ public class Camera2RawFragment extends Fragment {
}
}
private static void yuvToRgb(byte[] yuvData, int width, int height, int[] rgbData) {
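// Assumes packed planar I420 input: a full-resolution Y plane followed by
// quarter-resolution U and V planes, each with a row stride of width / 2.
// The coefficients below are the full-range BT.601 (JFIF) conversion.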
int frameSize = width * height;
int uOffset = frameSize;
int vOffset = frameSize + frameSize / 4;
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int Y = yuvData[y * width + x] & 0xFF;
int U = yuvData[uOffset + (y / 2) * (width / 2) + (x / 2)] & 0xFF;
int V = yuvData[vOffset + (y / 2) * (width / 2) + (x / 2)] & 0xFF;
// YUV to RGB conversion
int R = (int) (Y + 1.402 * (V - 128));
int G = (int) (Y - 0.344 * (U - 128) - 0.714 * (V - 128));
int B = (int) (Y + 1.772 * (U - 128));
// Clamp values to [0, 255]
R = Math.min(255, Math.max(0, R));
G = Math.min(255, Math.max(0, G));
B = Math.min(255, Math.max(0, B));
// Pack RGB into an integer
rgbData[y * width + x] = (0xFF << 24) | (R << 16) | (G << 8) | B;
}
}
}
private static void saveRgbToBmp(File file, int width, int height, int[] rgbData) {
try (FileOutputStream outputStream = new FileOutputStream(file)) {
int rowSize = (width * 3 + 3) & ~3; // each row padded to a 4-byte boundary
int imageSize = rowSize * height; // size of the pixel data
int fileSize = 54 + imageSize; // total file size
// BMP file header
outputStream.write('B');
outputStream.write('M');
writeInt(outputStream, fileSize); // file size
writeInt(outputStream, 0); // reserved
writeInt(outputStream, 54); // offset to the pixel data
// BMP info header (BITMAPINFOHEADER)
writeInt(outputStream, 40); // info header size
writeInt(outputStream, width); // image width
writeInt(outputStream, height); // image height
writeShort(outputStream, 1); // color planes
writeShort(outputStream, 24); // bits per pixel
writeInt(outputStream, 0); // compression (0 = BI_RGB, uncompressed)
writeInt(outputStream, imageSize); // image data size
writeInt(outputStream, 0); // horizontal resolution
writeInt(outputStream, 0); // vertical resolution
writeInt(outputStream, 0); // palette colors
writeInt(outputStream, 0); // important colors
// Pixel data
byte[] row = new byte[rowSize];
for (int y = height - 1; y >= 0; y--) { // BMP rows are stored bottom-to-top
for (int x = 0; x < width; x++) {
int pixel = rgbData[y * width + x];
row[x * 3] = (byte) (pixel & 0xFF); // B
row[x * 3 + 1] = (byte) ((pixel >> 8) & 0xFF); // G
row[x * 3 + 2] = (byte) ((pixel >> 16) & 0xFF); // R
}
outputStream.write(row);
}
Log.d("BmpUtils", "BMP saved to: " + file.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
}
private static void writeShort(FileOutputStream outputStream, int value) throws IOException {
outputStream.write(value & 0xFF);
outputStream.write((value >> 8) & 0xFF);
}
private static void writeInt(FileOutputStream outputStream, int value) throws IOException {
outputStream.write(value & 0xFF);
outputStream.write((value >> 8) & 0xFF);
outputStream.write((value >> 16) & 0xFF);
outputStream.write((value >> 24) & 0xFF);
}
private static void saveYuvAsBmp(Image image, File file) {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Invalid image format");
}
int width = image.getWidth();
int height = image.getHeight();
// Extract the YUV plane data, capturing each plane's size before get()
// consumes the buffer. Caution: this flat copy assumes tightly packed planes
// (rowStride == width, pixelStride == 1); see the stride-aware sketch below.
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
byte[] yuvData = new byte[ySize + uSize + vSize];
yBuffer.get(yuvData, 0, ySize);
uBuffer.get(yuvData, ySize, uSize);
vBuffer.get(yuvData, ySize + uSize, vSize);
// Convert YUV to RGB
int[] rgbData = new int[width * height];
yuvToRgb(yuvData, width, height, rgbData);
// Save the RGB data as a BMP
saveRgbToBmp(file, width, height, rgbData);
}
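// Illustrative sketch (not part of this commit; the helper name is hypothetical):
// a stride-aware flattening of a YUV_420_888 Image into packed I420 that honors
// each plane's rowStride and pixelStride, which the flat copy above cannot do on
// devices that pad rows or interleave chroma. Assumes even width and height.
private static byte[] toPackedI420(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] out = new byte[width * height * 3 / 2];
    int outPos = 0;
    for (int planeIdx = 0; planeIdx < 3; planeIdx++) {
        Image.Plane plane = image.getPlanes()[planeIdx];
        ByteBuffer buf = plane.getBuffer();
        int rowStride = plane.getRowStride();
        int pixelStride = plane.getPixelStride();
        int planeWidth = (planeIdx == 0) ? width : width / 2;
        int planeHeight = (planeIdx == 0) ? height : height / 2;
        for (int row = 0; row < planeHeight; row++) {
            for (int col = 0; col < planeWidth; col++) {
                out[outPos++] = buf.get(row * rowStride + col * pixelStride); // absolute get
            }
        }
    }
    return out;
}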
/**
* Runnable that saves an {@link Image} into the specified {@link File}, and updates
* {@link android.provider.MediaStore} to include the resulting file.
@@ -1617,6 +1724,12 @@ public class Camera2RawFragment extends Fragment {
}
break;
}
case ImageFormat.YUV_420_888: {
saveYuvAsBmp(mImage, mFile);
break;
}
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
FileOutputStream output = null;

@@ -0,0 +1,5 @@
package com.xypower.camera2raw;
public interface CompleteCallback {
void onResult();
}

@@ -0,0 +1,8 @@
package com.xypower.camera2raw;
public class Contants {
public static final String TAG = "MpPriview";
public static final String HDRNUM = "hdrnum";
public static final String CAMERAID = "CAMERAID";
public static final String FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS";
}

@@ -0,0 +1,45 @@
package com.xypower.camera2raw;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
public class HdrUtil {
public static String generateTimestamp() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss_SSS", Locale.US);
return sdf.format(new Date());
}
// public static void createHDR(Mat[] images, float[] exposureTimes, Mat hdrImage,String filepath) {
// Mat[] images32f = new Mat[images.length];
// for (int i = 0; i < images.length; i++) {
// images32f[i] = new Mat();
// images[i].convertTo(images32f[i], CvType.CV_32F);
// }
//
// Mat response = new Mat();
// Mat times = new Mat(exposureTimes.length, 1, CvType.CV_32F);
// for (int i = 0; i < exposureTimes.length; i++) {
// times.put(i, 0, exposureTimes[i]);
// }
//
// // Calibrate the camera response
// CalibrateDebevec calibrate = createCalibrateDebevec();
// calibrate.process(Arrays.asList(images32f), response, times);
//
// // Merge the images into an HDR image
// MergeDebevec merge = createMergeDebevec();
// merge.process(Arrays.asList(images32f), hdrImage, times, response);
// saveHDRImage(hdrImage,filepath);
// }
//
// public static void saveHDRImage(Mat hdrImage, String filePath) {
// Imgcodecs.imwrite(filePath, hdrImage);
// }
}
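The commented-out block above gestures at the classic Debevec HDR pipeline. Below is a minimal sketch, assuming the OpenCV Java bindings (org.opencv) are on the classpath and the native library is already loaded; the class and method names here are illustrative. Note that CalibrateDebevec and MergeDebevec expect 8-bit (CV_8UC3) inputs, so the CV_32F conversion in the commented code is not needed before merging:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.photo.CalibrateDebevec;
import org.opencv.photo.MergeDebevec;
import org.opencv.photo.Photo;
import java.util.List;

public class DebevecHdrSketch {
    // images: bracketed 8-bit BGR frames (CV_8UC3); exposureTimes in seconds.
    public static void mergeHdr(List<Mat> images, float[] exposureTimes, String filePath) {
        Mat times = new Mat(exposureTimes.length, 1, CvType.CV_32F);
        for (int i = 0; i < exposureTimes.length; i++) {
            times.put(i, 0, exposureTimes[i]);
        }
        // Recover the camera response curve from the bracketed frames.
        Mat response = new Mat();
        CalibrateDebevec calibrate = Photo.createCalibrateDebevec();
        calibrate.process(images, response, times);
        // Merge the frames into a single 32-bit float HDR image.
        Mat hdr = new Mat();
        MergeDebevec merge = Photo.createMergeDebevec();
        merge.process(images, hdr, times, response);
        Imgcodecs.imwrite(filePath, hdr); // use a float-capable format such as .hdr
    }
}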

@@ -0,0 +1,242 @@
package com.xypower.camera2raw;
import android.graphics.Bitmap;
import android.graphics.ImageDecoder;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
public class ImageSaver implements Runnable {
private final static String TAG = "HDR";
private final Image mImage;
private final File mFile;
private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics;
private CompleteCallback mCallback;
private final ImagePair mImagePair;
private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
public static class ImageInfo {
public long exposureTime;
public int length;
public Bitmap bitmap;
ImageInfo(Bitmap bmp, int length, long exp) {
this.exposureTime = exp;
this.length = length;
bitmap = bmp;
}
}
public static class ImagePair {
public List<ImageInfo> mImages;
public int mExpectedCount;
public Runnable mRunnable;
public ImagePair(int expectedCount) {
mImages = new ArrayList<>();
mExpectedCount = expectedCount;
mRunnable = null;
}
public void setRunnable(Runnable runnable) {
mRunnable = runnable;
}
public void addImage(Bitmap bitmap, int length, long exp) {
boolean isFull = false;
ImageInfo imageInfo = new ImageInfo(bitmap, length, exp);
synchronized (mImages) {
mImages.add(imageInfo);
isFull = (mImages.size() == mExpectedCount);
}
if (mRunnable != null && isFull) {
mRunnable.run();
}
}
public List<ImageInfo> getImages() {
return mImages;
}
}
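// Usage sketch (illustrative only, not part of this commit): collect the
// bracketed exposures and fire a callback once all expected frames arrive.
private static void imagePairDemo(Bitmap shortExposure, Bitmap longExposure) {
    final ImagePair pair = new ImagePair(2); // expecting two bracketed frames
    pair.setRunnable(new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "All frames captured: " + pair.getImages().size());
        }
    });
    pair.addImage(shortExposure, 0, 10000000L); // 10 ms exposure, in ns
    pair.addImage(longExposure, 0, 40000000L); // 40 ms exposure; fires the runnable
}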
public static abstract class ImagePairRunnable implements Runnable {
protected ImagePair mImagePair;
public ImagePairRunnable(ImagePair imagePair) {
mImagePair = imagePair;
}
}
public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics,
Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader,
CompleteCallback callback, ImagePair imagePair) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mReader = reader;
mCallback = callback;
mImagePair = imagePair;
}
@RequiresApi(api = Build.VERSION_CODES.P)
@Override
public void run() {
boolean success = false;
int format = mImage.getFormat();
switch (format) {
case ImageFormat.JPEG: {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
break;
}
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
if (mImagePair != null) {
ByteBuffer byteBuffer = null;
ByteBufferOutputStream baos = null;
Long t = mCaptureResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
try {
byteBuffer = ByteBuffer.allocateDirect(mImage.getWidth() * mImage.getHeight() * 2 + 81768);
baos = new ByteBufferOutputStream(byteBuffer);
Log.d(TAG, "Before Saving DNG Exp=" + t.toString());
dngCreator.writeImage(baos, mImage);
byteBuffer.limit(byteBuffer.position());
byteBuffer.flip();
Log.d(TAG, "After Saving DNG Exp=" + t.toString() + " size=" + byteBuffer.limit());
ImageDecoder.OnHeaderDecodedListener listener = new ImageDecoder.OnHeaderDecodedListener() {
@Override
public void onHeaderDecoded(@NonNull ImageDecoder decoder, @NonNull ImageDecoder.ImageInfo info, @NonNull ImageDecoder.Source source) {
// decoder.setAllocator(ImageDecoder.ALLOCATOR_SOFTWARE);
decoder.setAllocator(ImageDecoder.ALLOCATOR_HARDWARE);
// decoder.setTargetSize(4702, 3520);
}
};
Log.i(TAG, "Start Hardware Decoding Exp=" + t.toString() + " TID=" + Thread.currentThread().getId());
ImageDecoder imageDecoder = null;
Bitmap bmp = null;
// ImageDecoder.Source source = ImageDecoder.createSource(mFile);
ImageDecoder.Source source = ImageDecoder.createSource(byteBuffer);
try {
bmp = ImageDecoder.decodeBitmap(source, listener);
} catch (Exception ex) {
ex.printStackTrace();
}
Log.i(TAG, "End Hardware Decoding Exp=" + t.toString());
byteBuffer.clear();
byteBuffer = null;
mImagePair.addImage(bmp, 0, t.longValue());
// bmp.recycle();
// bmp = null;
success = true;
} catch (Exception e) {
e.printStackTrace();
} finally {
dngCreator.close();
mImage.close();
closeOutput(baos);
}
} else {
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
dngCreator.writeImage(output, mImage);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
dngCreator.close();
closeOutput(output);
}
}
break;
}
default: {
break;
}
}
mReader.close();
}
private static void closeOutput(OutputStream outputStream) {
if (null != outputStream) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
// private static void clear() {
// if (null != outputStream) {
// try {
// outputStream.close();
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
// }
// private void saveJpeg(Image image,String name) {
// Image.Plane[] planes = image.getPlanes();
// ByteBuffer buffer = planes[0].getBuffer();
// int pixelStride = planes[0].getPixelStride();
// int rowStride = planes[0].getRowStride();
// int rowPadding = rowStride - pixelStride * mWidth;
//
// Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
// bitmap.copyPixelsFromBuffer(buffer);
// //bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
// ImageSaveUtil.saveBitmap2file(bitmap,getApplicationContext(),name);
//
// }
}

@@ -0,0 +1,90 @@
package com.xypower.camera2raw;
import android.content.Context;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.media.Image;
import android.media.ImageReader;
import java.io.File;
import java.util.ArrayList;
public class ImageSaverBuilder {
public Image mImage;
public File mFile;
public CaptureResult mCaptureResult;
public CameraCharacteristics mCharacteristics;
public Context mContext;
public Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
public ImageSaver.ImagePair mImagePair;
private CompleteCallback mCallback;
/**
* Construct a new ImageSaverBuilder using the given {@link Context}.
*
* @param context a {@link Context} for accessing the
* {@link android.provider.MediaStore}.
*/
public ImageSaverBuilder(final Context context) {
mContext = context;
}
public synchronized ImageSaverBuilder setRefCountedReader(Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader) {
if (reader == null) throw new NullPointerException();
mReader = reader;
return this;
}
public synchronized ImageSaverBuilder setImage(final Image image) {
if (image == null) throw new NullPointerException();
mImage = image;
return this;
}
public synchronized ImageSaverBuilder setImagePair(final ImageSaver.ImagePair imagePair) {
if (imagePair == null) throw new NullPointerException();
mImagePair = imagePair;
return this;
}
public synchronized ImageSaverBuilder setFile(final File file) {
if (file == null) throw new NullPointerException();
mFile = file;
return this;
}
public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
if (result == null) throw new NullPointerException();
mCaptureResult = result;
return this;
}
public synchronized ImageSaverBuilder setCharacteristics(final CameraCharacteristics characteristics) {
if (characteristics == null) throw new NullPointerException();
mCharacteristics = characteristics;
return this;
}
public synchronized ImageSaverBuilder setCallback(CompleteCallback callback) {
mCallback = callback;
return this;
}
public synchronized ImageSaver buildIfComplete() {
if (!isComplete()) {
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mReader, mCallback, mImagePair);
}
public synchronized String getSaveLocation() {
return (mFile == null) ? "Unknown" : mFile.toString();
}
private boolean isComplete() {
return mImage != null && mFile != null && mCaptureResult != null && mCharacteristics != null;
}
}
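Typical use of the builder (a sketch only; context, characteristics, reader, image, file, result, and mBackgroundHandler are assumed to come from the surrounding fragment's callbacks): parts accumulate as the camera delivers them, and the saver is handed off only once everything is present.

ImageSaverBuilder builder = new ImageSaverBuilder(context)
        .setCharacteristics(characteristics)
        .setRefCountedReader(reader);
// later, from the image-available and capture-completed callbacks:
builder.setImage(image).setFile(file).setResult(result);
ImageSaver saver = builder.buildIfComplete(); // null until all required parts are set
if (saver != null) {
    mBackgroundHandler.post(saver);
}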

@@ -70,7 +70,6 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
private void takePicture() {
Camera2RawFragment fragment = (Camera2RawFragment) getSupportFragmentManager().findFragmentById(R.id.container);
fragment.isHandTakePic = false;
fragment.takePicture();
}
@@ -92,7 +91,6 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
PhotoUtil.openCamera(this, photoResultLauncher);
break;
}
}
