YUV capture HDR

Hdr_Yuv
liuguijing 3 months ago
parent 4b9232f549
commit e5026556f6

@ -9,7 +9,7 @@ android {
defaultConfig {
applicationId "com.xypower.mppreview"
minSdk 28
targetSdk 30
targetSdk 28
versionCode 3
versionName "1.2"

@ -434,16 +434,16 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE)
{
AHardwareBuffer* hardwareBuffer = NULL;
result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
void* outVirtualAddress = NULL;
int32_t fence = -1;
result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
tmp.copyTo(images[idx]);
AHardwareBuffer_unlock(hardwareBuffer, &fence);
AHardwareBuffer_release(hardwareBuffer);
// AHardwareBuffer* hardwareBuffer = NULL;
//// result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
//
// void* outVirtualAddress = NULL;
// int32_t fence = -1;
// result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
// cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
// tmp.copyTo(images[idx]);
// AHardwareBuffer_unlock(hardwareBuffer, &fence);
// AHardwareBuffer_release(hardwareBuffer);
}
else
{

@ -256,7 +256,8 @@ public class Camera2RawFragment extends Fragment {
* captures. This is used to allow us to clean up the {@link ImageReader} when all background
* tasks using its {@link Image}s have completed.
*/
// private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
private RefCountedAutoCloseable<ImageReader> mJpegImageReader;
private RefCountedAutoCloseable<ImageReader> mYuvImageReader;
/**
* A reference counted holder wrapping the {@link ImageReader} that handles RAW image captures.
@ -284,6 +285,7 @@ public class Camera2RawFragment extends Fragment {
* Request ID to {@link ImageSaverBuilder} mapping for in-progress RAW captures.
*/
private final TreeMap<Integer, ImageSaverBuilder> mRawResultQueue = new TreeMap<>();
private final TreeMap<Integer, ImageSaverBuilder> mYuvResultQueue = new TreeMap<>();
/**
* {@link CaptureRequest.Builder} for the camera preview
@ -367,6 +369,15 @@ public class Camera2RawFragment extends Fragment {
};
private final ImageReader.OnImageAvailableListener mOnYuvImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage(mYuvResultQueue, mYuvImageReader);
}
};
/**
* This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
* RAW image is ready to be saved.
@ -374,7 +385,7 @@ public class Camera2RawFragment extends Fragment {
private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
// dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
}
};
@ -458,15 +469,19 @@ public class Camera2RawFragment extends Fragment {
String currentDateTime = generateTimestamp();
File directory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
File rawFile = new File(directory, "RAW_" + currentDateTime + ".dng");
// File jpegFile = new File(directory, "JPEG_" + currentDateTime + ".png");
// File jpegFile = new File(directory, "JPEG_" + currentDateTime + ".jpg");
File yuvFile = new File(directory, "YUV_" + currentDateTime + ".bmp");
// ImageSaverBuilder jpegBuilder;
ImageSaverBuilder yuvBuilder;
ImageSaverBuilder rawBuilder;
int requestId = (int) request.getTag();
// jpegBuilder = mJpegResultQueue.get(requestId);
yuvBuilder = mYuvResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
// if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
if (yuvBuilder != null) yuvBuilder.setFile(yuvFile);
if (rawBuilder != null) rawBuilder.setFile(rawFile);
}
}
@ -476,11 +491,13 @@ public class Camera2RawFragment extends Fragment {
int requestId = (int) request.getTag();
ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder;
ImageSaverBuilder yuvBuilder;
StringBuilder sb = new StringBuilder();
synchronized (mCameraStateLock) {
// jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mYuvResultQueue.get(requestId);
// if (jpegBuilder != null) {
// jpegBuilder.setResult(result);
@ -494,8 +511,16 @@ public class Camera2RawFragment extends Fragment {
sb.append(rawBuilder.getSaveLocation());
}
if (yuvBuilder != null) {
yuvBuilder.setResult(result);
if (rawBuilder != null) sb.append(", ");
sb.append("Saving YUV as: ");
sb.append(yuvBuilder.getSaveLocation());
}
// handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);
handleCompletionLocked(requestId, yuvBuilder, mYuvResultQueue);
finishedCaptureLocked();
}
@ -788,7 +813,9 @@ public class Camera2RawFragment extends Fragment {
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// For still image captures, we use the largest available size.
Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestYuv = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
Size largestRaw = Collections.max(Arrays.asList(outputSizes), new CompareSizesByArea());
@ -807,6 +834,11 @@ public class Camera2RawFragment extends Fragment {
mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
}
mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler);
if (mYuvImageReader == null || mYuvImageReader.getAndRetain() == null) {
mYuvImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(largestYuv.getWidth(), largestYuv.getHeight(), ImageFormat.YUV_420_888, /*maxImages*/ 5));
}
mYuvImageReader.get().setOnImageAvailableListener(mOnYuvImageAvailableListener, mBackgroundHandler);
mCharacteristics = characteristics;
mCameraId = cameraId;
}
@ -907,10 +939,10 @@ public class Camera2RawFragment extends Fragment {
mCameraDevice.close();
mCameraDevice = null;
}
// if (null != mJpegImageReader) {
// mJpegImageReader.close();
// mJpegImageReader = null;
// }
if (null != mJpegImageReader) {
mJpegImageReader.close();
mJpegImageReader = null;
}
if (null != mRawImageReader) {
mRawImageReader.close();
mRawImageReader = null;
@ -1212,7 +1244,7 @@ public class Camera2RawFragment extends Fragment {
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface());
// Use the same AE and AF modes as the preview.
@ -1345,7 +1377,7 @@ public class Camera2RawFragment extends Fragment {
CaptureRequest request = captureBuilder.build();
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // save the capture parameters
rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@ -1355,7 +1387,7 @@ public class Camera2RawFragment extends Fragment {
}
});
rawBuilder.setList(mlist);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
requests.add(request);
}
@ -1375,10 +1407,10 @@ public class Camera2RawFragment extends Fragment {
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
// ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);

@ -9,12 +9,14 @@ import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import android.media.ImageReader;
import android.renderscript.RenderScript;
import android.util.Log;
import androidx.annotation.NonNull;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.ImageConverterUtil;
import java.io.File;
import java.io.FileOutputStream;
@ -31,6 +33,7 @@ public class ImageSaver implements Runnable {
private final File mFile;
private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics;
private final Context mContext;
private CompleteCallback mCallback;
private final ImagePair mImagePair;
@ -93,11 +96,12 @@ public class ImageSaver implements Runnable {
private ArrayList<PngPhotoBean> mlist = new ArrayList<>(); // stores the names of photos that have already been captured
public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics,
public ImageSaver(Image image, Context context, File file, CaptureResult result, CameraCharacteristics characteristics,
Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader, ArrayList<PngPhotoBean> list,
CompleteCallback callback, ImagePair imagePair) {
mImage = image;
mFile = file;
mContext = context;
mCaptureResult = result;
mCharacteristics = characteristics;
mReader = reader;
@ -128,6 +132,17 @@ public class ImageSaver implements Runnable {
}
break;
}
case ImageFormat.YUV_420_888: {
new Thread(new Runnable() {
@Override
public void run() {
Log.d("测试", "ceshi");
RenderScript rs = RenderScript.create(mContext);
new ImageConverterUtil().saveYuvImageFromImageReader(mImage, rs, "");
}
}).start();
break;
}
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);

@ -85,7 +85,7 @@ public class ImageSaverBuilder {
if (!isComplete()) {
return null;
}
return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mReader, mlist,mCallback, mImagePair);
return new ImageSaver(mImage, mContext, mFile, mCaptureResult, mCharacteristics, mReader, mlist, mCallback, mImagePair);
}
public synchronized String getSaveLocation() {

@ -0,0 +1,195 @@
package com.xypower.mppreview.utils;
import static com.xypower.mppreview.utils.HdrUtil.generateTimestamp;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.Environment;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
public class ImageConverterUtil {
private final String TAG = "ImageConverterUtil";
public void saveYuvImageFromImageReader(Image image, RenderScript rs, String fileName) {
// Image image = null;
try {
// // Acquire the latest image from ImageReader
// image = imageReader.acquireLatestImage();
// if (image == null) {
// Log.e(TAG, "No image available");
// return;
// }
// Convert Image to Bitmap directly using RenderScript
Bitmap bitmap = imageYuvToBitmap(image, rs);
// Save the bitmap to file (PNG format to avoid lossy compression)
saveBitmapToFile(bitmap, fileName);
Log.d(TAG, "Image saved successfully");
} catch (Exception e) {
Log.e(TAG, "Error saving image: " + e.getMessage());
} finally {
if (image != null) {
image.close();
}
}
}
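The commented-out block at the top of this method hints at pulling frames straight from an ImageReader instead of going through ImageSaver. A minimal sketch of that wiring, assuming a Context and background Handler are available (yuvReader, backgroundHandler and the "direct" suffix are illustrative, not part of this commit):

    // Drive the converter directly from an ImageReader callback (sketch).
    final RenderScript rs = RenderScript.create(context); // create once and reuse across frames
    yuvReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireLatestImage(); // may be null if no frame is queued
            if (image != null) {
                // saveYuvImageFromImageReader() closes the Image in its finally block.
                new ImageConverterUtil().saveYuvImageFromImageReader(image, rs, "direct");
            }
        }
    }, backgroundHandler);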
private Bitmap imageYuvToBitmap(Image image, RenderScript rs) {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Only YUV_420_888 format is supported");
}
int width = image.getWidth();
int height = image.getHeight();
// Convert YUV to NV21 format
byte[] nv21Data = yuv420ToNv21(image);
// Create output bitmap
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
// Use RenderScript for YUV to RGB conversion (more efficient)
Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs))
.setX(width)
.setY(height)
.setYuvFormat(ImageFormat.NV21);
Allocation yuvAllocation = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
yuvAllocation.copyFrom(nv21Data);
Allocation outputAllocation = Allocation.createFromBitmap(rs, bitmap);
ScriptIntrinsicYuvToRGB yuvToRgbScript = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
yuvToRgbScript.setInput(yuvAllocation);
yuvToRgbScript.forEach(outputAllocation);
outputAllocation.copyTo(bitmap);
// Clean up allocations
yuvAllocation.destroy();
outputAllocation.destroy();
yuvToRgbScript.destroy();
return bitmap;
}
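imageYuvToBitmap() creates and destroys the intrinsic and its allocations on every call, so repeated captures pay that setup cost each time. A small, hedged sketch of caching the intrinsic across conversions (the mYuvToRgb field is illustrative, not part of this commit):

    // Cache the YUV->RGB intrinsic so repeated conversions reuse one script instance.
    private ScriptIntrinsicYuvToRGB mYuvToRgb;

    private ScriptIntrinsicYuvToRGB getYuvToRgb(RenderScript rs) {
        if (mYuvToRgb == null) {
            mYuvToRgb = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
        }
        return mYuvToRgb;
    }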
private byte[] yuv420ToNv21(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer yBuffer = planes[0].getBuffer();
ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();
int ySize = yBuffer.remaining();
int width = image.getWidth();
int height = image.getHeight();
int uvSize = width * height / 4; // U and V are quarter size
byte[] nv21 = new byte[ySize + uvSize * 2];
// Copy Y plane as-is (assumes a tightly packed Y plane, i.e. rowStride == width; a stride-aware copy is sketched after this method)
yBuffer.get(nv21, 0, ySize);
// Interleave V and U planes into NV21 format (which is really NV12 with U and V swapped)
int uvRowStride = planes[1].getRowStride();
int uvPixelStride = planes[1].getPixelStride();
int pos = ySize;
for (int row = 0; row < height / 2; row++) {
for (int col = 0; col < width / 2; col++) {
int vuPos = col * uvPixelStride + row * uvRowStride;
nv21[pos++] = vBuffer.get(vuPos);
nv21[pos++] = uBuffer.get(vuPos);
}
}
return nv21;
}
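The bulk Y copy above only lines up when the Y plane's rowStride equals the image width; padded planes would shift every row. A hedged sketch of a stride-aware replacement for that single get(), written to slot into yuv420ToNv21 and reusing the variables already in the method:

    // Stride-aware Y copy: falls back to row-by-row copying when the plane is padded.
    int yRowStride = planes[0].getRowStride();
    if (yRowStride == width) {
        yBuffer.get(nv21, 0, width * height); // tightly packed: one bulk copy
    } else {
        byte[] row = new byte[yRowStride];
        for (int r = 0; r < height; r++) {
            yBuffer.position(r * yRowStride);
            yBuffer.get(row, 0, Math.min(yRowStride, yBuffer.remaining()));
            System.arraycopy(row, 0, nv21, r * width, width);
        }
    }
    // Note: with this copy the interleaved VU bytes start at width * height (not at yBuffer.remaining()),
    // and nv21 should be sized width * height * 3 / 2.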
// Alternative method using direct YUV-to-RGB color space conversion
private Bitmap yuvImageToBitmapDirect(Image image) {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Only YUV_420_888 format is supported");
}
int width = image.getWidth();
int height = image.getHeight();
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
// Get YUV data
Image.Plane[] planes = image.getPlanes();
ByteBuffer yBuffer = planes[0].getBuffer();
ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();
int yRowStride = planes[0].getRowStride();
int uvRowStride = planes[1].getRowStride();
int uvPixelStride = planes[1].getPixelStride();
// Convert YUV to RGB directly, pixel by pixel
int[] rgbData = new int[width * height];
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
int yIndex = j * yRowStride + i;
int uvJ = j / 2;
int uvI = i / 2;
int uvIndex = uvJ * uvRowStride + uvI * uvPixelStride;
int y = yBuffer.get(yIndex) & 0xFF;
int u = uBuffer.get(uvIndex) & 0xFF;
int v = vBuffer.get(uvIndex) & 0xFF;
// YUV to RGB conversion
y = y - 16;
u = u - 128;
v = v - 128;
int r = (int) (1.164 * y + 1.596 * v);
int g = (int) (1.164 * y - 0.813 * v - 0.391 * u);
int b = (int) (1.164 * y + 2.018 * u);
// Clamp RGB values
r = r > 255 ? 255 : (r < 0 ? 0 : r);
g = g > 255 ? 255 : (g < 0 ? 0 : g);
b = b > 255 ? 255 : (b < 0 ? 0 : b);
rgbData[j * width + i] = 0xFF000000 | (r << 16) | (g << 8) | b;
}
}
bitmap.setPixels(rgbData, 0, width, 0, 0, width, height);
return bitmap;
}
private void saveBitmapToFile(Bitmap bitmap, String fileName) throws IOException {
// File picturesDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
File picturesDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
// PNG data is written below, so use a .png extension; try-with-resources closes the stream even on failure
File outputFile = new File(picturesDir, "YUV_" + generateTimestamp() + "_" + fileName + ".png");
try (FileOutputStream outputStream = new FileOutputStream(outputFile)) {
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
outputStream.flush();
}
}
}
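For completeness: RenderScript is deprecated on recent Android releases, so a RenderScript-free fallback worth keeping in mind is android.graphics.YuvImage, which compresses an NV21 buffer straight to JPEG (lossy, unlike the PNG path above). A minimal sketch, assuming nv21, width and height come from yuv420ToNv21() and the Image, and that android.graphics.Rect and android.graphics.YuvImage are imported:

    // Sketch: write the NV21 frame as a JPEG without RenderScript.
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    File outputFile = new File(
            Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),
            "YUV_" + generateTimestamp() + ".jpg");
    try (FileOutputStream fos = new FileOutputStream(outputFile)) {
        // Quality 95 keeps artifacts low while staying much smaller than PNG.
        yuvImage.compressToJpeg(new Rect(0, 0, width, height), 95, fos);
    }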

@ -0,0 +1,37 @@
package com.xypower.mppreview.utils;
import java.io.IOException;
public class RouteManager {
// Add a route
public static void addRoute(String network, String interfaceName) {
try {
// Build the route command for routing table ccmni1
String command = "ip route add " + network + " dev " + interfaceName + " proto static scope link table ccmni1";
// Execute the command and wait for it to finish
Process process = Runtime.getRuntime().exec(command);
process.waitFor();
// Add the same route to routing table ccmni0
String command2 = "ip route add " + network + " dev " + interfaceName + " proto static scope link table ccmni0";
Process process2 = Runtime.getRuntime().exec(command2);
process2.waitFor();
// Check whether both commands succeeded
if (process.exitValue() == 0 && process2.exitValue() == 0) {
System.out.println("Routes added successfully: " + command + " ; " + command2);
} else {
System.err.println("Failed to add route: " + command);
}
} catch (IOException | InterruptedException e) {
e.printStackTrace();
}
}
// public static void main(String[] args) {
// // Example: add a route for the 192.168.68.0/24 subnet on the eth0 interface
// addRoute("192.168.68.0/24", "eth0");
// }
}