东视宽动态YUV

Hdr_Yuv
liuguijing 3 months ago
parent e5026556f6
commit cc092c35cb

@ -21,7 +21,9 @@
android:supportsRtl="true"
android:theme="@style/Theme.MpPreview"
android:requestLegacyExternalStorage="true"
android:largeHeap="true"
tools:targetApi="30">
<activity
android:name=".ui.CameraChannelActivity"
android:exported="false" />

@ -434,7 +434,7 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE)
{
// AHardwareBuffer* hardwareBuffer = NULL;
AHardwareBuffer* hardwareBuffer = NULL;
//// result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
//
// void* outVirtualAddress = NULL;
@ -452,7 +452,7 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress);
tmp.copyTo(images[idx]);
AndroidBitmap_unlockPixels(env, bitmaps[idx]);
tmp.release();
// tmp.release();
}
//convert RGB to BGR
@ -497,4 +497,40 @@ JNIEXPORT jboolean JNICALL
// Stub for decoding a DNG byte buffer to output_path. Not implemented yet.
// The JNIEXPORT declaration (context line above) gives this function a
// jboolean return type; falling off the end of a non-void function is
// undefined behavior in C++, so report failure explicitly until implemented.
Java_com_xypower_mppreview_Camera2RawFragment_decodeDng(JNIEnv *env, jclass clazz,
                                                        jobject byte_buffer, jstring output_path) {
    // TODO: implement decodeDng()
    return JNI_FALSE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_MainActivity_test(JNIEnv *env, jclass clazz) {
    // Ad-hoc HDR smoke test: load two fixed-exposure frames from disk, merge
    // them with makeHdr(), and write the fused result out as a JPEG.
    // Returns JNI_TRUE only if every step succeeded.
    std::vector<cv::Mat> images;
    {
        cv::Mat img1 = cv::imread("/sdcard/DCIM/YUV_20250315_184325_959_.bmp");
        // BUG FIX: the original pushed the Mat unconditionally; an unreadable
        // file yields an empty Mat and makeHdr would operate on garbage.
        if (img1.empty()) {
            ALOGI("makeHdr test: failed to load first input image");
            return JNI_FALSE;
        }
        images.push_back(img1);
    }
    {
        cv::Mat img2 = cv::imread("/sdcard/DCIM/YUV_20250315_184337_167_.bmp");
        if (img2.empty()) {
            ALOGI("makeHdr test: failed to load second input image");
            return JNI_FALSE;
        }
        images.push_back(img2);
    }

    // Exposure times in seconds (the literals are the original nanosecond values).
    std::vector<float> times;
    times.push_back((double)(1190000) / 1000000000.0);
    times.push_back((double)(8330000) / 1000000000.0);

    ALOGI("Start MakeHDR3");
    cv::Mat rgb;
    makeHdr(times, images, rgb);
    ALOGI("End MakeHDR3");

    // BUG FIX: fileName was "" in the original, which makes cv::imwrite fail
    // (or throw) unconditionally; write next to the inputs instead.
    std::string fileName = "/sdcard/DCIM/HDR_test.jpg";
    std::vector<int> params;
    params.push_back(cv::IMWRITE_JPEG_QUALITY);
    params.push_back(100);

    jboolean ok = JNI_FALSE;
    if (cv::imwrite(fileName.c_str(), rgb, params)) {
        ok = JNI_TRUE;
        ALOGI("End HDR3");
    }
    rgb.release();
    // BUG FIX: the original fell off the end of a jboolean-returning function
    // (undefined behavior); report success/failure explicitly.
    return ok;
}

@ -41,6 +41,7 @@ import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.renderscript.RenderScript;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
@ -64,6 +65,7 @@ import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.HdrUtil;
import com.xypower.mppreview.utils.ImageConverterUtil;
import com.xypower.mppreview.widget.ErrorDialog;
@ -489,7 +491,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
int requestId = (int) request.getTag();
ImageSaverBuilder jpegBuilder;
// ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder;
ImageSaverBuilder yuvBuilder;
StringBuilder sb = new StringBuilder();
@ -497,7 +499,7 @@ public class Camera2RawFragment extends Fragment {
synchronized (mCameraStateLock) {
// jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mYuvResultQueue.get(requestId);
// if (jpegBuilder != null) {
// jpegBuilder.setResult(result);
@ -532,7 +534,8 @@ public class Camera2RawFragment extends Fragment {
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
int requestId = (int) request.getTag();
synchronized (mCameraStateLock) {
mJpegResultQueue.remove(requestId);
// mJpegResultQueue.remove(requestId);
mYuvResultQueue.remove(requestId);
mRawResultQueue.remove(requestId);
finishedCaptureLocked();
}
@ -803,7 +806,8 @@ public class Camera2RawFragment extends Fragment {
}
try {
// Find a CameraDevice that supports RAW captures, and configure state.
for (String cameraId : manager.getCameraIdList()) {
String[] cameraIdList = manager.getCameraIdList();
for (String cameraId : cameraIdList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We only use a camera that supports RAW in this sample.
@ -814,7 +818,9 @@ public class Camera2RawFragment extends Fragment {
// For still image captures, we use the largest available size.
Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestYuv = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
List<Size> coll = Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888));
// Size largestYuv = Collections.max(coll, new CompareSizesByArea());
Size largestYuv = coll.get(1);
outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
@ -1001,7 +1007,7 @@ public class Camera2RawFragment extends Fragment {
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
mCameraDevice.createCaptureSession(Arrays.asList(surface, mYuvImageReader.get().getSurface(),mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
synchronized (mCameraStateLock) {
@ -1244,8 +1250,9 @@ public class Camera2RawFragment extends Fragment {
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mJpegImageReader.get().getSurface());
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface());
captureBuilder.addTarget(mYuvImageReader.get().getSurface());
// Use the same AE and AF modes as the preview.
setup3AControlsLocked(captureBuilder);
@ -1287,7 +1294,8 @@ public class Camera2RawFragment extends Fragment {
runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override
public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages();
// final List<ImageSaver.ImageInfo> images = imagePair.getImages();
List<Image> images = imagePair.getMImages();
final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() {
@Override
@ -1295,12 +1303,48 @@ public class Camera2RawFragment extends Fragment {
if (images.size() != 2) {
return;
}
ImageSaver.ImageInfo img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1);
// Image img1 = images.get(0);
// Image img2 = images.get(1);
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
// RenderScript rs = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img1, rs,"");
// Log.d("开始Hdr处理", "strat");
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// RenderScript rs2 = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img2, rs,"");
//// String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
//// RenderScript rs = RenderScript.create(getContext());
//// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
////// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
//// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
//// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
//// boolean b = makeHdr3(1000, bitmap, 0,2000, bitmap1, 0, hdrOutputPath);
Image img1 = images.get(0);
Image img2 = images.get(1);
ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
RenderScript rs = RenderScript.create(getContext());
Bitmap bitmap = imageConverterUtil1.imageYuvToBitmap(img1, rs);
Log.d("开始Hdr处理", "strat");
ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// RenderScript rs2 = RenderScript.create(getContext());
Bitmap bitmap1 = imageConverterUtil1.imageYuvToBitmap(img2, rs);
Log.d("HDR测试", "Hdr");
String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
// RenderScript rs = RenderScript.create(getContext());
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
//// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
boolean b = makeHdr3(1000, bitmap, 0,2000, bitmap1, 0, hdrOutputPath);
Log.d("HDR测试2", "Hdr");
// Mat mat1 = new Mat();
// Mat mat2 = new Mat();
@ -1311,27 +1355,27 @@ public class Camera2RawFragment extends Fragment {
// Mat[] mats = {mat1, mat2};
// float[] floats = {img1.exposureTime,img2.exposureTime};
// HdrUtil.createHDR(mats, floats,Hdrmat,hdrOutputPath);
img1.bitmap.recycle();
img2.bitmap.recycle();
// img1.bitmap.recycle();
// img2.bitmap.recycle();
img1 = null;
img2 = null;
images.clear();
Log.d("结束Hdr处理", "end");
if (b) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// 在主线程中执行UI更新
// ...
takepic.setVisibility(View.VISIBLE);
rorpic.clearAnimation();
rorpic.setVisibility(View.GONE);
showToast("HDR拍摄成功");
}
});
}
// if (b) {
// getActivity().runOnUiThread(new Runnable() {
// @Override
// public void run() {
// // 在主线程中执行UI更新
// // ...
// takepic.setVisibility(View.VISIBLE);
// rorpic.clearAnimation();
// rorpic.setVisibility(View.GONE);
// showToast("HDR拍摄成功");
// }
// });
//
// }
}
}).start();
@ -1340,7 +1384,7 @@ public class Camera2RawFragment extends Fragment {
imagePair.setRunnable(runnable);
for (int idx = 0; idx < 2; idx++) {
for (int idx = 0; idx < 1; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
@ -1350,7 +1394,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
if (exposureTime > 0) {
v = exposureTime;
v = (long) ((long) exposureTime * (7));
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
}
if (sensitivity > 0) {
@ -1363,7 +1407,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 2);
if (exposureTime > 0) {
if (pic1 <= 0) {
v = exposureTime * DEFAULT_COMPATATION;
v = exposureTime *7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
} else {
v = exposureTime * pic1;
@ -1377,8 +1421,9 @@ public class Camera2RawFragment extends Fragment {
CaptureRequest request = captureBuilder.build();
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数
rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@Override
@ -1387,8 +1432,24 @@ public class Camera2RawFragment extends Fragment {
}
});
rawBuilder.setList(mlist);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数
yuvBuilder.setImagePair(imagePair);
yuvBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
yuvBuilder.setList(mlist);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);
requests.add(request);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
@ -1407,11 +1468,13 @@ public class Camera2RawFragment extends Fragment {
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);
mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
}

@ -16,6 +16,7 @@ import androidx.annotation.NonNull;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.DeepCopyUtil;
import com.xypower.mppreview.utils.ImageConverterUtil;
import java.io.File;
@ -29,7 +30,7 @@ import java.util.List;
public class ImageSaver implements Runnable {
private final static String TAG = "HDR";
private final Image mImage;
private Image mImage;
private final File mFile;
private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics;
@ -38,6 +39,7 @@ public class ImageSaver implements Runnable {
private final ImagePair mImagePair;
private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
private ArrayList<Image> objects = new ArrayList<>();
public static class ImageInfo {
public long exposureTime;
@ -53,11 +55,14 @@ public class ImageSaver implements Runnable {
public static class ImagePair {
public List<ImageInfo> mImages;
public List<Image> images;
public int mExpectedCount;
public Runnable mRunnable;
/**
 * Creates an empty pair collector.
 *
 * @param expectedCount number of images that must be added before the
 *                      completion runnable is triggered.
 */
public ImagePair(int expectedCount) {
    mImages = new ArrayList<>();
    images = new ArrayList<>();
    mExpectedCount = expectedCount;
    // Runnable is supplied later (after construction) by the capture code.
    mRunnable = null;
}
@ -79,9 +84,24 @@ public class ImageSaver implements Runnable {
}
}
/**
 * Adds a captured {@link Image} to this pair; when the number of collected
 * images reaches the count given at construction, the completion runnable
 * is executed (on the calling thread).
 *
 * NOTE(review): the monitor taken is {@code mImages}, but the list being
 * mutated is {@code images} — confirm that every other reader/writer of
 * {@code images} synchronizes on the same object, otherwise this is not
 * thread-safe.
 * NOTE(review): {@code mRunnable} is read outside the synchronized block;
 * a runnable installed concurrently with the final add may be missed —
 * verify the set-runnable-then-add ordering at call sites.
 */
public void addMImage(Image image) {
    boolean isFull = false;
    synchronized (mImages) {
        images.add(image);
        // "Full" means exactly mExpectedCount images have arrived.
        isFull = (images.size() == mExpectedCount);
    }
    // Run the callback outside the lock so user code never executes while
    // the list monitor is held.
    if (mRunnable != null && isFull) {
        mRunnable.run();
    }
}
/**
 * Returns the collected {@link ImageInfo} entries.
 * NOTE(review): this is the live internal list, not a defensive copy —
 * callers can mutate internal state through it.
 */
public List<ImageInfo> getImages() {
    return mImages;
}
/**
 * Returns the raw {@link Image} objects collected so far.
 * NOTE(review): live internal list, not a copy; access from other threads
 * should use the same synchronization as {@code addMImage}.
 */
public List<Image> getMImages() {
    return images;
}
}
public static abstract class ImagePairRunnable implements Runnable {
@ -133,12 +153,18 @@ public class ImageSaver implements Runnable {
break;
}
case ImageFormat.YUV_420_888: {
// if (mImagePair != null) {
// mImagePair.addMImage(mImage);
// }
new Thread(new Runnable() {
@Override
public void run() {
Log.d("测试", "ceshi");
// Image image = DeepCopyUtil.deepCopy(mImage);
RenderScript rs = RenderScript.create(mContext);
new ImageConverterUtil().saveYuvImageFromImageReader(mImage, rs, "");
}
}).start();
break;

@ -14,6 +14,7 @@ import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraManager;
import android.net.ConnectivityManager;
@ -63,6 +64,9 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
private com.xypower.mppreview.databinding.ActivityMainBinding viewBinding;
private int numberOfCameras;
public static native boolean test();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@ -184,7 +188,9 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
startActivity(intent);
break;
case R.id.systakepic:
PhotoUtil.openCamera(this, photoResultLauncher);
// PhotoUtil.openCamera(this, photoResultLauncher);
// test();
break;
// case R.id.channel1:
// openChannelActivity(0);

@ -0,0 +1,22 @@
package com.xypower.mppreview.utils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
public class DeepCopyUtil {

    /**
     * Creates a deep copy of {@code object} by round-tripping it through Java
     * serialization. The entire object graph must be {@link java.io.Serializable};
     * otherwise a {@link java.io.NotSerializableException} (an {@code IOException})
     * is thrown. (In particular, {@code android.media.Image} is not serializable —
     * verify before using this on camera frames.)
     *
     * @param <T>    the type of the object being copied
     * @param object the object to copy; {@code null} yields {@code null}
     * @return an independent copy sharing no mutable state with the original
     * @throws IOException            if serialization of the graph fails
     * @throws ClassNotFoundException if a class in the stream cannot be resolved
     */
    @SuppressWarnings("unchecked") // readObject round-trips the same T that was written
    public static <T> T deepCopy(T object) throws IOException, ClassNotFoundException {
        // Serialize the object graph into an in-memory buffer.
        // BUG FIX: the original never closed its streams; try-with-resources
        // guarantees close() (which also flushes) even on failure.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(object);
        }
        // Deserialize from the buffer to obtain the independent copy.
        try (ObjectInputStream ois =
                     new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            return (T) ois.readObject();
        }
    }
}

@ -48,7 +48,7 @@ public class ImageConverterUtil {
}
}
private Bitmap imageYuvToBitmap(Image image, RenderScript rs) {
public Bitmap imageYuvToBitmap(Image image, RenderScript rs) {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Only YUV_420_888 format is supported");
}

Loading…
Cancel
Save