东视宽动态YUV

Hdr_Yuv
liuguijing 3 months ago
parent e5026556f6
commit cc092c35cb

@ -21,7 +21,9 @@
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/Theme.MpPreview" android:theme="@style/Theme.MpPreview"
android:requestLegacyExternalStorage="true" android:requestLegacyExternalStorage="true"
android:largeHeap="true"
tools:targetApi="30"> tools:targetApi="30">
<activity <activity
android:name=".ui.CameraChannelActivity" android:name=".ui.CameraChannelActivity"
android:exported="false" /> android:exported="false" />

@ -434,7 +434,7 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE) if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE)
{ {
// AHardwareBuffer* hardwareBuffer = NULL; AHardwareBuffer* hardwareBuffer = NULL;
//// result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer); //// result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
// //
// void* outVirtualAddress = NULL; // void* outVirtualAddress = NULL;
@ -452,7 +452,7 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress); cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress);
tmp.copyTo(images[idx]); tmp.copyTo(images[idx]);
AndroidBitmap_unlockPixels(env, bitmaps[idx]); AndroidBitmap_unlockPixels(env, bitmaps[idx]);
tmp.release(); // tmp.release();
} }
//convert RGB to BGR //convert RGB to BGR
@ -498,3 +498,39 @@ Java_com_xypower_mppreview_Camera2RawFragment_decodeDng(JNIEnv *env, jclass claz
jobject byte_buffer, jstring output_path) { jobject byte_buffer, jstring output_path) {
// TODO: implement decodeDng() // TODO: implement decodeDng()
} }
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_MainActivity_test(JNIEnv *env, jclass clazz) {
    // Smoke test for the HDR pipeline: load two fixed exposure-bracketed
    // frames from disk, merge them with makeHdr(), and write the result as a
    // maximum-quality JPEG. Returns JNI_TRUE only if the whole chain succeeds.
    std::vector<cv::Mat> images;
    {
        cv::Mat img1 = cv::imread("/sdcard/DCIM/YUV_20250315_184325_959_.bmp");
        if (img1.empty()) {
            ALOGI("makeHdr test: failed to load first input image");
            return JNI_FALSE;
        }
        images.push_back(img1);
    }
    {
        cv::Mat img2 = cv::imread("/sdcard/DCIM/YUV_20250315_184337_167_.bmp");
        if (img2.empty()) {
            ALOGI("makeHdr test: failed to load second input image");
            return JNI_FALSE;
        }
        images.push_back(img2);
    }

    // Exposure times in seconds (inputs are nanoseconds).
    std::vector<float> times;
    times.push_back((double)(1190000) / 1000000000.0);
    times.push_back((double)(8330000) / 1000000000.0);

    ALOGI("Start MakeHDR3");
    cv::Mat rgb;
    makeHdr(times, images, rgb);
    ALOGI("End MakeHDR3");

    // BUGFIX: the output path was previously an empty string, so imwrite()
    // could never succeed. NOTE(review): confirm the desired output location.
    std::string fileName = "/sdcard/DCIM/HDR_test.jpg";
    std::vector<int> params;
    params.push_back(cv::IMWRITE_JPEG_QUALITY);
    params.push_back(100);

    bool ok = cv::imwrite(fileName.c_str(), rgb, params);
    if (ok)
    {
        ALOGI("End HDR3");
    }
    rgb.release();
    // BUGFIX: the function is declared jboolean but previously returned
    // nothing on any path (undefined behavior in C++).
    return ok ? JNI_TRUE : JNI_FALSE;
}

@ -41,6 +41,7 @@ import android.os.HandlerThread;
import android.os.Looper; import android.os.Looper;
import android.os.Message; import android.os.Message;
import android.os.SystemClock; import android.os.SystemClock;
import android.renderscript.RenderScript;
import android.util.Log; import android.util.Log;
import android.util.Range; import android.util.Range;
import android.util.Rational; import android.util.Rational;
@ -64,6 +65,7 @@ import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean; import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback; import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.HdrUtil; import com.xypower.mppreview.utils.HdrUtil;
import com.xypower.mppreview.utils.ImageConverterUtil;
import com.xypower.mppreview.widget.ErrorDialog; import com.xypower.mppreview.widget.ErrorDialog;
@ -489,7 +491,7 @@ public class Camera2RawFragment extends Fragment {
@Override @Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
int requestId = (int) request.getTag(); int requestId = (int) request.getTag();
ImageSaverBuilder jpegBuilder; // ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder; ImageSaverBuilder rawBuilder;
ImageSaverBuilder yuvBuilder; ImageSaverBuilder yuvBuilder;
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
@ -497,7 +499,7 @@ public class Camera2RawFragment extends Fragment {
synchronized (mCameraStateLock) { synchronized (mCameraStateLock) {
// jpegBuilder = mJpegResultQueue.get(requestId); // jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId); rawBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mRawResultQueue.get(requestId); yuvBuilder = mYuvResultQueue.get(requestId);
// if (jpegBuilder != null) { // if (jpegBuilder != null) {
// jpegBuilder.setResult(result); // jpegBuilder.setResult(result);
@ -532,7 +534,8 @@ public class Camera2RawFragment extends Fragment {
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
int requestId = (int) request.getTag(); int requestId = (int) request.getTag();
synchronized (mCameraStateLock) { synchronized (mCameraStateLock) {
mJpegResultQueue.remove(requestId); // mJpegResultQueue.remove(requestId);
mYuvResultQueue.remove(requestId);
mRawResultQueue.remove(requestId); mRawResultQueue.remove(requestId);
finishedCaptureLocked(); finishedCaptureLocked();
} }
@ -803,7 +806,8 @@ public class Camera2RawFragment extends Fragment {
} }
try { try {
// Find a CameraDevice that supports RAW captures, and configure state. // Find a CameraDevice that supports RAW captures, and configure state.
for (String cameraId : manager.getCameraIdList()) { String[] cameraIdList = manager.getCameraIdList();
for (String cameraId : cameraIdList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We only use a camera that supports RAW in this sample. // We only use a camera that supports RAW in this sample.
@ -814,7 +818,9 @@ public class Camera2RawFragment extends Fragment {
// For still image captures, we use the largest available size. // For still image captures, we use the largest available size.
Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888); Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); // Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestYuv = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea()); List<Size> coll = Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888));
// Size largestYuv = Collections.max(coll, new CompareSizesByArea());
Size largestYuv = coll.get(1);
outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR); outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
@ -1001,7 +1007,7 @@ public class Camera2RawFragment extends Fragment {
mPreviewRequestBuilder.addTarget(surface); mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview. // Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() { mCameraDevice.createCaptureSession(Arrays.asList(surface, mYuvImageReader.get().getSurface(),mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
@Override @Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) { public void onConfigured(CameraCaptureSession cameraCaptureSession) {
synchronized (mCameraStateLock) { synchronized (mCameraStateLock) {
@ -1244,8 +1250,9 @@ public class Camera2RawFragment extends Fragment {
// This is the CaptureRequest.Builder that we use to take a picture. // This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mJpegImageReader.get().getSurface()); // captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface()); captureBuilder.addTarget(mRawImageReader.get().getSurface());
captureBuilder.addTarget(mYuvImageReader.get().getSurface());
// Use the same AE and AF modes as the preview. // Use the same AE and AF modes as the preview.
setup3AControlsLocked(captureBuilder); setup3AControlsLocked(captureBuilder);
@ -1287,7 +1294,8 @@ public class Camera2RawFragment extends Fragment {
runnable = new ImageSaver.ImagePairRunnable(imagePair) { runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override @Override
public void run() { public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages(); // final List<ImageSaver.ImageInfo> images = imagePair.getImages();
List<Image> images = imagePair.getMImages();
final String outputPath = "/sdcard/DCIM/"; final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
@ -1295,12 +1303,48 @@ public class Camera2RawFragment extends Fragment {
if (images.size() != 2) { if (images.size() != 2) {
return; return;
} }
ImageSaver.ImageInfo img1 = images.get(0); // Image img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1); // Image img2 = images.get(1);
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
// RenderScript rs = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img1, rs,"");
// Log.d("开始Hdr处理", "strat");
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// RenderScript rs2 = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img2, rs,"");
//// String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
//// RenderScript rs = RenderScript.create(getContext());
//// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
////// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
//// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
//// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
//// boolean b = makeHdr3(1000, bitmap, 0,2000, bitmap1, 0, hdrOutputPath);
Image img1 = images.get(0);
Image img2 = images.get(1);
ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
RenderScript rs = RenderScript.create(getContext());
Bitmap bitmap = imageConverterUtil1.imageYuvToBitmap(img1, rs);
Log.d("开始Hdr处理", "strat"); Log.d("开始Hdr处理", "strat");
ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// RenderScript rs2 = RenderScript.create(getContext());
Bitmap bitmap1 = imageConverterUtil1.imageYuvToBitmap(img2, rs);
Log.d("HDR测试", "Hdr");
String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp"; String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath); // RenderScript rs = RenderScript.create(getContext());
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
//// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
boolean b = makeHdr3(1000, bitmap, 0,2000, bitmap1, 0, hdrOutputPath);
Log.d("HDR测试2", "Hdr");
// Mat mat1 = new Mat(); // Mat mat1 = new Mat();
// Mat mat2 = new Mat(); // Mat mat2 = new Mat();
@ -1311,27 +1355,27 @@ public class Camera2RawFragment extends Fragment {
// Mat[] mats = {mat1, mat2}; // Mat[] mats = {mat1, mat2};
// float[] floats = {img1.exposureTime,img2.exposureTime}; // float[] floats = {img1.exposureTime,img2.exposureTime};
// HdrUtil.createHDR(mats, floats,Hdrmat,hdrOutputPath); // HdrUtil.createHDR(mats, floats,Hdrmat,hdrOutputPath);
img1.bitmap.recycle(); // img1.bitmap.recycle();
img2.bitmap.recycle(); // img2.bitmap.recycle();
img1 = null; img1 = null;
img2 = null; img2 = null;
images.clear(); images.clear();
Log.d("结束Hdr处理", "end"); Log.d("结束Hdr处理", "end");
if (b) { // if (b) {
getActivity().runOnUiThread(new Runnable() { // getActivity().runOnUiThread(new Runnable() {
@Override // @Override
public void run() { // public void run() {
// 在主线程中执行UI更新 // // 在主线程中执行UI更新
// ... // // ...
takepic.setVisibility(View.VISIBLE); // takepic.setVisibility(View.VISIBLE);
rorpic.clearAnimation(); // rorpic.clearAnimation();
rorpic.setVisibility(View.GONE); // rorpic.setVisibility(View.GONE);
showToast("HDR拍摄成功"); // showToast("HDR拍摄成功");
} // }
}); // });
//
} // }
} }
}).start(); }).start();
@ -1340,7 +1384,7 @@ public class Camera2RawFragment extends Fragment {
imagePair.setRunnable(runnable); imagePair.setRunnable(runnable);
for (int idx = 0; idx < 2; idx++) { for (int idx = 0; idx < 1; idx++) {
// Set request tag to easily track results in callbacks. // Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement()); captureBuilder.setTag(mRequestCounter.getAndIncrement());
@ -1350,7 +1394,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
if (exposureTime > 0) { if (exposureTime > 0) {
v = exposureTime; v = (long) ((long) exposureTime * (7));
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v); captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
} }
if (sensitivity > 0) { if (sensitivity > 0) {
@ -1363,7 +1407,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 2); captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 2);
if (exposureTime > 0) { if (exposureTime > 0) {
if (pic1 <= 0) { if (pic1 <= 0) {
v = exposureTime * DEFAULT_COMPATATION; v = exposureTime *7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v); captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
} else { } else {
v = exposureTime * pic1; v = exposureTime * pic1;
@ -1377,8 +1421,9 @@ public class Camera2RawFragment extends Fragment {
CaptureRequest request = captureBuilder.build(); CaptureRequest request = captureBuilder.build();
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数 ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数
rawBuilder.setImagePair(imagePair); rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() { rawBuilder.setCallback(new CompleteCallback() {
@Override @Override
@ -1387,8 +1432,24 @@ public class Camera2RawFragment extends Fragment {
} }
}); });
rawBuilder.setList(mlist); rawBuilder.setList(mlist);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);//保存拍照参数
yuvBuilder.setImagePair(imagePair);
yuvBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
yuvBuilder.setList(mlist);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder); mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);
requests.add(request); requests.add(request);
} }
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler); mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
@ -1407,11 +1468,13 @@ public class Camera2RawFragment extends Fragment {
// Create an ImageSaverBuilder in which to collect results, and add it to the queue // Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests. // of active requests.
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder); // mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder); mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);
mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler); mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
} }

@ -16,6 +16,7 @@ import androidx.annotation.NonNull;
import com.xypower.mppreview.bean.PngPhotoBean; import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback; import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.DeepCopyUtil;
import com.xypower.mppreview.utils.ImageConverterUtil; import com.xypower.mppreview.utils.ImageConverterUtil;
import java.io.File; import java.io.File;
@ -29,7 +30,7 @@ import java.util.List;
public class ImageSaver implements Runnable { public class ImageSaver implements Runnable {
private final static String TAG = "HDR"; private final static String TAG = "HDR";
private final Image mImage; private Image mImage;
private final File mFile; private final File mFile;
private final CaptureResult mCaptureResult; private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics; private final CameraCharacteristics mCharacteristics;
@ -38,6 +39,7 @@ public class ImageSaver implements Runnable {
private final ImagePair mImagePair; private final ImagePair mImagePair;
private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader; private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
private ArrayList<Image> objects = new ArrayList<>();
public static class ImageInfo { public static class ImageInfo {
public long exposureTime; public long exposureTime;
@ -53,11 +55,14 @@ public class ImageSaver implements Runnable {
public static class ImagePair { public static class ImagePair {
public List<ImageInfo> mImages; public List<ImageInfo> mImages;
public List<Image> images;
public int mExpectedCount; public int mExpectedCount;
public Runnable mRunnable; public Runnable mRunnable;
public ImagePair(int expectedCount) { public ImagePair(int expectedCount) {
mImages = new ArrayList<>(); mImages = new ArrayList<>();
images = new ArrayList<>();
mExpectedCount = expectedCount; mExpectedCount = expectedCount;
mRunnable = null; mRunnable = null;
} }
@ -79,9 +84,24 @@ public class ImageSaver implements Runnable {
} }
} }
// Registers one captured Image with this pair; once the expected number of
// images has been collected, fires the completion runnable exactly once.
//
// NOTE(review): the critical section locks mImages but mutates the separate
// `images` list — presumably both lists are meant to share one lock, but if
// any other path guards `images` differently this is not thread-safe; confirm
// against the sibling addImage()/getMImages() call sites.
public void addMImage(Image image) {
boolean isFull = false;
synchronized (mImages) {
images.add(image);
// The pair is complete when the collected count reaches mExpectedCount.
isFull = (images.size() == mExpectedCount);
}
// The runnable is invoked outside the synchronized block, so user code
// does not execute while the lock is held.
if (mRunnable != null && isFull) {
mRunnable.run();
}
}
public List<ImageInfo> getImages() { public List<ImageInfo> getImages() {
return mImages; return mImages;
} }
/**
 * Returns the list of raw {@code Image} frames collected for this pair.
 * This is the live backing list, not a defensive copy.
 */
public List<Image> getMImages() {
    return this.images;
}
} }
public static abstract class ImagePairRunnable implements Runnable { public static abstract class ImagePairRunnable implements Runnable {
@ -133,12 +153,18 @@ public class ImageSaver implements Runnable {
break; break;
} }
case ImageFormat.YUV_420_888: { case ImageFormat.YUV_420_888: {
// if (mImagePair != null) {
// mImagePair.addMImage(mImage);
// }
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
Log.d("测试", "ceshi"); Log.d("测试", "ceshi");
// Image image = DeepCopyUtil.deepCopy(mImage);
RenderScript rs = RenderScript.create(mContext); RenderScript rs = RenderScript.create(mContext);
new ImageConverterUtil().saveYuvImageFromImageReader(mImage, rs, ""); new ImageConverterUtil().saveYuvImageFromImageReader(mImage, rs, "");
} }
}).start(); }).start();
break; break;

@ -14,6 +14,7 @@ import android.Manifest;
import android.content.Context; import android.content.Context;
import android.content.Intent; import android.content.Intent;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraManager;
import android.net.ConnectivityManager; import android.net.ConnectivityManager;
@ -63,6 +64,9 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
private com.xypower.mppreview.databinding.ActivityMainBinding viewBinding; private com.xypower.mppreview.databinding.ActivityMainBinding viewBinding;
private int numberOfCameras; private int numberOfCameras;
public static native boolean test();
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
@ -184,7 +188,9 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
startActivity(intent); startActivity(intent);
break; break;
case R.id.systakepic: case R.id.systakepic:
PhotoUtil.openCamera(this, photoResultLauncher); // PhotoUtil.openCamera(this, photoResultLauncher);
// test();
break; break;
// case R.id.channel1: // case R.id.channel1:
// openChannelActivity(0); // openChannelActivity(0);

@ -0,0 +1,22 @@
package com.xypower.mppreview.utils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
 * Utility for producing deep copies of objects by round-tripping them
 * through Java serialization.
 */
public class DeepCopyUtil {

    /**
     * Returns a deep copy of {@code object} by serializing it to an in-memory
     * byte buffer and deserializing it back.
     *
     * <p>The entire object graph must implement {@link java.io.Serializable}.
     * NOTE(review): {@code android.media.Image} is NOT Serializable, so this
     * cannot deep-copy camera frames — verify the intended call sites (e.g.
     * the commented-out use in ImageSaver) before relying on it there.
     *
     * @param object the object to copy; {@code null} yields {@code null}
     * @param <T>    the object's type
     * @return an independent copy of {@code object}
     * @throws IOException            if serialization fails
     * @throws ClassNotFoundException if a class in the serialized graph
     *                                cannot be resolved during deserialization
     */
    @SuppressWarnings("unchecked") // readObject() returns Object; the round-trip preserves T.
    public static <T> T deepCopy(T object) throws IOException, ClassNotFoundException {
        // Serialize the object graph into an in-memory buffer.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(object);
        } // try-with-resources flushes and closes the stream (was leaked before).

        // Deserialize a fresh, independent copy from the buffer.
        try (ObjectInputStream ois =
                new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
            return (T) ois.readObject();
        }
    }
}

@ -48,7 +48,7 @@ public class ImageConverterUtil {
} }
} }
private Bitmap imageYuvToBitmap(Image image, RenderScript rs) { public Bitmap imageYuvToBitmap(Image image, RenderScript rs) {
if (image.getFormat() != ImageFormat.YUV_420_888) { if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Only YUV_420_888 format is supported"); throw new IllegalArgumentException("Only YUV_420_888 format is supported");
} }

Loading…
Cancel
Save