@@ -41,6 +41,7 @@ import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.renderscript.RenderScript;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
@@ -64,6 +65,7 @@ import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.utils.HdrUtil;
import com.xypower.mppreview.utils.ImageConverterUtil;
import com.xypower.mppreview.widget.ErrorDialog;
@@ -489,7 +491,7 @@ public class Camera2RawFragment extends Fragment {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
int requestId = (int) request.getTag();
ImageSaverBuilder jpegBuilder;
// ImageSaverBuilder jpegBuilder;
ImageSaverBuilder rawBuilder;
ImageSaverBuilder yuvBuilder;
StringBuilder sb = new StringBuilder();
@@ -497,7 +499,7 @@ public class Camera2RawFragment extends Fragment {
synchronized (mCameraStateLock) {
// jpegBuilder = mJpegResultQueue.get(requestId);
rawBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mRawResultQueue.get(requestId);
yuvBuilder = mYuvResultQueue.get(requestId);

// if (jpegBuilder != null) {
// jpegBuilder.setResult(result);
@@ -532,7 +534,8 @@ public class Camera2RawFragment extends Fragment {
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
int requestId = (int) request.getTag();
synchronized (mCameraStateLock) {
mJpegResultQueue.remove(requestId);
// mJpegResultQueue.remove(requestId);
mYuvResultQueue.remove(requestId);
mRawResultQueue.remove(requestId);
finishedCaptureLocked();
}
@@ -803,7 +806,8 @@ public class Camera2RawFragment extends Fragment {
}
try {
// Find a CameraDevice that supports RAW captures, and configure state.
for (String cameraId : manager.getCameraIdList()) {
String[] cameraIdList = manager.getCameraIdList();
for (String cameraId : cameraIdList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

// We only use a camera that supports RAW in this sample.
@@ -814,7 +818,9 @@ public class Camera2RawFragment extends Fragment {
// For still image captures, we use the largest available size.
Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
// Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size largestYuv = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
List<Size> coll = Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888));
// Size largestYuv = Collections.max(coll, new CompareSizesByArea());
Size largestYuv = coll.get(1);

outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
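Note: the commented-out Collections.max(coll, new CompareSizesByArea()) line presumably picked the largest YUV_420_888 size by area, whereas coll.get(1) depends on whatever order getOutputSizes() returns. As an illustration only (the helper name is hypothetical, not project code), an explicit area-based selection could look like:

    import android.util.Size;

    // Hypothetical helper: return the output size with the largest pixel area.
    static Size largestByArea(Size[] sizes) {
        Size best = sizes[0];
        for (Size s : sizes) {
            if ((long) s.getWidth() * s.getHeight() > (long) best.getWidth() * best.getHeight()) {
                best = s;
            }
        }
        return best;
    }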
@@ -1001,7 +1007,7 @@ public class Camera2RawFragment extends Fragment {
mPreviewRequestBuilder.addTarget(surface);

// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
mCameraDevice.createCaptureSession(Arrays.asList(surface, mYuvImageReader.get().getSurface(), mRawImageReader.get().getSurface()), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
synchronized (mCameraStateLock) {
@@ -1244,8 +1250,9 @@ public class Camera2RawFragment extends Fragment {
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

captureBuilder.addTarget(mJpegImageReader.get().getSurface());
// captureBuilder.addTarget(mJpegImageReader.get().getSurface());
captureBuilder.addTarget(mRawImageReader.get().getSurface());
captureBuilder.addTarget(mYuvImageReader.get().getSurface());

// Use the same AE and AF modes as the preview.
setup3AControlsLocked(captureBuilder);
@@ -1287,7 +1294,8 @@ public class Camera2RawFragment extends Fragment {
runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override
public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages();
// final List<ImageSaver.ImageInfo> images = imagePair.getImages();
List<Image> images = imagePair.getMImages();
final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() {
@Override
@@ -1295,12 +1303,48 @@ public class Camera2RawFragment extends Fragment {
if (images.size() != 2) {
return;
}
ImageSaver.ImageInfo img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1);
// Image img1 = images.get(0);
// Image img2 = images.get(1);
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
// RenderScript rs = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img1, rs, "");
// Log.d("开始Hdr处理", "strat");
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// RenderScript rs2 = RenderScript.create(getContext());
// imageConverterUtil1.saveYuvImageFromImageReader(img2, rs, "");
//// String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
//// RenderScript rs = RenderScript.create(getContext());
//// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
////// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
//// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
//// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
//// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
//// boolean b = makeHdr3(1000, bitmap, 0, 2000, bitmap1, 0, hdrOutputPath);

Image img1 = images.get(0);
Image img2 = images.get(1);
ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
RenderScript rs = RenderScript.create(getContext());
Bitmap bitmap = imageConverterUtil1.imageYuvToBitmap(img1, rs);
Log.d("开始Hdr处理", "strat");
ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// RenderScript rs2 = RenderScript.create(getContext());
Bitmap bitmap1 = imageConverterUtil1.imageYuvToBitmap(img2, rs);

Log.d("HDR测试", "Hdr");
String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
// RenderScript rs = RenderScript.create(getContext());
// ImageConverterUtil imageConverterUtil1 = new ImageConverterUtil();
//// boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
// Bitmap img11 = imageConverterUtil1.imageYuvToBitmap(img1, rs);
// ImageConverterUtil imageConverterUtil2 = new ImageConverterUtil();
// Bitmap img12 = imageConverterUtil2.imageYuvToBitmap(img2, rs);
boolean b = makeHdr3(1000, bitmap, 0, 2000, bitmap1, 0, hdrOutputPath);

Log.d("HDR测试2", "Hdr");

// Mat mat1 = new Mat();
// Mat mat2 = new Mat();
@@ -1311,27 +1355,27 @@ public class Camera2RawFragment extends Fragment {
// Mat[] mats = {mat1, mat2};
// float[] floats = {img1.exposureTime, img2.exposureTime};
// HdrUtil.createHDR(mats, floats, Hdrmat, hdrOutputPath);
img1.bitmap.recycle();
img2.bitmap.recycle();
// img1.bitmap.recycle();
// img2.bitmap.recycle();
img1 = null;
img2 = null;
images.clear();

Log.d("结束Hdr处理", "end");
if (b) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// Run the UI update on the main thread
// ...
takepic.setVisibility(View.VISIBLE);
rorpic.clearAnimation();
rorpic.setVisibility(View.GONE);
showToast("HDR拍摄成功");
}
});

}
// if (b) {
// getActivity().runOnUiThread(new Runnable() {
// @Override
// public void run() {
// // Run the UI update on the main thread
// // ...
// takepic.setVisibility(View.VISIBLE);
// rorpic.clearAnimation();
// rorpic.setVisibility(View.GONE);
// showToast("HDR拍摄成功");
// }
// });
//
// }

}
}).start();
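Note: ImageConverterUtil.imageYuvToBitmap is project code that is not shown in this diff. Purely as an illustration of the general technique, a YUV_420_888 Image is often converted to a Bitmap by repacking it as NV21 and decoding through YuvImage; the sketch below assumes contiguous, interleaved chroma planes (pixelStride == 2), which holds on many but not all devices, and the method name is hypothetical:

    import android.graphics.Bitmap;
    import android.graphics.BitmapFactory;
    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;
    import android.media.Image;
    import java.io.ByteArrayOutputStream;
    import java.nio.ByteBuffer;

    // Rough stand-in for a YUV_420_888 -> Bitmap conversion (assumptions above).
    static Bitmap yuvToBitmap(Image image) {
        int width = image.getWidth();
        int height = image.getHeight();
        ByteBuffer y = image.getPlanes()[0].getBuffer();
        ByteBuffer vu = image.getPlanes()[2].getBuffer(); // V plane; interleaved V/U on many devices
        byte[] nv21 = new byte[width * height * 3 / 2];
        y.get(nv21, 0, Math.min(y.remaining(), width * height));
        vu.get(nv21, width * height, Math.min(vu.remaining(), nv21.length - width * height));

        // Compress the NV21 frame to JPEG, then decode it back into a Bitmap.
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, width, height), 95, out);
        byte[] jpeg = out.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }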
@@ -1340,7 +1384,7 @@ public class Camera2RawFragment extends Fragment {
imagePair.setRunnable(runnable);

for (int idx = 0; idx < 2; idx++) {
for (int idx = 0; idx < 1; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
@@ -1350,7 +1394,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
if (exposureTime > 0) {
v = exposureTime;
v = (long) ((long) exposureTime * (7));
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
}
if (sensitivity > 0) {
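Note: with CONTROL_AE_MODE_OFF, the multiplied SENSOR_EXPOSURE_TIME can fall outside what the sensor supports. As a hedged sketch only (the helper name is hypothetical, not project code), the requested value could be clamped to the range advertised by the camera:

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CaptureRequest;
    import android.util.Range;

    // Hypothetical helper: clamp a bracketed exposure time (ns) to the sensor's
    // advertised range before applying it to the still-capture request.
    static void setClampedExposure(CameraCharacteristics characteristics,
                                   CaptureRequest.Builder builder,
                                   long desiredExposureNs) {
        Range<Long> range = characteristics.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
        long exposure = desiredExposureNs;
        if (range != null) {
            exposure = Math.max(range.getLower(), Math.min(range.getUpper(), exposure));
        }
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
        builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposure);
    }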
@@ -1363,7 +1407,7 @@ public class Camera2RawFragment extends Fragment {
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 2);
if (exposureTime > 0) {
if (pic1 <= 0) {
v = exposureTime * DEFAULT_COMPATATION;
v = exposureTime * 7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
} else {
v = exposureTime * pic1;
@@ -1377,8 +1421,9 @@ public class Camera2RawFragment extends Fragment {
CaptureRequest request = captureBuilder.build();

ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // save capture parameters

rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@Override
@@ -1387,8 +1432,24 @@ public class Camera2RawFragment extends Fragment {
}
});
rawBuilder.setList(mlist);
mJpegResultQueue.put((int) request.getTag(), jpegBuilder);

ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // save capture parameters

yuvBuilder.setImagePair(imagePair);
yuvBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
yuvBuilder.setList(mlist);

// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);

requests.add(request);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
@@ -1407,11 +1468,13 @@ public class Camera2RawFragment extends Fragment {
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder yuvBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);

mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
mYuvResultQueue.put((int) request.getTag(), yuvBuilder);

mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
}