@@ -59,6 +59,8 @@ import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;

import org.opencv.android.OpenCVLoader;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -91,6 +93,7 @@ public class Camera2RawFragment extends Fragment {
     */
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 0);
        ORIENTATIONS.append(Surface.ROTATION_90, 90);
@@ -368,7 +371,7 @@ public class Camera2RawFragment extends Fragment {
        @Override
        public void onImageAvailable(ImageReader reader) {
            dequeueAndSaveImage(mJpegResultQueue, mJpegImageReader);
            dequeueAndSaveImage( mJpegResultQueue, mJpegImageReader);
        }

    };
@@ -381,11 +384,13 @@ public class Camera2RawFragment extends Fragment {
        @Override
        public void onImageAvailable(ImageReader reader) {
            dequeueAndSaveImage(mRawResultQueue, mRawImageReader);
            dequeueAndSaveImage( mRawResultQueue, mRawImageReader);
        }

    };

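    // Latest auto-exposure time and ISO reported by the preview capture results; cached here
    // so they can be reused for the still capture (see captureStillPictureLocked(exposetime, sensitivity)).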
    private Long exposetime;
    private Integer sensitivity;
    /**
     * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview and
     * pre-capture sequence.
@@ -403,8 +408,10 @@ public class Camera2RawFragment extends Fragment {
                        boolean readyToCapture = true;
                        if (!mNoAFRun) {
                            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                            Long exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); // get the auto exposure time
                            Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY); // get the auto ISO
                            // get the auto exposure time
                            exposetime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                            // get the auto ISO
                            sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
                            if (afState == null) {
                                break;
                            }
@@ -436,7 +443,7 @@ public class Camera2RawFragment extends Fragment {
                        if (readyToCapture && mPendingUserCaptures > 0) {
                            // Capture once for each user tap of the "Picture" button.
                            while (mPendingUserCaptures > 0) {
                                captureStillPictureLocked(exposureTime, sensitivity);
                                captureStillPictureLocked(exposetime, sensitivity);
                                mPendingUserCaptures--;
                            }
                            // After this, the camera will go back to the normal state of preview.
@@ -466,6 +473,7 @@ public class Camera2RawFragment extends Fragment {
    private final CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
            synchronized (mCameraStateLock) {
                String currentDateTime = generateTimestamp();
                File rawFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
                File jpegFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
@@ -475,14 +483,13 @@ public class Camera2RawFragment extends Fragment {
                ImageSaver.ImageSaverBuilder jpegBuilder;
                ImageSaver.ImageSaverBuilder rawBuilder;
                int requestId = (int) request.getTag();
                synchronized (mCameraStateLock) {
                    jpegBuilder = mJpegResultQueue.get(requestId);
                    rawBuilder = mRawResultQueue.get(requestId);
                }

                if (jpegBuilder != null) jpegBuilder.setFile(jpegFile);
                if (rawBuilder != null) rawBuilder.setFile(rawFile);
            }
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
@@ -503,7 +510,7 @@ public class Camera2RawFragment extends Fragment {
                }
                if (rawBuilder != null) {
                    rawBuilder.setResult(result);
                    if (jpegBuilder != null) sb.append(", ");
                    if (rawBuilder != null) sb.append(", ");
                    sb.append("Saving RAW as: ");
                    sb.append(rawBuilder.getSaveLocation());
                }
@@ -512,7 +519,7 @@ public class Camera2RawFragment extends Fragment {
                handleCompletionLocked(requestId, jpegBuilder, mJpegResultQueue);
                handleCompletionLocked(requestId, rawBuilder, mRawResultQueue);

                finishedCaptureLocked();
                // finishedCaptureLocked();
            }

            showToast(sb.toString());
@@ -544,8 +551,12 @@ public class Camera2RawFragment extends Fragment {
        }
    };
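    // isHandTakePic enables the manual exposure/ISO path when building the still-capture burst;
    // pic1/pic2 hold the raw values typed into the exposure and ISO fields; j counts how many
    // times the ImageSaver runnable has run.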
    public boolean isHandTakePic = true;
    private long exposureTime = 0;
    private int sensitivity = 0;
    private double pic1 = 1;
    private double pic2 = 1;
    private int i = 0;
    private static int j = 0;
    // private long exposureTime = 0;
    // private int sensitivity = 0;

    public static Camera2RawFragment newInstance() {
        return new Camera2RawFragment();
@@ -577,12 +588,12 @@ public class Camera2RawFragment extends Fragment {
        Button takepic = view.findViewById(R.id.takepic);
        EditText baoguang = view.findViewById(R.id.baoguang);
        EditText iso = view.findViewById(R.id.iso);
        if (exposureTime > 0) {
            baoguang.setText(exposureTime + "");
        }
        if (sensitivity > 0) {
            iso.setText(sensitivity + "");
        }
        // if (exposureTime > 0) {
        // baoguang.setText(exposureTime + "");
        // }
        // if (sensitivity > 0) {
        // iso.setText(sensitivity + "");
        // }
        takepic.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
@@ -590,10 +601,11 @@ public class Camera2RawFragment extends Fragment {
                String s1 = baoguang.getText().toString();
                String s2 = iso.getText().toString();
                if (s1 != null && !s1.equals("")) {
                    exposureTime = Long.parseLong(s1);
                    pic1 = Double.parseDouble(s1);
                }
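                // Note: CaptureRequest.SENSOR_EXPOSURE_TIME is specified in nanoseconds; the value
                // entered here is passed on without unit conversion, apart from the doubling applied
                // to the second request of the burst in captureStillPictureLocked.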
                if (s2 != null && !s2.equals("")) {
                    sensitivity = Integer.parseInt(s2);
                    // sensitivity = Integer.parseInt(s2);
                    pic2 = Double.parseDouble(s2);
                }
                takePicture();
            }
@@ -749,6 +761,8 @@ public class Camera2RawFragment extends Fragment {
            Size[] rawSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
            // Size largestRaw = new Size(5760, 4312);

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
@@ -1032,20 +1046,6 @@ public class Camera2RawFragment extends Fragment {
            // Allow AWB to run auto-magically if this device supports this
            builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
        }

        // // Set the exposure time, e.g. 1000 microseconds
        //// long exposureTime = 1000 000000L; // 1000 microseconds
        // if (isHandTakePic) {
        // builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
        // if (exposureTime > 0) {
        // long value = exposureTime * 1000L;
        // builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, value);
        // }
        // if (sensitivity > 0) {
        // builder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
        // }
        // }

    }

    /**
@@ -1074,11 +1074,6 @@ public class Camera2RawFragment extends Fragment {
                ex.printStackTrace();
            }
        }
        // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
        // StreamConfigurationMap streamConfigurationMap = mCharacteristics.get(
        // CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
        // System.out.println(streamConfigurationMap);
        // }
        // For still image captures, we always use the largest available size.
        Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());

@@ -1133,22 +1128,6 @@ public class Camera2RawFragment extends Fragment {
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();

        // Initially, output stream images from the Camera2 API will be rotated to the native
        // device orientation from the sensor's orientation, and the TextureView will default to
        // scaling these buffers to fill its view bounds. If the aspect ratios and relative
        // orientations are correct, this is fine.
        //
        // However, if the device orientation has been rotated relative to its native
        // orientation so that the TextureView's dimensions are swapped relative to the
        // native device orientation, we must do the following to ensure the output stream
        // images are not incorrectly scaled by the TextureView:
        //   - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
        //     in the native device orientation) to the TextureView's dimension.
        //   - Apply a scale-to-fill from the output buffer's rotated dimensions
        //     (i.e. its dimensions in the current device orientation) to the TextureView's
        //     dimensions.
        //   - Apply the rotation from the native device orientation to the current device
        //     rotation.
        if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
@@ -1254,24 +1233,41 @@ public class Camera2RawFragment extends Fragment {
        // mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
        List<CaptureRequest> requests = new ArrayList<>();
        for (int idx = 0; idx <2; idx++) {
        for (int idx = 0; idx < 2; idx++) {
            // Set request tag to easily track results in callbacks.
            captureBuilder.setTag(mRequestCounter.getAndIncrement());

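            // Second request of the two-shot burst: when manual capture is enabled, auto-exposure
            // is turned off and the user-entered ISO is applied, with the exposure time doubled.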
            if (idx == 1) {
                // captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
                // Set the exposure time, e.g. 1000 microseconds
                // long exposureTime = 1000 000000L; // 1000 microseconds
                if (isHandTakePic) {
                    captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
                    if (exposureTime > 0) {
                        captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureTime * 2);
                    }
                    if (sensitivity > 0) {
                        captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
                    }
                }
            }
            // if (idx == 0) {
            //// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
            // // Set the exposure time, e.g. 1000 microseconds
            //// long exposureTime = 1000 000000L; // 1000 microseconds
            // if (isHandTakePic) {
            // captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
            // if (exposureTime > 0) {
            // double v = exposureTime * pic1;
            // captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
            // }
            // if (sensitivity > 0) {
            // captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
            // }
            // }
            // }
            //
            // if (idx == 1) {
            //// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
            // // Set the exposure time, e.g. 1000 microseconds
            //// long exposureTime = 1000 000000L; // 1000 microseconds
            // if (isHandTakePic) {
            // captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
            // if (exposureTime > 0) {
            // double v = exposureTime * pic2;
            // captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long)v);
            // }
            // if (sensitivity > 0) {
            // captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
            // }
            // }
            // }

            CaptureRequest request = captureBuilder.build();

@@ -1288,7 +1284,7 @@ public class Camera2RawFragment extends Fragment {
            requests.add(request);
        }

        // mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
        mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
        //
        // // Set request tag to easily track results in callbacks.
        // captureBuilder.setTag(mRequestCounter.getAndIncrement());
@ -1343,9 +1339,23 @@ public class Camera2RawFragment extends Fragment {
|
|
|
|
|
* @param reader a reference counted wrapper containing an {@link ImageReader} from which
|
|
|
|
|
* to acquire an image.
|
|
|
|
|
*/
|
|
|
|
|
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
|
|
|
|
|
private void dequeueAndSaveImage( TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue, RefCountedAutoCloseable<ImageReader> reader) {
|
|
|
|
|
synchronized (mCameraStateLock) {
|
|
|
|
|
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
|
|
|
|
|
Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = null;
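            // Pick a pending request whose builder has not yet received an image; note that
            // without a break this loop ends up keeping the last such entry rather than the first.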
            if (pendingQueue != null) {
                for (Map.Entry<Integer, ImageSaver.ImageSaverBuilder> item : pendingQueue.entrySet()) {
                    ImageSaver.ImageSaverBuilder value = item.getValue();
                    if (value.mImage == null) {
                        entry = item;
                    }
                }
            }
            // Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();

            // ImageSaver.ImageSaverBuilder builder = entry.getValue();
            if (entry == null) {
                return;
            }
            ImageSaver.ImageSaverBuilder builder = entry.getValue();

            // Increment reference count to prevent ImageReader from being closed while we
@@ -1365,9 +1375,15 @@ public class Camera2RawFragment extends Fragment {
                pendingQueue.remove(entry.getKey());
                return;
            }

            // String currentDateTime = generateTimestamp();
            // File file;
            // if (type == 0) {
            // file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "JPEG_" + currentDateTime + ".jpg");
            // } else {
            // file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "RAW_" + currentDateTime + ".dng");
            // }
            // builder.setFile(file);
            builder.setRefCountedReader(reader).setImage(image);

            handleCompletionLocked(entry.getKey(), builder, pendingQueue);
        }
    }
@@ -1421,6 +1437,7 @@ public class Camera2RawFragment extends Fragment {
        @Override
        public void run() {
            System.out.println("Save thread executed " + (j++) + " times");
            boolean success = false;
            int format = mImage.getFormat();
            switch (format) {
@@ -1539,6 +1556,7 @@ public class Camera2RawFragment extends Fragment {
        public synchronized ImageSaver buildIfComplete() {
            if (!isComplete()) {
                Log.e("Check for problems", "problem");
                return null;
            }
            return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader);
@@ -1815,8 +1833,11 @@ public class Camera2RawFragment extends Fragment {
     * @param queue the queue to remove this request from, if completed.
     */
    private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder, TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
        if (builder == null) return;
        if (builder == null) {
            return;
        }
        ImageSaver saver = builder.buildIfComplete();
        System.out.println();
        if (saver != null) {
            queue.remove(requestId);
            AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
@@ -1881,4 +1902,5 @@ public class Camera2RawFragment extends Fragment {
    }

}