Fix memory-leak bugs; add manual tap-to-focus (with cancel)

master
liuguijing 3 months ago
parent 2c6ff1a17b
commit 692c48f50c
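
Note on the tap-to-focus change: the fragment below gains calculateFocusArea(), triggerAutoFocus() and resetAutoFocus(), while the preview touch listener itself is left commented out. Below is a minimal sketch of how a tap could drive the same sequence, combining the metering-region setup from triggerAutoFocus() with the single-shot trigger used in startControlAFRequest(). The class and method names (TouchFocusSketch, focusAt) are illustrative only and not part of this commit.

    import android.graphics.Rect;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.MeteringRectangle;
    import android.os.Handler;

    public final class TouchFocusSketch {
        // Point AF/AE metering at the tapped region, then fire a single AF trigger.
        public static void focusAt(CaptureRequest.Builder previewBuilder,
                                   CameraCaptureSession session,
                                   CameraCaptureSession.CaptureCallback callback,
                                   Handler handler,
                                   Rect focusArea) throws CameraAccessException {
            MeteringRectangle[] regions = {new MeteringRectangle(focusArea, MeteringRectangle.METERING_WEIGHT_MAX)};
            previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
            previewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, regions);
            previewBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, regions);
            // Keep the metering regions active on the repeating preview request.
            session.setRepeatingRequest(previewBuilder.build(), callback, handler);
            // A tap only needs one AF trigger, so send it as a single capture.
            previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
            session.capture(previewBuilder.build(), callback, handler);
            // Clear the trigger so later repeating requests do not re-trigger AF every frame.
            previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
        }
    }

Sending the trigger with capture() and then resetting it to IDLE is what keeps the repeating preview request from re-triggering autofocus on every frame.
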

@@ -84,5 +84,4 @@ dependencies {
    // CameraX View class
    implementation "androidx.camera:camera-view:1.1.0"
}

@ -3,6 +3,7 @@
xmlns:tools="http://schemas.android.com/tools" xmlns:tools="http://schemas.android.com/tools"
package="com.xypower.mppreview"> package="com.xypower.mppreview">
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CAMERA" /> <uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" /> <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

@@ -57,5 +57,11 @@ bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& r
    ldrReinhard.convertTo(rgb, CV_8U);
    ldrReinhard.release();
+   mergeDebevec.release();
+   responseDebevec.release();
+   alignMTB.release();
+   tonemapReinhard.release();
+   calibrateDebevec.release();
+   mergeDebevec.release();
    return true;
}

@@ -671,6 +671,7 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
        cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
        tmp.copyTo(images[idx]);
        AHardwareBuffer_unlock(hardwareBuffer, &fence);
+       AHardwareBuffer_release(hardwareBuffer);
    }
    else
    {
@@ -679,14 +680,16 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
        cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress);
        tmp.copyTo(images[idx]);
        AndroidBitmap_unlockPixels(env, bitmaps[idx]);
+       tmp.release();
    }
    // convert RGB to BGR
    cv::cvtColor(images[idx], images[idx], cv::COLOR_RGB2BGR);
    // ConvertDngToPng(pngDatas[idx], pngLengths[idx], images[idx]);
}
+bitmaps.clear();
+env->DeleteLocalRef(img1);
+env->DeleteLocalRef(img2);
ALOGI("End Decode");
cv::Mat rgb;
@@ -704,6 +707,11 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
    params.push_back(100);
    if (cv::imwrite(fileName.c_str(), rgb, params))
    {
+       rgb.release();
+       // images[0].release();
+       // images[1].release();
+       // images.clear();
        ALOGI("End HDR3");
        return JNI_TRUE;
    }

@@ -47,6 +47,7 @@ import android.util.Rational;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
+import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.TextureView;
@@ -62,8 +63,10 @@ import android.widget.Toast;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
+import com.xypower.mppreview.utils.HdrUtil;
import com.xypower.mppreview.widget.ErrorDialog;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
@@ -139,6 +142,9 @@ public class Camera2RawFragment extends Fragment {
    private ExecutorService executorService;
    private Button takepic;
    private ImageView rorpic;
+   private Rect focusArea;
+   private ImageSaver.ImagePairRunnable runnable;
+   private ImageSaver.ImagePair imagePair;
    public static native boolean makeHdr(long exposureTime1, String path1, long exposureTime2, String path2, String outputPath);
@@ -386,6 +392,11 @@ public class Camera2RawFragment extends Fragment {
    private CameraCaptureSession.CaptureCallback mPreCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        private void process(CaptureResult result) {
+           // Integer afStates = result.get(CaptureResult.CONTROL_AF_STATE);
+           // if (afStates != null && (afStates == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afStates == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)) {
+           //     resetAutoFocus();
+           // }
            synchronized (mCameraStateLock) {
                switch (mState) {
                    case STATE_PREVIEW: {
@@ -558,10 +569,117 @@
                takePicture();
            }
        });
+       // mTextureView.setOnTouchListener(new View.OnTouchListener() {
+       //     @Override
+       //     public boolean onTouch(View v, MotionEvent event) {
+       //         if (event.getAction() == MotionEvent.ACTION_DOWN) {
+       //             float x = event.getX();
+       //             float y = event.getY();
+       //
+       //             // Convert the touch coordinates into a camera focus region
+       //             focusArea = calculateFocusArea(x, y);
+       //             triggerAutoFocus(focusArea);
+       //         }
+       //         return true;
+       //     }
+       // });
        executorService = Executors.newFixedThreadPool(2);
    }
+   @Override
+   public void onDestroy() {
+       super.onDestroy();
+       imagePair = null;
+       runnable = null;
+       mTextureView = null;
+       executorService.shutdown();
+   }
+   private Rect calculateFocusArea(float x, float y) {
+       int viewWidth = mTextureView.getWidth();
+       int viewHeight = mTextureView.getHeight();
+       int focusSize = 200; // size of the focus region
+       // int left = (int) (x / viewWidth * 2000 - 1000 - focusSize / 2);
+       // int top = (int) (y / viewHeight * 2000 - 1000 - focusSize / 2);
+       int left = (int) (x - focusSize / 2);
+       int top = (int) (y - focusSize / 2);
+       int right = left + focusSize;
+       int bottom = top + focusSize;
+       if (left < 0) {
+           left = 0;
+       }
+       if (top < 0) {
+           top = 0;
+       }
+       if (right < 0) {
+           right = 0;
+       }
+       if (bottom < 0) {
+           bottom = 0;
+       }
+       return new Rect(left, top, right, bottom);
+   }
+   private void triggerAutoFocus(Rect focusArea) {
+       try {
+           // CaptureRequest.Builder mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{new MeteringRectangle(focusArea, 1000)});
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{new MeteringRectangle(focusArea, 1000)});
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+           //
+           // try {
+           // The AE/AF regions are applied by continuously sending the request via setRepeatingRequest
+           mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
+           // } catch (CameraAccessException e) {
+           //     e.printStackTrace();
+           // }
+           // mCaptureSession.capture(captureBuilder.build(), null, null);
+           // startControlAFRequest(new MeteringRectangle(focusArea, 1000), mPreCaptureCallback);
+           // mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
+       } catch (CameraAccessException e) {
+           e.printStackTrace();
+       }
+       //
+       // int previewWidth = mTextureView.getWidth();
+       // int previewHeight = mTextureView.getHeight();
+       // RectF previewRect = new RectF(focusArea.left, focusArea.top, previewWidth, previewHeight);
+       //
+       // CoordinateTransformer cf = new CoordinateTransformer(mCharacteristics, previewRect);
+       //
+       // RectF rect = cf.toCameraSpace(previewRect);
+       // MeteringRectangle mr = new MeteringRectangle(new Rect((int) rect.left, (int) rect.top, (int) rect.right, (int) rect.bottom), 1000);
+       // startControlAFRequest(new MeteringRectangle(focusArea, 1000), mPreCaptureCallback);
+   }
+   private void resetAutoFocus() {
+       try {
+           // CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+           // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+           // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+           mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+           // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+           // mCaptureSession.capture(captureBuilder.build(), mPreCaptureCallback, null);
+           mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
+       } catch (CameraAccessException e) {
+           e.printStackTrace();
+       }
+   }
    @Override
    public void onResume() {
        super.onResume();
@@ -627,7 +745,7 @@
    }
    public void startControlAFRequest(MeteringRectangle rect, CameraCaptureSession.CaptureCallback captureCallback) {
+       // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
        MeteringRectangle[] rectangle = new MeteringRectangle[]{rect};
        // Focus Mode AUTO
        // mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
@@ -643,6 +761,7 @@
        }
        // Trigger autofocus
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
        try {
            // The focus trigger is sent with capture(), since a tap only needs to trigger focus once
            mCaptureSession.capture(mPreviewRequestBuilder.build(), captureCallback, mBackgroundHandler);
@@ -1117,14 +1236,28 @@
            double step = rational.doubleValue();
            captureBuilder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
+           // if (focusArea != null) {
+           //     captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+           //     captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
+           //     captureBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{new MeteringRectangle(focusArea, 1000)});
+           //     captureBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{new MeteringRectangle(focusArea, 1000)});
+           //     captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+           // }
+           //
+           // try {
+           // The AE/AF regions are applied by continuously sending the request via setRepeatingRequest
+           // mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
            if (pic1 < 21) {
                // mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
                ArrayList<PngPhotoBean> mlist = new ArrayList<>();
                List<CaptureRequest> requests = new ArrayList<>();
                long v = 0;
-               ImageSaver.ImagePair imagePair = new ImageSaver.ImagePair(2);
-               ImageSaver.ImagePairRunnable runnable = new ImageSaver.ImagePairRunnable(imagePair) {
+               imagePair = new ImageSaver.ImagePair(2);
+               runnable = new ImageSaver.ImagePairRunnable(imagePair) {
                    @Override
                    public void run() {
                        final List<ImageSaver.ImageInfo> images = imagePair.getImages();
@@ -1140,11 +1273,23 @@
                        Log.d("开始Hdr处理", "strat");
                        String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".bmp";
                        boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
+                       // Mat mat1 = new Mat();
+                       // Mat mat2 = new Mat();
+                       // Mat Hdrmat = new Mat();
+                       // Utils.bitmapToMat(img1.bitmap, mat1);
+                       // Utils.bitmapToMat(img2.bitmap, mat2);
+                       //
+                       // Mat[] mats = {mat1, mat2};
+                       // float[] floats = {img1.exposureTime, img2.exposureTime};
+                       // HdrUtil.createHDR(mats, floats, Hdrmat, hdrOutputPath);
                        img1.bitmap.recycle();
                        img2.bitmap.recycle();
                        img1 = null;
                        img2 = null;
                        images.clear();
                        Log.d("结束Hdr处理", "end");
                        if (b) {
                            getActivity().runOnUiThread(new Runnable() {
@@ -1173,10 +1318,10 @@
                captureBuilder.setTag(mRequestCounter.getAndIncrement());
                if (idx == 0) {
-                   // captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
                    // Set the exposure time, e.g. 1000 microseconds
                    // long exposureTime = 1000 000000L; // 1000 microseconds
                    captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+                   captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    if (exposureTime > 0) {
                        v = exposureTime;
                        captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, v);
@@ -1188,6 +1333,7 @@
                if (idx == 1) {
                    captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+                   captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 2);
                    if (exposureTime > 0) {
                        if (pic1 <= 0) {
                            v = exposureTime * DEFAULT_COMPATATION;

@@ -32,7 +32,6 @@ public class ImageSaver implements Runnable {
    private final CaptureResult mCaptureResult;
    private final CameraCharacteristics mCharacteristics;
    private CompleteCallback mCallback;
-   private final Context mContext;
    private final ImagePair mImagePair;
    private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
@@ -50,9 +49,9 @@ public class ImageSaver implements Runnable {
    }
    public static class ImagePair {
-       private List<ImageInfo> mImages;
-       private int mExpectedCount;
-       private Runnable mRunnable;
+       public List<ImageInfo> mImages;
+       public int mExpectedCount;
+       public Runnable mRunnable;
        public ImagePair(int expectedCount) {
            mImages = new ArrayList<>();
@@ -94,14 +93,13 @@ public class ImageSaver implements Runnable {
    private ArrayList<PngPhotoBean> mlist = new ArrayList<>(); // stores the names of photos already captured
-   public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics, Context context,
+   public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics,
                      Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader, ArrayList<PngPhotoBean> list,
                      CompleteCallback callback, ImagePair imagePair) {
        mImage = image;
        mFile = file;
        mCaptureResult = result;
        mCharacteristics = characteristics;
-       mContext = context;
        mReader = reader;
        mlist = list;
        mCallback = callback;
@@ -162,6 +160,7 @@ public class ImageSaver implements Runnable {
            // ImageDecoder.Source source = ImageDecoder.createSource(mFile);
            ImageDecoder.Source source = ImageDecoder.createSource(byteBuffer);
            try {
                bmp = ImageDecoder.decodeBitmap(source, listener);
            } catch (Exception ex) {
@@ -169,13 +168,17 @@ public class ImageSaver implements Runnable {
            }
            Log.i(TAG, "End Hardware Decoding Exp=" + t.toString());
            byteBuffer.clear();
            byteBuffer = null;
            mImagePair.addImage(bmp, 0, t.longValue());
+           // bmp.recycle();
+           // bmp = null;
            success = true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
+           dngCreator.close();
            mImage.close();
            closeOutput(baos);
        }
@@ -189,6 +192,7 @@ public class ImageSaver implements Runnable {
            e.printStackTrace();
        } finally {
            mImage.close();
+           dngCreator.close();
            closeOutput(output);
        }
    }
@@ -213,6 +217,16 @@ public class ImageSaver implements Runnable {
        }
    }
+   // private static void clear() {
+   //     if (null != outputStream) {
+   //         try {
+   //             outputStream.close();
+   //         } catch (IOException e) {
+   //             e.printStackTrace();
+   //         }
+   //     }
+   // }
    // private void saveJpeg(Image image, String name) {
    //     Image.Plane[] planes = image.getPlanes();
    //     ByteBuffer buffer = planes[0].getBuffer();

@@ -85,7 +85,7 @@ public class ImageSaverBuilder {
        if (!isComplete()) {
            return null;
        }
-       return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mContext, mReader, mlist, mCallback, mImagePair);
+       return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mReader, mlist, mCallback, mImagePair);
    }
    public synchronized String getSaveLocation() {

@@ -16,6 +16,10 @@ import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraManager;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkRequest;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
@@ -30,6 +34,8 @@ import com.xypower.mppreview.ui.CameraChannelActivity;
import com.xypower.mppreview.utils.PhotoUtil;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.utils.CameraUtils;
+import com.xypower.mppreview.utils.RouteManager;
import java.io.File;
import java.util.ArrayList;
@@ -40,6 +46,13 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
        loadLibrary("mppreview");
    }
+   // static {
+   //     if (!OpenCVLoader.initDebug()) {
+   //         Log.e("OpenCV", "Initialization failed");
+   //     } else {
+   //         Log.d("OpenCV", "Initialization succeeded");
+   //     }
+   // }
    private static int MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE = 100;
    public static int ExposureComp = 0;
@@ -98,19 +111,43 @@ public class MainActivity extends AppCompatActivity implements View.OnClickListe
        viewBinding.hdrtakepic.setOnClickListener(this);
        viewBinding.systakepic.setOnClickListener(this);
        viewBinding.spinner.setOnItemSelectedListener(this);
+       // viewBinding.channel1.setOnClickListener(this);
+       // viewBinding.channel2.setOnClickListener(this);
+       // viewBinding.channel3.setOnClickListener(this);
+       // viewBinding.channel4.setOnClickListener(this);
+       // viewBinding.channel5.setOnClickListener(this);
+       // viewBinding.channel6.setOnClickListener(this);
        numberOfCameras = CameraUtils.getNumberOfCameras(this);
        ItemAdapter itemAdapter = new ItemAdapter(numberOfCameras);
        itemAdapter.setOnClickListener(this);
        viewBinding.recyclerView.setAdapter(itemAdapter);
        viewBinding.recyclerView.setLayoutManager(new GridLayoutManager(this,3));
+       // initNetWork();
+   }
+   private void initNetWork() {
+       ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
+       ConnectivityManager.NetworkCallback networkCallback = new ConnectivityManager.NetworkCallback() {
+           @Override
+           public void onAvailable(Network network) {
+               super.onAvailable(network);
+               NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
+               if (capabilities != null && capabilities.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET)) {
+                   Log.d("NetworkCallback", "Ethernet connected");
+                   // Ethernet is connected
+                   viewBinding.hdrhint.setText("已连接");
+                   RouteManager.addRoute("192.168.68.0/24", "eth0");
+               }
+           }
+           @Override
+           public void onLost(Network network) {
+               super.onLost(network);
+               Log.d("NetworkCallback", "Network lost");
+               // Network disconnected
+               viewBinding.hdrhint.setText("已断开");
+           }
+       };
+       NetworkRequest request = new NetworkRequest.Builder()
+               .addTransportType(NetworkCapabilities.TRANSPORT_ETHERNET)
+               .build();
+       connectivityManager.registerNetworkCallback(request, networkCallback);
    }
private void initActivityResult() { private void initActivityResult() {

@@ -11,9 +11,6 @@ import androidx.recyclerview.widget.RecyclerView;
import com.xypower.mppreview.R;
import com.xypower.mppreview.interfaces.OnItemClickListener;
-import java.util.List;
public class ItemAdapter extends RecyclerView.Adapter<ItemAdapter.MyViewHolder> {
    private Integer count;
    public OnItemClickListener listener;

@@ -1,8 +1,8 @@
package com.xypower.mppreview.utils;
-import static java.lang.System.loadLibrary;
import java.text.SimpleDateFormat;
-import java.util.Arrays;
import java.util.Date;
import java.util.Locale;
@@ -13,4 +13,34 @@ public class HdrUtil {
        return sdf.format(new Date());
    }
+   // public static void createHDR(Mat[] images, float[] exposureTimes, Mat hdrImage, String filepath) {
+   //     Mat[] images32f = new Mat[images.length];
+   //     for (int i = 0; i < images.length; i++) {
+   //         images32f[i] = new Mat();
+   //         images[i].convertTo(images32f[i], CvType.CV_32F);
+   //     }
+   //
+   //     Mat response = new Mat();
+   //     Mat times = new Mat(exposureTimes.length, 1, CvType.CV_32F);
+   //     for (int i = 0; i < exposureTimes.length; i++) {
+   //         times.put(i, 0, exposureTimes[i]);
+   //     }
+   //
+   //     // Calibrate the camera response
+   //     CalibrateDebevec calibrate = createCalibrateDebevec();
+   //     calibrate.process(Arrays.asList(images32f), response, times);
+   //
+   //     // Merge the images into an HDR image
+   //     MergeDebevec merge = createMergeDebevec();
+   //     merge.process(Arrays.asList(images32f), hdrImage, times, response);
+   //     saveHDRImage(hdrImage, filepath);
+   // }
+   //
+   // public static void saveHDRImage(Mat hdrImage, String filePath) {
+   //     Imgcodecs.imwrite(filePath, hdrImage);
+   // }
}

@@ -20,4 +20,5 @@ android.useAndroidX=true
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
-opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android-nihui
+#opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android-nihui
+opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android
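
Note on the memory-leak fixes: they follow one pattern throughout the diff — release native-backed objects deterministically (the added release()/AHardwareBuffer_release calls in the JNI code, dngCreator.close() in ImageSaver's finally blocks, and Bitmap.recycle() once the HDR merge is done). A minimal Java sketch of that pattern; RawSaveSketch, writeDng and releasePair are illustrative names, not code from this commit.

    import android.graphics.Bitmap;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.DngCreator;
    import android.media.Image;
    import java.io.IOException;
    import java.io.OutputStream;

    public final class RawSaveSketch {
        // Write a RAW frame as DNG, making sure every native-backed object is
        // closed exactly once even when writeImage throws.
        public static void writeDng(CameraCharacteristics characteristics,
                                    CaptureResult result,
                                    Image rawImage,
                                    OutputStream out) throws IOException {
            // try-with-resources closes both the DngCreator and the Image on all
            // paths, which is what the added dngCreator.close()/mImage.close()
            // calls in the finally blocks guarantee.
            try (DngCreator dngCreator = new DngCreator(characteristics, result);
                 Image image = rawImage) {
                dngCreator.writeImage(out, image);
            }
        }

        // Bitmaps decoded for the HDR merge hold large native allocations; recycle
        // them as soon as the merge finishes, as the fragment's ImagePairRunnable now does.
        public static void releasePair(Bitmap first, Bitmap second) {
            if (first != null && !first.isRecycled()) first.recycle();
            if (second != null && !second.isRecycled()) second.recycle();
        }
    }
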