nx2024
Matthew 5 months ago
parent 8fcb50f4f4
commit 2d39522e5b

@@ -34,6 +34,9 @@ android {
             minifyEnabled false
             proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
         }
+        debug {
+            jniDebuggable true
+        }
     }
     // sourceSets {

@@ -21,6 +21,9 @@ ENDIF()

project("mppreview")

+include_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/include)
+link_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/lib)
+
 find_package(OpenCV REQUIRED core imgproc highgui photo)
 # find_package(OpenCV REQUIRED core imgproc)
 if(OpenCV_FOUND)

@@ -84,6 +87,7 @@ target_link_libraries( # Specifies the target library.
         #ncnn
-        raw
+        raw_r
         ${OpenCV_LIBS}
 )

@@ -28,7 +28,6 @@ namespace cv2
 {
     using namespace cv;
     Mat linearResponseNew(int channels)
     {
         Mat response = Mat(LDR_SIZE, 1, CV_MAKETYPE(CV_32F, channels));

@@ -131,11 +130,12 @@ namespace cv2
         Mat weight_sum = Mat::zeros(size, CV_32F);
         ALOGD("HDR Merge 5");
+        // #pragma omp parallel for num_threads(2)
         for(size_t i = 0; i < images.size(); i++) {
             std::vector<Mat> splitted;
             split(images[i], splitted);
+            ALOGD("HDR Merge 5 - 1");
             Mat w = Mat::zeros(size, CV_32F);
             for(int c = 0; c < channels; c++) {
                 LUT(splitted[c], weights, splitted[c]);
@@ -143,14 +143,18 @@ namespace cv2
             }
             w /= channels;
+            ALOGD("HDR Merge 5 - 2");

             Mat response_img;
             LUT(images[i], log_response, response_img);
             split(response_img, splitted);
+            // #pragma omp parallel for num_threads(channels)
             for(int c = 0; c < channels; c++) {
                 // crashed here
                 result_split[c] += w.mul(splitted[c] - exp_values.at<float>((int)i));
             }
             weight_sum += w;
+            ALOGD("HDR Merge 5 - 3");
         }
         ALOGD("HDR Merge 6");
@@ -506,7 +510,7 @@ bool makeHdr(std::vector<float>& times, std::vector<std::string>& paths, cv::Mat
     // Tonemap using Reinhard's method to obtain 24-bit color image
     // cout << "Tonemapping using Reinhard's method ... ";
     cv::Mat ldrReinhard;
-    cv::Ptr<cv2::TonemapReinhardNew> tonemapReinhard = cv2::createTonemapReinhardNew(1.5, 0, 0, 0);
+    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
     tonemapReinhard->process(hdrDebevec, ldrReinhard);
     hdrDebevec.release();
@@ -540,7 +544,7 @@ bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& r
     // Merge images into an HDR linear image
     ALOGI("Merging images into one HDR image ... ");
     cv::Mat hdrDebevec;
-    cv::Ptr<cv2::MergeDebevecNew> mergeDebevec = cv2::createMergeDebevecNew();
+    cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
     mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
     // Save HDR image.
     // imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
@@ -554,7 +558,7 @@ bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& r
     // Tonemap using Reinhard's method to obtain 24-bit color image
     ALOGI("Tonemapping using Reinhard's method ... ");
     cv::Mat ldrReinhard;
-    cv::Ptr<cv2::TonemapReinhardNew> tonemapReinhard = cv2::createTonemapReinhardNew(1.5, 0, 0, 0);
+    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
     tonemapReinhard->process(hdrDebevec, ldrReinhard);
     hdrDebevec.release();
@@ -708,16 +712,28 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
     for (int idx = 0; idx < 2; idx++)
     {
         AndroidBitmapInfo bmpInfo = { 0 };
-        AHardwareBuffer* hardwareBuffer = NULL;
         int result = AndroidBitmap_getInfo(env, bitmaps[idx], &bmpInfo);
-        result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
-        void* outVirtualAddress = NULL;
-        int32_t fence = -1;
-        result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
-        cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
-        AHardwareBuffer_unlock(hardwareBuffer, &fence);
-        tmp.copyTo(images[idx]);
+        if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE)
+        {
+            AHardwareBuffer* hardwareBuffer = NULL;
+            result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
+
+            void* outVirtualAddress = NULL;
+            int32_t fence = -1;
+            result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
+            cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
+            AHardwareBuffer_unlock(hardwareBuffer, &fence);
+            tmp.copyTo(images[idx]);
+        }
+        else
+        {
+            void* outAddress = NULL;
+            result = AndroidBitmap_lockPixels(env, bitmaps[idx], &outAddress);
+            cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress);
+            AndroidBitmap_unlockPixels(env, bitmaps[idx]);
+            tmp.copyTo(images[idx]);
+        }

         //convert RGB to BGR
         cv::cvtColor(images[idx], images[idx], cv::COLOR_RGB2BGR);
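
One caveat in the new branch above: every NDK result code is assigned and then ignored, and the pixels are read out of tmp only after AHardwareBuffer_unlock. A hedged sketch of the hardware path with checks, copy-before-unlock, and the buffer reference released (same NDK calls as the patch; the helper name is illustrative, and AndroidBitmap_getHardwareBuffer requires API level 30+):

#include <jni.h>
#include <android/bitmap.h>
#include <android/hardware_buffer.h>
#include <opencv2/core.hpp>

// Copy a HARDWARE-config Bitmap into a cv::Mat, checking each result code.
static bool copyHardwareBitmapChecked(JNIEnv* env, jobject bitmap, cv::Mat& out)
{
    AndroidBitmapInfo info = { 0 };
    if (AndroidBitmap_getInfo(env, bitmap, &info) != ANDROID_BITMAP_RESULT_SUCCESS)
        return false;
    if ((info.flags & ANDROID_BITMAP_FLAGS_IS_HARDWARE) == 0)
        return false;  // caller falls back to AndroidBitmap_lockPixels

    AHardwareBuffer* buffer = NULL;
    if (AndroidBitmap_getHardwareBuffer(env, bitmap, &buffer) != ANDROID_BITMAP_RESULT_SUCCESS)
        return false;

    void* addr = NULL;
    // fence = -1: block until the buffer contents are safe to read.
    if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, -1, NULL, &addr) != 0) {
        AHardwareBuffer_release(buffer);
        return false;
    }
    cv::Mat(info.height, info.width, CV_8UC4, addr).copyTo(out);  // copy while still locked
    AHardwareBuffer_unlock(buffer, NULL);
    AHardwareBuffer_release(buffer);  // getHardwareBuffer acquired a reference
    return true;
}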
@@ -746,5 +762,14 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
         return JNI_TRUE;
     }

+    // env->DeleteGlobalRef(img1);
+    // env->DeleteGlobalRef(img2);
+
     return JNI_FALSE;
 }
+
+extern "C"
+JNIEXPORT jboolean JNICALL
+Java_com_xypower_mppreview_Camera2RawFragment_decodeDng(JNIEnv *env, jclass clazz,
+                                                        jobject byte_buffer, jstring output_path) {
+    // TODO: implement decodeDng()
+    return JNI_FALSE; // a jboolean function must return; falling off the end is undefined behavior
+}
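
The decodeDng stub added above is left as a TODO. Since CMakeLists.txt now links raw_r, which is the name of LibRaw's thread-safe build, a LibRaw-based body is the likely destination; the following is a speculative sketch under that assumption, not part of the commit:

#include <jni.h>
#include <libraw/libraw.h>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>

// Hypothetical decodeDng(): demosaic a DNG held in a direct ByteBuffer
// with LibRaw and write the result with OpenCV.
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_decodeDng(JNIEnv *env, jclass clazz,
                                                        jobject byte_buffer, jstring output_path) {
    void* data = env->GetDirectBufferAddress(byte_buffer);
    jlong size = env->GetDirectBufferCapacity(byte_buffer);
    if (data == NULL || size <= 0) return JNI_FALSE;

    LibRaw raw;
    if (raw.open_buffer(data, (size_t)size) != LIBRAW_SUCCESS) return JNI_FALSE;
    if (raw.unpack() != LIBRAW_SUCCESS) return JNI_FALSE;
    if (raw.dcraw_process() != LIBRAW_SUCCESS) return JNI_FALSE;  // demosaic with default settings

    int err = 0;
    libraw_processed_image_t* img = raw.dcraw_make_mem_image(&err);
    if (img == NULL) return JNI_FALSE;

    // dcraw_make_mem_image yields 8-bit RGB by default.
    cv::Mat rgb(img->height, img->width, CV_8UC3, img->data);
    cv::Mat bgr;
    cv::cvtColor(rgb, bgr, cv::COLOR_RGB2BGR);

    const char* path = env->GetStringUTFChars(output_path, NULL);
    bool ok = cv::imwrite(path, bgr);
    env->ReleaseStringUTFChars(output_path, path);
    LibRaw::dcraw_clear_mem(img);
    return ok ? JNI_TRUE : JNI_FALSE;
}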

@@ -135,6 +135,8 @@ public class Camera2RawFragment extends Fragment {
     public static native boolean makeHdr3(long exposureTime1, Bitmap img1, int length1, long exposureTime2, Bitmap img2, int length2, String outputPath);

+    // public static native boolean decodeDng(ByteBuffer byteBuffer, String outputPath);
+
     private int mExposureComp = MainActivity.ExposureComp;
     private Long exposetime;
@@ -653,9 +655,11 @@ public class Camera2RawFragment extends Fragment {
         }
         StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

         // For still image captures, we use the largest available size.
+        Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
         Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
-        Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea());
+        outputSizes = map.getOutputSizes(ImageFormat.RAW_SENSOR);
+        Size largestRaw = Collections.max(Arrays.asList(outputSizes), new CompareSizesByArea());
         // Size largestRaw = Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)).get(1);

         synchronized (mCameraStateLock) {
@@ -885,7 +889,7 @@ public class Camera2RawFragment extends Fragment {
             // parameters.setPreviewFrameRate(10);
             Range<Integer>[] fpsRanges = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
-            Range<Integer> fpsRange = new Range<>(5, 30);
+            Range<Integer> fpsRange = new Range<>(10, 15);
             builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);

             // Enable auto-magical 3A run by camera device
@@ -1110,15 +1114,19 @@ public class Camera2RawFragment extends Fragment {
             new Thread(new Runnable() {
                 @Override
                 public void run() {
-                    // makeHdr3()
                     if (images.size() != 2) {
                         return;
                     }
                     ImageSaver.ImageInfo img1 = images.get(0);
                     ImageSaver.ImageInfo img2 = images.get(1);
                     Log.d("Start HDR processing", "start");
-                    boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, outputPath + "HDR_" + generateTimestamp() + ".jpg");
+                    String hdrOutputPath = outputPath + "HDR_" + generateTimestamp() + ".jpg";
+                    boolean b = makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, hdrOutputPath);
+                    img1.bitmap.recycle();
+                    img2.bitmap.recycle();
+                    img1 = null;
+                    img2 = null;
+                    images.clear();
                     Log.d("End HDR processing", "end");
                     if (b) {
                         showToast("HDR capture succeeded");

@@ -2,9 +2,12 @@ package com.xypower.mppreview;

 import androidx.appcompat.app.AppCompatActivity;

+import android.content.ComponentCallbacks2;
+import android.content.Context;
 import android.content.Intent;
 import android.os.Build;
 import android.os.Bundle;
+import android.util.Log;
 import android.view.View;
 import android.view.Window;
 import android.view.WindowInsets;
@@ -15,6 +18,7 @@ import com.xypower.mppreview.bean.Contants;

 public class CameraActivity extends AppCompatActivity {

+    private final static String TAG = "HDR";
     public int intExtra;

     @Override
@@ -31,4 +35,19 @@ public class CameraActivity extends AppCompatActivity {
             getSupportFragmentManager().beginTransaction().setReorderingAllowed(true).replace(R.id.container, Camera2RawFragment.class, bundle).commit();
         }
     }
+
+    @Override
+    public void onTrimMemory(int level) {
+        Log.w(TAG, "Event onTrimMemory level=" + level);
+        try {
+            System.gc();
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        }
+    }
+
+    @Override
+    public void onLowMemory() {
+        Log.w(TAG, "Event onLowMemory");
+    }
 }

@@ -150,11 +150,13 @@ public class ImageSaver implements Runnable {
         ImageDecoder.OnHeaderDecodedListener listener = new ImageDecoder.OnHeaderDecodedListener() {
             @Override
             public void onHeaderDecoded(@NonNull ImageDecoder decoder, @NonNull ImageDecoder.ImageInfo info, @NonNull ImageDecoder.Source source) {
+                // decoder.setAllocator(ImageDecoder.ALLOCATOR_SOFTWARE);
                 decoder.setAllocator(ImageDecoder.ALLOCATOR_HARDWARE);
+                // decoder.setTargetSize(4702, 3520);
             }
         };
-        Log.i("HDR", "Start Hardware Decoding Exp=" + t.toString());
+        Log.i(TAG, "Start Hardware Decoding Exp=" + t.toString() + " TID=" + Thread.currentThread().getId());
         ImageDecoder imageDecoder = null;
         Bitmap bmp = null;
         // ImageDecoder.Source source = ImageDecoder.createSource(mFile);
@@ -165,8 +167,9 @@ public class ImageSaver implements Runnable {
         } catch (Exception ex) {
             ex.printStackTrace();
         }
-        Log.i("HDR", "End Hardware Decoding Exp=" + t.toString());
+        Log.i(TAG, "End Hardware Decoding Exp=" + t.toString());
+        byteBuffer.clear();
         byteBuffer = null;
         mImagePair.addImage(bmp, 0, t.longValue());
         success = true;

@@ -4,6 +4,7 @@
     xmlns:tools="http://schemas.android.com/tools"
     android:layout_width="match_parent"
     android:layout_height="match_parent"
+    android:keepScreenOn="true"
     tools:context=".CameraActivity">

     <!-- res/layout/example_activity.xml -->

@@ -4,6 +4,7 @@
     xmlns:tools="http://schemas.android.com/tools"
     android:layout_width="match_parent"
     android:layout_height="match_parent"
+    android:keepScreenOn="true"
     tools:context=".CameraChannelActivity">

     <androidx.camera.view.PreviewView

@@ -7,6 +7,7 @@
     android:paddingLeft="15dp"
     android:paddingTop="20dp"
     android:paddingRight="15dp"
+    android:keepScreenOn="true"
     tools:context=".MainActivity">

     <TextView

@@ -20,4 +20,4 @@ android.useAndroidX=true
 # thereby reducing the size of the R class for that library
 android.nonTransitiveRClass=true

-opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android
+opencvsdk=D:/Workspace/deps/opencv-mobile-4.10.0-android-nihui