Add a helper class for HDR processing in a separate process

master
Matthew 5 months ago
parent 122cd629dc
commit 705e953dc5

CMakeLists.txt
@@ -55,6 +55,7 @@ add_library( # Sets the name of the library.
# Provides a relative path to your source file(s).
MpPreview.cpp
HdrImpl.cpp
)
@@ -92,4 +93,13 @@ target_link_libraries( # Specifies the target library.
)
add_executable( libhdr.so
hdr.cpp
HdrImpl.cpp)
target_link_libraries( libhdr.so PUBLIC -fopenmp -static-openmp
android z
-fopenmp -static-openmp
${OpenCV_LIBS}
raw raw_r
)
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")
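Note on the build change above: the standalone HDR tool is declared with add_executable but deliberately named libhdr.so, so the Gradle/NDK packaging treats it like a JNI library, bundles it into the APK, and installs it under the app's nativeLibraryDir, which is where execHdr() later looks for it. A minimal sketch of locating the binary at runtime, assuming an Android Context is available; the class and method names are illustrative, not part of this commit:

// Sketch only: resolve the packaged libhdr.so executable inside nativeLibraryDir.
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import java.io.File;

public class HdrBinaryLocator {
    public static File locate(Context context) throws PackageManager.NameNotFoundException {
        ApplicationInfo info = context.getPackageManager()
                .getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
        // Because the executable is named like a JNI library (lib*.so), the build
        // packages it into the APK and installs it under nativeLibraryDir.
        File exe = new File(info.nativeLibraryDir, "libhdr.so");
        if (!exe.exists() || !exe.canExecute()) {
            throw new IllegalStateException("libhdr.so missing or not executable: " + exe);
        }
        return exe;
    }
}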

HdrImpl.cpp (new file)
@@ -0,0 +1,61 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "hdr.h"
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb)
{
// Align input images
// cout << "Aligning images ... " << endl;
cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
alignMTB->process(images, images);
#endif
// Obtain Camera Response Function (CRF)
// ALOGI("Calculating Camera Response Function (CRF) ... ");
cv::Mat responseDebevec;
cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
calibrateDebevec->process(images, responseDebevec, times);
// Merge images into an HDR linear image
// ALOGI("Merging images into one HDR image ... ");
cv::Mat hdrDebevec;
cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
// Save HDR image.
// imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
// cout << "saved hdrDebevec.hdr " << endl;
{
std::vector<cv::Mat> empty;
empty.swap(images);
}
// Tonemap using Reinhard's method to obtain 24-bit color image
// ALOGI("Tonemaping using Reinhard's method ... ");
cv::Mat ldrReinhard;
cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
tonemapReinhard->process(hdrDebevec, ldrReinhard);
hdrDebevec.release();
int type = ldrReinhard.type();
ldrReinhard = ldrReinhard * 255;
ldrReinhard.convertTo(rgb, CV_8U);
ldrReinhard.release();
return true;
}

MpPreview.cpp
@@ -16,13 +16,7 @@
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#define HDR_TAG "HDR"
#include "hdr.h"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, HDR_TAG,__VA_ARGS__)
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, HDR_TAG,__VA_ARGS__)
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, HDR_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, HDR_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, HDR_TAG,__VA_ARGS__)
namespace cv2
{
@@ -523,54 +517,6 @@ bool makeHdr(std::vector<float>& times, std::vector<std::string>& paths, cv::Mat
return true;
}
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb)
{
// Read images and exposure times
// Align input images
// cout << "Aligning images ... " << endl;
cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
alignMTB->process(images, images);
#endif
// Obtain Camera Response Function (CRF)
ALOGI("Calculating Camera Response Function (CRF) ... ");
cv::Mat responseDebevec;
cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
calibrateDebevec->process(images, responseDebevec, times);
// Merge images into an HDR linear image
ALOGI("Merging images into one HDR image ... ");
cv::Mat hdrDebevec;
cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
// Save HDR image.
// imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
// cout << "saved hdrDebevec.hdr " << endl;
{
std::vector<cv::Mat> empty;
empty.swap(images);
}
// Tonemap using Reinhard's method to obtain 24-bit color image
ALOGI("Tonemaping using Reinhard's method ... ");
cv::Mat ldrReinhard;
cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
tonemapReinhard->process(hdrDebevec, ldrReinhard);
hdrDebevec.release();
int type = ldrReinhard.type();
ldrReinhard = ldrReinhard * 255;
ldrReinhard.convertTo(rgb, CV_8U);
ldrReinhard.release();
return true;
}
extern "C" extern "C"
JNIEXPORT jboolean JNICALL JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr( Java_com_xypower_mppreview_Camera2RawFragment_makeHdr(

hdr.cpp (new file)
@@ -0,0 +1,68 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "hdr.h"
int main( int argc, char** argv )
{
if (argc != 7)
{
return -1;
}
std::string outputPath = argv[1];
std::string tmpFilePath = argv[2];
std::vector<float> times;
times.push_back((double)(atoi(argv[3])) / 1000000000.0);
times.push_back((double)(atoi(argv[5])) / 1000000000.0);
std::vector<std::string> paths;
paths.push_back(std::string(argv[4]));
paths.push_back(std::string(argv[6]));
std::vector<cv::Mat> images;
images.resize(2);
printf("Start Decode");
#pragma omp parallel for num_threads(2)
for (int idx = 0; idx < 2; idx++)
{
images[idx] = cv::imread(paths[idx].c_str());
}
printf("End Decode");
cv::Mat rgb;
printf("Start MakeHDR3");
makeHdr(times, images, rgb);
printf("End MakeHDR3");
std::vector<int> params;
params.push_back(cv::IMWRITE_JPEG_QUALITY);
params.push_back(100);
if (cv::imwrite(outputPath.c_str(), rgb, params))
{
printf("End HDR3");
return JNI_TRUE;
}
return 0;
}
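The main() above expects exactly six positional arguments after the program name: output path, temp file path, then the exposure time in nanoseconds and the image path for each of the two frames. A hedged sketch of driving it from Java with ProcessBuilder, passing each argument separately; this is an illustrative alternative to the single command string built in execHdr() further below, and the class name HdrToolRunner is made up:

// Sketch only: invoke the hdr.cpp binary with its six positional arguments.
import java.io.File;
import java.io.IOException;

public class HdrToolRunner {
    public static int run(String exePath, String outputPath, String tmpPath,
                          long exposure1Ns, String image1, long exposure2Ns, String image2,
                          File workDir) throws IOException, InterruptedException {
        ProcessBuilder pb = new ProcessBuilder(
                exePath, outputPath, tmpPath,
                Long.toString(exposure1Ns), image1,
                Long.toString(exposure2Ns), image2);
        pb.directory(workDir);
        pb.redirectErrorStream(true); // merge stdout/stderr so the caller reads one stream
        Process process = pb.start();
        return process.waitFor();     // block until the HDR tool exits
    }
}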

hdr.h (new file)
@@ -0,0 +1,25 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#define HDR_TAG "HDR"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, HDR_TAG,__VA_ARGS__)
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, HDR_TAG,__VA_ARGS__)
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, HDR_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, HDR_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, HDR_TAG,__VA_ARGS__)
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb);

Camera2RawFragment.java
@@ -6,6 +6,7 @@ import static java.lang.System.loadLibrary;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
@@ -59,7 +60,11 @@ import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import com.xypower.mppreview.widget.ErrorDialog;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -1498,4 +1503,59 @@ public class Camera2RawFragment extends Fragment {
}
private static int execHdr(Context context, long exposureTime1, String path1, long exposureTime2, String path2, String outputPath, String tmpFilePath) {
ApplicationInfo applicationInfo = null;
try {
applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
} catch (Exception ex) {
}
String exeFilePath = applicationInfo.nativeLibraryDir + '/' + "libhdr.so";
File hdrpFile = new File(exeFilePath);
if (!hdrpFile.exists()) {
return -1;
}
String cmd = exeFilePath + " " + outputPath + " ";
cmd += tmpFilePath + " ";
cmd += Long.toString(exposureTime1) + " ";
cmd += path1 + " ";
cmd += Long.toString(exposureTime2) + " ";
cmd += path2 + " ";
String[] params = new String[]{""};
File workDir = context.getFilesDir();
int exitCode = 0;
try {
Process process = Runtime.getRuntime().exec(cmd, params, workDir.getAbsoluteFile());
// Intrinsics.checkNotNullExpressionValue(process, "process");
InputStream inputStream = process.getInputStream();
BufferedReader reader = new BufferedReader((Reader)(new InputStreamReader(inputStream)));
// StringBuilder stringBuilder = new StringBuilder();
while(true) {
String line = reader.readLine();
if (line == null) {
exitCode = process.exitValue();
reader.close();
process.destroy();
break;
}
if (line != null) {
// this.outputCallback.invoke(var5);
Log.d("HDRPlus", line);
// stringBuilder.append(line);
// stringBuilder.append("\r\n");
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
return exitCode;
}
}
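For reference, a minimal usage sketch of execHdr(), assumed to live inside Camera2RawFragment; every path and exposure value below is a placeholder, not a value from the commit:

// Sketch only: run the external libhdr.so process on two captured frames.
private void runHdrInSeparateProcess() {
    long exposureShortNs = 10_000_000L;   // 10 ms frame, placeholder value
    long exposureLongNs  = 100_000_000L;  // 100 ms frame, placeholder value
    String shortPath  = "/sdcard/DCIM/hdr_short.jpg";   // placeholder path
    String longPath   = "/sdcard/DCIM/hdr_long.jpg";    // placeholder path
    String outputPath = "/sdcard/DCIM/hdr_result.jpg";  // placeholder path
    String tmpPath = getActivity().getCacheDir().getAbsolutePath();
    int exitCode = execHdr(getActivity(), exposureShortNs, shortPath,
            exposureLongNs, longPath, outputPath, tmpPath);
    Log.d("HDRPlus", "libhdr.so exited with code " + exitCode);
}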

PhotoUtil.java
@@ -8,10 +8,18 @@ import android.os.Build;
import android.os.Environment;
import android.provider.MediaStore;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import androidx.activity.result.ActivityResultLauncher;
import androidx.core.content.FileProvider;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -80,4 +88,216 @@ public class PhotoUtil {
}
private final static int BMP_WIDTH_OF_TIMES = 4;
private final static int BYTE_PER_PIXEL = 3;
/**
* Android Bitmap Object to Window's v3 24bit Bmp Format File
* @param orgBitmap
* @param filePath
* @return file saved result
*/
public static boolean save(Bitmap orgBitmap, String filePath){
if(orgBitmap == null){
return false;
}
if(filePath == null){
return false;
}
boolean isSaveSuccess = true;
//image size
int width = orgBitmap.getWidth();
int height = orgBitmap.getHeight();
//image dummy data size
//reason : bmp file's width equals 4's multiple
int dummySize = 0;
byte[] dummyBytesPerRow = null;
boolean hasDummy = false;
if(isBmpWidth4Times(width)){
hasDummy = true;
dummySize = BMP_WIDTH_OF_TIMES - (width % BMP_WIDTH_OF_TIMES);
dummyBytesPerRow = new byte[dummySize * BYTE_PER_PIXEL];
for(int i = 0; i < dummyBytesPerRow.length; i++){
dummyBytesPerRow[i] = (byte)0xFF;
}
}
int[] pixels = new int[width * height];
int imageSize = pixels.length * BYTE_PER_PIXEL + (height * dummySize * BYTE_PER_PIXEL);
int imageDataOffset = 0x36;
int fileSize = imageSize + imageDataOffset;
//Android Bitmap Image Data
orgBitmap.getPixels(pixels, 0, width, 0, 0, width, height);
//ByteArrayOutputStream baos = new ByteArrayOutputStream(fileSize);
ByteBuffer buffer = ByteBuffer.allocate(fileSize);
try {
/**
* BITMAP FILE HEADER Write Start
**/
buffer.put((byte)0x42);
buffer.put((byte)0x4D);
//size
buffer.put(writeInt(fileSize));
//reserved
buffer.put(writeShort((short)0));
buffer.put(writeShort((short)0));
//image data start offset
buffer.put(writeInt(imageDataOffset));
/** BITMAP FILE HEADER Write End */
//*******************************************
/** BITMAP INFO HEADER Write Start */
//size
buffer.put(writeInt(0x28));
//width, height
buffer.put(writeInt(width));
buffer.put(writeInt(height));
//planes
buffer.put(writeShort((short)1));
//bit count
buffer.put(writeShort((short)24));
//bit compression
buffer.put(writeInt(0));
//image data size
buffer.put(writeInt(imageSize));
//horizontal resolution in pixels per meter
buffer.put(writeInt(0));
//vertical resolution in pixels per meter (unreliable)
buffer.put(writeInt(0));
//number of colors used (0 = default)
buffer.put(writeInt(0));
//number of important colors (0 = all)
buffer.put(writeInt(0));
/** BITMAP INFO HEADER Write End */
int row = height;
int col = width;
int startPosition = 0;
int endPosition = 0;
while( row > 0 ){
startPosition = (row - 1) * col;
endPosition = row * col;
for(int i = startPosition; i < endPosition; i++ ){
buffer.put(write24BitForPixcel(pixels[i]));
if(hasDummy){
if(isBitmapWidthLastPixcel(width, i)){
buffer.put(dummyBytesPerRow);
}
}
}
row--;
}
FileOutputStream fos = new FileOutputStream(filePath);
fos.write(buffer.array());
fos.close();
} catch (IOException e1) {
e1.printStackTrace();
isSaveSuccess = false;
}
finally{
}
return isSaveSuccess;
}
/**
* Is last pixel in Android Bitmap width
* @param width
* @param i
* @return
*/
private static boolean isBitmapWidthLastPixcel(int width, int i) {
return i > 0 && (i % (width - 1)) == 0;
}
/**
* BMP file is a multiples of 4?
* @param width
* @return
*/
private static boolean isBmpWidth4Times(int width) {
return width % BMP_WIDTH_OF_TIMES > 0;
}
/**
* Write integer to little-endian
* @param value
* @return
* @throws IOException
*/
private static byte[] writeInt(int value) throws IOException {
byte[] b = new byte[4];
b[0] = (byte)(value & 0x000000FF);
b[1] = (byte)((value & 0x0000FF00) >> 8);
b[2] = (byte)((value & 0x00FF0000) >> 16);
b[3] = (byte)((value & 0xFF000000) >> 24);
return b;
}
/**
* Write integer pixel to little-endian byte array
* @param value
* @return
* @throws IOException
*/
private static byte[] write24BitForPixcel(int value) throws IOException {
byte[] b = new byte[3];
b[0] = (byte)(value & 0x000000FF);
b[1] = (byte)((value & 0x0000FF00) >> 8);
b[2] = (byte)((value & 0x00FF0000) >> 16);
return b;
}
/**
* Write short to little-endian byte array
* @param value
* @return
* @throws IOException
*/
private static byte[] writeShort(short value) throws IOException {
byte[] b = new byte[2];
b[0] = (byte)(value & 0x00FF);
b[1] = (byte)((value & 0xFF00) >> 8);
return b;
}
}
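A short usage sketch for the new BMP writer, assuming the caller sits in the same package as PhotoUtil; the input and output paths are placeholders:

// Sketch only: decode an image file and save it as a 24-bit BMP via PhotoUtil.save().
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

public class BmpSaveExample {
    public static boolean saveAsBmp(String srcImagePath, String bmpPath) {
        Bitmap bitmap = BitmapFactory.decodeFile(srcImagePath); // any format Android can decode
        if (bitmap == null) {
            return false; // decoding failed
        }
        // PhotoUtil.save() writes BGR pixel rows bottom-up with row padding;
        // it returns false if the FileOutputStream write fails.
        boolean ok = PhotoUtil.save(bitmap, bmpPath);
        bitmap.recycle();
        return ok;
    }
}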
