MpPreview/app/src/main/cpp/MpPreview.cpp

#include <jni.h>
#include <string>
#include <vector>
// #include "ncnn/yolov5ncnn.h"
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#define HDR_TAG "HDR"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, HDR_TAG,__VA_ARGS__)
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, HDR_TAG,__VA_ARGS__)
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, HDR_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, HDR_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, HDR_TAG,__VA_ARGS__)
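// Write callback for AndroidBitmap_compress(): userContext carries a raw file
// descriptor and each chunk of encoded output is appended to it with write().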
bool AndroidBitmap_CompressWriteFile(void *userContext, const void *data, size_t size)
{
int file = (int)(size_t)userContext;
ssize_t bytesWritten = write(file, data, size);
return bytesWritten >= 0 && (size_t)bytesWritten == size;
}
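// Write callback for AndroidBitmap_compress(): userContext points at a
// std::vector<uint8_t> that accumulates the encoded bytes in memory.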
bool AndroidBitmap_CompressWriteBuffer(void *userContext, const void *data, size_t size)
{
std::vector<uint8_t>* buffer = (std::vector<uint8_t>*)userContext;
const uint8_t* pBytes = (const uint8_t*)data;
buffer->insert(buffer->cend(), pBytes, pBytes + size);
return true;
}
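// Decodes a DNG image held in memory with AImageDecoder and re-encodes it as
// PNG into pngData via AndroidBitmap_compress().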
void ConvertDngToPng(const uint8_t* buffer, size_t bufferLength, std::vector<uint8_t>& pngData)
{
AImageDecoder* imageDecoder = NULL;
AImageDecoder_createFromBuffer(buffer, bufferLength, &imageDecoder);
// int fd = open("/sdcard/com.xypower.mpapp/tmp/4.dng", O_RDONLY);
// AImageDecoder_createFromFd(fd, &imageDecoder);
const AImageDecoderHeaderInfo* headerInfo = AImageDecoder_getHeaderInfo(imageDecoder);
const char *mimeType = AImageDecoderHeaderInfo_getMimeType(headerInfo);
AndroidBitmapInfo bmpInfo = { 0 };
bmpInfo.flags = AImageDecoderHeaderInfo_getAlphaFlags(headerInfo);
bmpInfo.width = AImageDecoderHeaderInfo_getWidth(headerInfo);
bmpInfo.height = AImageDecoderHeaderInfo_getHeight(headerInfo);
bmpInfo.format = AImageDecoderHeaderInfo_getAndroidBitmapFormat(headerInfo);
bmpInfo.stride = AImageDecoder_getMinimumStride(imageDecoder); // Image decoder does not
// use padding by default
size_t size = bmpInfo.stride * bmpInfo.height;
int32_t dataSpace = AImageDecoderHeaderInfo_getDataSpace(headerInfo);
std::vector<uint8_t> frame;
frame.resize(size);
// AImageDecoder_setTargetSize(imageDecoder, 5376, 3024);
int result = AImageDecoder_decodeImage(imageDecoder, (void *)(&frame[0]), bmpInfo.stride, size);
// close(fd);
if (result == ANDROID_IMAGE_DECODER_SUCCESS)
{
// std::string imagePath = "/data/data/com.xypower.mppreview/files/test.png";
// int file = open(imagePath.c_str(), O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR);
// if (file == -1) {}
pngData.clear();
AndroidBitmap_compress(&bmpInfo, dataSpace, &frame[0], ANDROID_BITMAP_COMPRESS_FORMAT_PNG, 100, (void*)&pngData, AndroidBitmap_CompressWriteBuffer);
// close(file);
std::vector<uint8_t> empty;
empty.swap(frame);
}
AImageDecoder_delete(imageDecoder);
}
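// Decodes a DNG image held in memory directly into a 3-channel BGR cv::Mat,
// ready for the OpenCV HDR pipeline below.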
void ConvertDngToPng(const uint8_t* buffer, size_t bufferLength, cv::Mat& rgb)
{
AImageDecoder* imageDecoder = NULL;
AImageDecoder_createFromBuffer(buffer, bufferLength, &imageDecoder);
// int fd = open("/sdcard/com.xypower.mpapp/tmp/4.dng", O_RDONLY);
// AImageDecoder_createFromFd(fd, &imageDecoder);
const AImageDecoderHeaderInfo* headerInfo = AImageDecoder_getHeaderInfo(imageDecoder);
const char *mimeType = AImageDecoderHeaderInfo_getMimeType(headerInfo);
AndroidBitmapInfo bmpInfo = { 0 };
bmpInfo.flags = AImageDecoderHeaderInfo_getAlphaFlags(headerInfo);
bmpInfo.width = AImageDecoderHeaderInfo_getWidth(headerInfo);
bmpInfo.height = AImageDecoderHeaderInfo_getHeight(headerInfo);
bmpInfo.format = AImageDecoderHeaderInfo_getAndroidBitmapFormat(headerInfo);
bmpInfo.stride = AImageDecoder_getMinimumStride(imageDecoder); // Image decoder does not
// use padding by default
size_t size = bmpInfo.stride * bmpInfo.height;
int32_t dataSpace = AImageDecoderHeaderInfo_getDataSpace(headerInfo);
std::vector<uint8_t> frame;
frame.resize(size);
// AImageDecoder_setTargetSize(imageDecoder, 5376, 3024);
int result = AImageDecoder_decodeImage(imageDecoder, (void *)(&frame[0]), bmpInfo.stride, size);
// close(fd);
if (result == ANDROID_IMAGE_DECODER_SUCCESS)
{
// Wrap the decoded RGBA_8888 pixels (using the decoder's stride), drop the
// alpha channel and swap to OpenCV's BGR channel order.
cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, &frame[0], bmpInfo.stride);
cv::cvtColor(tmp, rgb, cv::COLOR_RGBA2BGR);
/*
// std::string imagePath = "/data/data/com.xypower.mppreview/files/test.png";
// int file = open(imagePath.c_str(), O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR);
// if (file == -1) {}
std::vector<uint8_t> pngData;
result = AndroidBitmap_compress(&bmpInfo, dataSpace, &frame[0], ANDROID_BITMAP_COMPRESS_FORMAT_PNG, 100, (void*)&pngData, AndroidBitmap_CompressWriteBuffer);
{
std::vector<uint8_t> empty;
empty.swap(frame);
}
// close(file);
if (ANDROID_BITMAP_RESULT_SUCCESS == result)
{
rgb = cv::imdecode(pngData, cv::IMREAD_COLOR);
}
*/
}
AImageDecoder_delete(imageDecoder);
}
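// Converts a jstring to a UTF-8 std::string by calling String.getBytes("UTF-8")
// through JNI.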
inline std::string jstring2string(JNIEnv *env, jstring jStr)
{
if (!jStr)
return "";
const jclass stringClass = env->GetObjectClass(jStr);
const jmethodID getBytes = env->GetMethodID(stringClass, "getBytes", "(Ljava/lang/String;)[B");
const jbyteArray stringJbytes = (jbyteArray) env->CallObjectMethod(jStr, getBytes, env->NewStringUTF("UTF-8"));
size_t length = (size_t) env->GetArrayLength(stringJbytes);
jbyte* pBytes = env->GetByteArrayElements(stringJbytes, NULL);
std::string ret = std::string((char *)pBytes, length);
env->ReleaseByteArrayElements(stringJbytes, pBytes, JNI_ABORT);
env->DeleteLocalRef(stringJbytes);
env->DeleteLocalRef(stringClass);
return ret;
}
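// Loads the differently exposed shots from disk and merges them into an HDR
// image: Debevec CRF calibration and merge, then Reinhard tonemapping down to
// an 8-bit BGR result.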
bool makeHdr(std::vector<float>& times, std::vector<std::string>& paths, cv::Mat& rgb)
{
// Read images and exposure times
std::vector<cv::Mat> images;
images.resize(paths.size());
#pragma omp parallel for
for (int idx = 0; idx < paths.size(); idx++)
{
images[idx] = cv::imread(paths[idx].c_str());
}
// Align input images
// cout << "Aligning images ... " << endl;
cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
alignMTB->process(images, images);
#endif
// Obtain Camera Response Function (CRF)
// cout << "Calculating Camera Response Function (CRF) ... " << endl;
cv::Mat responseDebevec;
cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
calibrateDebevec->process(images, responseDebevec, times);
// Merge images into an HDR linear image
// cout << "Merging images into one HDR image ... ";
cv::Mat hdrDebevec;
cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
// Save HDR image.
// imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
// cout << "saved hdrDebevec.hdr " << endl;
{
std::vector<cv::Mat> empty;
empty.swap(images);
}
// Tonemap using Reinhard's method to obtain 24-bit color image
// cout << "Tonemaping using Reinhard's method ... ";
cv::Mat ldrReinhard;
cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
tonemapReinhard->process(hdrDebevec, ldrReinhard);
hdrDebevec.release();
// Scale the [0,1] tonemapped output to 8-bit.
ldrReinhard.convertTo(rgb, CV_8U, 255.0);
ldrReinhard.release();
return true;
}
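// Minimal usage sketch for the path-based overload (illustrative only: the
// file names and exposure times below are made up, not taken from the app):
//
//   std::vector<float> times = { 1.0f / 30.0f, 1.0f / 250.0f };
//   std::vector<std::string> paths = { "/sdcard/hdr/under.jpg",
//                                      "/sdcard/hdr/over.jpg" };
//   cv::Mat rgb;
//   if (makeHdr(times, paths, rgb))
//       cv::imwrite("/sdcard/hdr/result.jpg", rgb);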
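// Same Debevec + Reinhard pipeline as above, but operating on frames that are
// already decoded in memory. Note that the input vector is swapped empty once
// the frames have been merged.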
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb)
{
// Read images and exposure times
// Align input images
// cout << "Aligning images ... " << endl;
cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
alignMTB->process(images, images);
#endif
// Obtain Camera Response Function (CRF)
// cout << "Calculating Camera Response Function (CRF) ... " << endl;
cv::Mat responseDebevec;
cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
calibrateDebevec->process(images, responseDebevec, times);
// Merge images into an HDR linear image
// cout << "Merging images into one HDR image ... ";
cv::Mat hdrDebevec;
cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
// Save HDR image.
// imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
// cout << "saved hdrDebevec.hdr " << endl;
{
std::vector<cv::Mat> empty;
empty.swap(images);
}
// Tonemap using Reinhard's method to obtain 24-bit color image
// cout << "Tonemaping using Reinhard's method ... ";
cv::Mat ldrReinhard;
cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
tonemapReinhard->process(hdrDebevec, ldrReinhard);
hdrDebevec.release();
// Scale the [0,1] tonemapped output to 8-bit.
ldrReinhard.convertTo(rgb, CV_8U, 255.0);
ldrReinhard.release();
return true;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr(
JNIEnv *env, jobject thiz, jlong exposureTime1, jstring path1, jlong exposureTime2,
jstring path2, jstring outputPath) {
cv::setNumThreads(4);
std::vector<float> times;
std::vector<std::string> paths;
times.push_back((double)(exposureTime1) / 1000000000.0);
times.push_back((double)(exposureTime2) / 1000000000.0);
paths.push_back(jstring2string(env, path1));
paths.push_back(jstring2string(env, path2));
cv::Mat rgb;
if (makeHdr(times, paths, rgb))
{
std::string fileName = jstring2string(env, outputPath);
if (cv::imwrite(fileName.c_str(), rgb))
{
return JNI_TRUE;
}
}
return JNI_FALSE;
}
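// Expected Java-side declaration, reconstructed from the JNI name mangling
// above (parameter names are illustrative, not taken from the app):
//
//   // in com.xypower.mppreview.Camera2RawFragment
//   public native boolean makeHdr(long exposureTime1, String path1,
//                                 long exposureTime2, String path2,
//                                 String outputPath);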
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr2(
JNIEnv *env, jobject thiz, jlong exposureTime1, jstring path1, jlong exposureTime2,
jstring path2, jlong exposureTime3, jstring path3, jstring outputPath) {
cv::setNumThreads(4);
std::vector<float> times;
std::vector<std::string> paths;
times.push_back((double)(exposureTime1) / 1000000000.0);
times.push_back((double)(exposureTime2) / 1000000000.0);
times.push_back((double)(exposureTime3) / 1000000000.0);
paths.push_back(jstring2string(env, path1));
paths.push_back(jstring2string(env, path2));
paths.push_back(jstring2string(env, path3));
cv::Mat rgb;
if (makeHdr(times, paths, rgb))
{
std::string fileName = jstring2string(env, outputPath);
if (cv::imwrite(fileName.c_str(), rgb))
{
return JNI_TRUE;
}
}
return JNI_FALSE;
}
//extern "C"
//JNIEXPORT void JNICALL
//Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env,
// jobject thiz,
// jlong img1_obj,
// jlong img2_obj,
// jlong img3_obj,
// jfloat time1,
// jfloat time2,
// jfloat time3,
// jlong hdrImg) {
// cv::Mat &mImg1 = *(cv::Mat *) img1_obj;
// cv::Mat &mImg2 = *(cv::Mat *) img2_obj;
// cv::Mat &mImg3 = *(cv::Mat *) img3_obj;
//
// vector<cv::Mat> images;
// vector<float> times;
// images.push_back(mImg1);
// images.push_back(mImg2);
// images.push_back(mImg3);
//
// // Exposure time list
// static const float timesArray[] = { time1,time2,time3};
// times.assign(timesArray, timesArray + 3);
//
// // Align the frames using the median threshold bitmap (MTB) method
// /*Ptr<AlignMTB> alignMTB = createAlignMTB();
// alignMTB->process(images, images);
//
// // Recover the camera response function
// Mat responce;
// Ptr<CalibrateDebevec> calibratedebevec = createCalibrateDebevec();
// calibratedebevec->process(images, responce, times);*/
//
// // Merge the exposures
// cv::Mat hdrMat;
// Ptr<MergeDebevec> mergedebevec = createMergeDebevec();
// mergedebevec->process(images, hdrMat, times);
//
// cv::Mat sdr;
// float gamma = 1.0f;
// Ptr<Tonemap> tonemap = createTonemap(gamma);
// tonemap->process(hdrMat, hdrMat);
//
// hdrMat = hdrMat * 255;
// hdrMat.convertTo(*(Mat *)hdrImg, CV_8UC3);
//
// //hdrImg = cv::normalize(images, None, 0, 255, cv::NORM_MINMAX, cv::CV_8UC3)
//
// /*Mat fusion;
// Ptr<MergeMertens> merge_mertens = createMergeMertens();
// merge_mertens->process(images, *(Mat *)hdrImg);*/
//}
extern "C"
JNIEXPORT void JNICALL
Java_com_xypower_mppreview_MainActivity_test(JNIEnv *env, jobject thiz) {
// TODO: implement test()
}
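// makeHdr3: takes two in-memory DNG buffers (direct ByteBuffers) and their
// exposure times, decodes them in parallel, merges them into an HDR image and
// writes the tonemapped result as a maximum-quality JPEG to outputPath.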
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz,
jlong exposureTime1, jobject img1, jint length1,
jlong exposureTime2, jobject img2, jint length2,
jstring outputPath) {
ALOGI("Start HDR3");
std::vector<cv::Mat> images;
images.resize(2);
std::vector<size_t> pngLengths;
pngLengths.push_back(length1);
pngLengths.push_back(length2);
std::vector<const uint8_t*> pngDatas;
pngDatas.resize(2);
pngDatas[0] = (const uint8_t*)env->GetDirectBufferAddress(img1);
pngDatas[1] = (const uint8_t*)env->GetDirectBufferAddress(img2);
// omp_set_num_threads(2);
#pragma omp parallel for num_threads(2)
for (int idx = 0; idx < 2; idx++)
{
ConvertDngToPng(pngDatas[idx], pngLengths[idx], images[idx]);
}
cv::Mat rgb;
std::vector<float> times;
times.push_back((double)(exposureTime1) / 1000000000.0);
times.push_back((double)(exposureTime2) / 1000000000.0);
ALOGI("Start MakeHDR3");
makeHdr(times, images, rgb);
ALOGI("End MakeHDR3");
std::string fileName = jstring2string(env, outputPath);
std::vector<int> params;
params.push_back(cv::IMWRITE_JPEG_QUALITY);
params.push_back(100);
if (cv::imwrite(fileName.c_str(), rgb, params))
{
ALOGI("End HDR3");
return JNI_TRUE;
}
return JNI_FALSE;
}
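// Expected Java-side declaration for makeHdr3, reconstructed from the JNI name
// mangling above (parameter names are illustrative). The image buffers must be
// direct ByteBuffers, because the code relies on GetDirectBufferAddress():
//
//   // in com.xypower.mppreview.Camera2RawFragment
//   public static native boolean makeHdr3(long exposureTime1, java.nio.ByteBuffer img1, int length1,
//                                         long exposureTime2, java.nio.ByteBuffer img2, int length2,
//                                         String outputPath);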