Optimize code

mtkhdr
Matthew 6 months ago
parent 31b8d04103
commit 137a395956

@@ -23,6 +23,305 @@
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, HDR_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, HDR_TAG,__VA_ARGS__)
namespace cv2
{
using namespace cv;
Mat linearResponseNew(int channels)
{
    Mat response = Mat(LDR_SIZE, 1, CV_MAKETYPE(CV_32F, channels));
    // note: Vec3f indexing assumes 3-channel images, matching the upstream
    // OpenCV linearResponse() this mirrors
    for(int i = 0; i < LDR_SIZE; i++) {
        response.at<Vec3f>(i) = Vec3f::all(static_cast<float>(i));
    }
    return response;
}
Mat triangleWeightsNew()
{
    // hat function
    Mat w(LDR_SIZE, 1, CV_32F);
    int half = LDR_SIZE / 2;
    for(int i = 0; i < LDR_SIZE; i++) {
        w.at<float>(i) = i < half ? i + 1.0f : LDR_SIZE - i;
    }
    return w;
}
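// The hat weights are w(i) = min(i + 1, LDR_SIZE - i): they peak at mid-gray
// and fall off toward both ends, so nearly under- or over-exposed pixels
// contribute little to the merge (w(0) = 1, w(127) = w(128) = 128, w(255) = 1
// for LDR_SIZE = 256).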
class CV_EXPORTS_W MergeExposuresNew
{
public:
    virtual ~MergeExposuresNew() {}
    /** @brief Merges images.

    @param src vector of input images
    @param dst result image
    @param times vector of exposure time values for each image
    @param response 256x1 matrix with the inverse camera response function for each pixel value;
    it must have the same number of channels as the images.
    */
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst,
                                 InputArray times, InputArray response) = 0;
};
class CV_EXPORTS_W MergeDebevecNew : public MergeExposuresNew
{
public:
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst,
                                 InputArray times, InputArray response) CV_OVERRIDE = 0;
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst, InputArray times) = 0;
};
class MergeDebevecImplNew CV_FINAL : public MergeDebevecNew
{
public:
    MergeDebevecImplNew() :
        name("MergeDebevecNew"),
        weights(triangleWeightsNew())
    {
    }
    void process(InputArrayOfArrays src, OutputArray dst, InputArray _times, InputArray input_response) CV_OVERRIDE
    {
        // CV_INSTRUMENT_REGION();
        ALOGD("HDR Merge 1");
        std::vector<Mat> images;
        src.getMatVector(images);
        Mat times = _times.getMat();
#if 0
        CV_Assert(images.size() == times.total());
        checkImageDimensions(images);
        CV_Assert(images[0].depth() == CV_8U);
#endif
        int channels = images[0].channels();
        Size size = images[0].size();
        int CV_32FCC = CV_MAKETYPE(CV_32F, channels);
        ALOGD("HDR Merge 2");
        dst.create(images[0].size(), CV_32FCC);
        Mat result = dst.getMat();
        Mat response = input_response.getMat();
        if(response.empty()) {
            response = linearResponseNew(channels);
            response.at<Vec3f>(0) = response.at<Vec3f>(1);
        }
        ALOGD("HDR Merge 3");
        Mat log_response;
        log(response, log_response);
        CV_Assert(log_response.rows == LDR_SIZE && log_response.cols == 1 &&
                  log_response.channels() == channels);
        Mat exp_values(times.clone());
        log(exp_values, exp_values);
        ALOGD("HDR Merge 4");
        result = Mat::zeros(size, CV_32FCC);
        std::vector<Mat> result_split;
        split(result, result_split);
        Mat weight_sum = Mat::zeros(size, CV_32F);
        ALOGD("HDR Merge 5");
        // #pragma omp parallel for num_threads(2)
        for(size_t i = 0; i < images.size(); i++) {
            std::vector<Mat> splitted;
            split(images[i], splitted);
            Mat w = Mat::zeros(size, CV_32F);
            for(int c = 0; c < channels; c++) {
                LUT(splitted[c], weights, splitted[c]);
                w += splitted[c];
            }
            w /= channels;
            Mat response_img;
            LUT(images[i], log_response, response_img);
            split(response_img, splitted);
            for(int c = 0; c < channels; c++) {
                result_split[c] += w.mul(splitted[c] - exp_values.at<float>((int)i));
            }
            weight_sum += w;
        }
        ALOGD("HDR Merge 6");
        weight_sum = 1.0f / weight_sum;
        for(int c = 0; c < channels; c++) {
            result_split[c] = result_split[c].mul(weight_sum);
        }
        ALOGD("HDR Merge 7");
        merge(result_split, result);
        exp(result, result);
        ALOGD("HDR Merge 8");
    }
    void process(InputArrayOfArrays src, OutputArray dst, InputArray times) CV_OVERRIDE
    {
        // CV_INSTRUMENT_REGION();
        process(src, dst, times, Mat());
    }
protected:
    String name;
    Mat weights;
};
Ptr<MergeDebevecNew> createMergeDebevecNew()
{
    return makePtr<MergeDebevecImplNew>();
}
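// Usage sketch (illustrative, not part of this commit): merging two
// hypothetical 8-bit exposures; times are exposure durations in seconds.
//
//     std::vector<cv::Mat> imgs = { shortExp, longExp };        // CV_8UC3
//     cv::Mat times = (cv::Mat_<float>(2, 1) << 0.0125f, 0.1f);
//     cv::Mat hdr;
//     createMergeDebevecNew()->process(imgs, hdr, times);       // linear CRF assumed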
class CV_EXPORTS_W TonemapReinhardNew
{
public:
    CV_WRAP virtual void process(InputArray src, OutputArray dst) = 0;
    virtual ~TonemapReinhardNew() {}
    CV_WRAP virtual float getGamma() const = 0;
    CV_WRAP virtual void setGamma(float gamma) = 0;
    CV_WRAP virtual float getIntensity() const = 0;
    CV_WRAP virtual void setIntensity(float intensity) = 0;
    CV_WRAP virtual float getLightAdaptation() const = 0;
    CV_WRAP virtual void setLightAdaptation(float light_adapt) = 0;
    CV_WRAP virtual float getColorAdaptation() const = 0;
    CV_WRAP virtual void setColorAdaptation(float color_adapt) = 0;
};
inline void log_(const Mat& src, Mat& dst)
{
    max(src, Scalar::all(1e-4), dst);
    log(dst, dst);
}
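// log_ clamps to 1e-4 before taking the log so fully black pixels do not
// produce -inf and skew the log-average luminance computed in process() below.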
class TonemapReinhardImpl CV_FINAL : public TonemapReinhardNew
{
public:
    TonemapReinhardImpl(float _gamma, float _intensity, float _light_adapt, float _color_adapt) :
        name("TonemapReinhardNew"),
        gamma(_gamma),
        intensity(_intensity),
        light_adapt(_light_adapt),
        color_adapt(_color_adapt)
    {
    }
    void process(InputArray _src, OutputArray _dst) CV_OVERRIDE
    {
        ALOGD("HDR 1 ");
        Mat src = _src.getMat();
        CV_Assert(!src.empty());
        _dst.create(src.size(), CV_32FC3);
        Mat img = _dst.getMat();
        Ptr<Tonemap> linear = createTonemap(1.0f);
        linear->process(src, img);
        ALOGD("HDR 2 ");
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
        log_(gray_img, log_img);
        float log_mean = static_cast<float>(sum(log_img)[0] / log_img.total());
        double log_min, log_max;
        minMaxLoc(log_img, &log_min, &log_max);
        log_img.release();
        ALOGD("HDR 3 ");
        double key = static_cast<float>((log_max - log_mean) / (log_max - log_min));
        float map_key = 0.3f + 0.7f * pow(static_cast<float>(key), 1.4f);
        intensity = exp(-intensity);
        Scalar chan_mean = mean(img);
        float gray_mean = static_cast<float>(mean(gray_img)[0]);
        std::vector<Mat> channels(3);
        split(img, channels);
        ALOGD("HDR 4 ");
#pragma omp parallel for num_threads(3)
        for (int i = 0; i < 3; i++) {
            float global = color_adapt * static_cast<float>(chan_mean[i]) + (1.0f - color_adapt) * gray_mean;
            Mat adapt = color_adapt * channels[i] + (1.0f - color_adapt) * gray_img;
            adapt = light_adapt * adapt + (1.0f - light_adapt) * global;
            pow(intensity * adapt, map_key, adapt);
            channels[i] = channels[i].mul(1.0f / (adapt + channels[i]));
        }
        gray_img.release();
        merge(channels, img);
        ALOGD("HDR 5 ");
        linear->setGamma(gamma);
        linear->process(img, img);
        ALOGD("HDR 6 ");
    }
    float getGamma() const CV_OVERRIDE { return gamma; }
    void setGamma(float val) CV_OVERRIDE { gamma = val; }
    float getIntensity() const CV_OVERRIDE { return intensity; }
    void setIntensity(float val) CV_OVERRIDE { intensity = val; }
    float getLightAdaptation() const CV_OVERRIDE { return light_adapt; }
    void setLightAdaptation(float val) CV_OVERRIDE { light_adapt = val; }
    float getColorAdaptation() const CV_OVERRIDE { return color_adapt; }
    void setColorAdaptation(float val) CV_OVERRIDE { color_adapt = val; }
    void write(FileStorage& fs) const
    {
#if 0
        writeFormat(fs);
        fs << "name" << name
           << "gamma" << gamma
           << "intensity" << intensity
           << "light_adapt" << light_adapt
           << "color_adapt" << color_adapt;
#endif
    }
    void read(const FileNode& fn)
    {
#if 0
        FileNode n = fn["name"];
        CV_Assert(n.isString() && String(n) == name);
        gamma = fn["gamma"];
        intensity = fn["intensity"];
        light_adapt = fn["light_adapt"];
        color_adapt = fn["color_adapt"];
#endif
    }
protected:
    String name;
    float gamma, intensity, light_adapt, color_adapt;
};
Ptr<TonemapReinhardNew> createTonemapReinhardNew(float gamma, float intensity, float light_adapt, float color_adapt)
{
    return makePtr<TonemapReinhardImpl>(gamma, intensity, light_adapt, color_adapt);
}
} // namespace cv2
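// Usage sketch (illustrative only), mirroring the call sites in makeHdr() below;
// the final 8-bit conversion is an assumption, not part of this commit:
//
//     cv::Mat hdr, ldr;
//     cv2::createMergeDebevecNew()->process(images, hdr, times, responseDebevec);
//     cv2::createTonemapReinhardNew(1.5f, 0.0f, 0.0f, 0.0f)->process(hdr, ldr);
//     ldr.convertTo(ldr, CV_8UC3, 255);  // scale [0,1] float output to 8-bit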
bool AndroidBitmap_CompressWriteFile(void *userContext, const void *data, size_t size)
{
    int file = (int)((size_t)userContext);
@@ -205,7 +504,7 @@ bool makeHdr(std::vector<float>& times, std::vector<std::string>& paths, cv::Mat
    // Tonemap using Reinhard's method to obtain 24-bit color image
    // cout << "Tonemapping using Reinhard's method ... ";
    cv::Mat ldrReinhard;
    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
    cv::Ptr<cv2::TonemapReinhardNew> tonemapReinhard = cv2::createTonemapReinhardNew(1.5, 0, 0, 0);
    tonemapReinhard->process(hdrDebevec, ldrReinhard);
    hdrDebevec.release();
@@ -231,15 +530,15 @@ bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& r
#endif
    // Obtain Camera Response Function (CRF)
    // cout << "Calculating Camera Response Function (CRF) ... " << endl;
    ALOGI("Calculating Camera Response Function (CRF) ... ");
    cv::Mat responseDebevec;
    cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
    calibrateDebevec->process(images, responseDebevec, times);
    // Merge images into an HDR linear image
    // cout << "Merging images into one HDR image ... ";
    ALOGI("Merging images into one HDR image ... ");
    cv::Mat hdrDebevec;
    cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
    cv::Ptr<cv2::MergeDebevecNew> mergeDebevec = cv2::createMergeDebevecNew();
    mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
    // Save HDR image.
    // imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
@@ -251,9 +550,9 @@ bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& r
    }
    // Tonemap using Reinhard's method to obtain 24-bit color image
    // cout << "Tonemapping using Reinhard's method ... ";
    ALOGI("Tonemapping using Reinhard's method ... ");
    cv::Mat ldrReinhard;
    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
    cv::Ptr<cv2::TonemapReinhardNew> tonemapReinhard = cv2::createTonemapReinhardNew(1.5, 0, 0, 0);
    tonemapReinhard->process(hdrDebevec, ldrReinhard);
    hdrDebevec.release();
@@ -397,21 +696,34 @@ Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz
    std::vector<cv::Mat> images;
    images.resize(2);
    std::vector<size_t> pngLengths;
    pngLengths.push_back(length1);
    pngLengths.push_back(length2);
    std::vector<const uint8_t*> pngDatas;
    pngDatas.resize(2);
    pngDatas[0] = (const uint8_t*)env->GetDirectBufferAddress(img1);
    pngDatas[1] = (const uint8_t*)env->GetDirectBufferAddress(img2);
    std::vector<jobject> bitmaps;
    bitmaps.push_back(img1);
    bitmaps.push_back(img2);
    ALOGI("Start Decode");
    // omp_set_num_threads(2);
#pragma omp parallel for num_threads(2)
    // #pragma omp parallel for num_threads(2)
    for (int idx = 0; idx < 2; idx++)
    {
        ConvertDngToPng(pngDatas[idx], pngLengths[idx], images[idx]);
        AndroidBitmapInfo bmpInfo = { 0 };
        AHardwareBuffer* hardwareBuffer = NULL;
        int result = AndroidBitmap_getInfo(env, bitmaps[idx], &bmpInfo);
        result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
        void* outVirtualAddress = NULL;
        int32_t fence = -1;
        result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
        // assumes tightly packed rows; a robust version would honor the buffer stride
        cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
        // copy out while the buffer is still mapped, then unlock and release
        tmp.copyTo(images[idx]);
        AHardwareBuffer_unlock(hardwareBuffer, &fence);
        AHardwareBuffer_release(hardwareBuffer); // getHardwareBuffer acquired a reference
        // convert RGBA to BGR for OpenCV processing
        cv::cvtColor(images[idx], images[idx], cv::COLOR_RGBA2BGR);
        // ConvertDngToPng(pngDatas[idx], pngLengths[idx], images[idx]);
    }
    ALOGI("End Decode");
    cv::Mat rgb;
    std::vector<float> times;

@@ -9,8 +9,10 @@ import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
@@ -139,7 +141,7 @@ public class Camera2RawFragment extends Fragment {
    public static native boolean makeHdr2(long exposureTime1, String path1, long exposureTime2, String path2, long exposureTime3, String path3, String outputPath);
    public static native boolean makeHdr3(long exposureTime1, ByteBuffer img1, int length1, long exposureTime2, ByteBuffer img2, int length2, String outputPath);
    public static native boolean makeHdr3(long exposureTime1, Bitmap img1, int length1, long exposureTime2, Bitmap img2, int length2, String outputPath);
    private int mExposureComp = MainActivity.ExposureComp;
@@ -147,7 +149,7 @@ public class Camera2RawFragment extends Fragment {
    private Integer sensitivity;
    private double pic1 = 0;
    private int pic1 = 0;
    /**
     * {@link TextureView.SurfaceTextureListener} handles several lifecycle events of a
@@ -1100,101 +1102,112 @@ public class Camera2RawFragment extends Fragment {
Rational rational = mCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
double step = rational.doubleValue();
// mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
ArrayList<PngPhotoBean> mlist = new ArrayList<>();
List<CaptureRequest> requests = new ArrayList<>();
double v = 0;
if (pic1 < 21) {
// mCharacteristics.get(CameraMetadata.CONTROL_AE_COMPENSATION_STEP)
ArrayList<PngPhotoBean> mlist = new ArrayList<>();
List<CaptureRequest> requests = new ArrayList<>();
double v = 0;
ImageSaver.ImagePair imagePair = new ImageSaver.ImagePair(2);
ImageSaver.ImagePairRunnable runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override
public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages();
final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() {
@Override
public void run() {
// makeHdr3()
if (images.size() != 2) {
return;
ImageSaver.ImagePair imagePair = new ImageSaver.ImagePair(2);
ImageSaver.ImagePairRunnable runnable = new ImageSaver.ImagePairRunnable(imagePair) {
@Override
public void run() {
final List<ImageSaver.ImageInfo> images = imagePair.getImages();
final String outputPath = "/sdcard/DCIM/";
new Thread(new Runnable() {
@Override
public void run() {
// makeHdr3()
if (images.size() != 2) {
return;
}
ImageSaver.ImageInfo img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1);
makeHdr3(img1.exposureTime, img1.bitmap, img1.length, img2.exposureTime, img2.bitmap, img2.length, outputPath + "HDR_" + HdrUtil.generateTimestamp() + ".jpg");
}
ImageSaver.ImageInfo img1 = images.get(0);
ImageSaver.ImageInfo img2 = images.get(1);
makeHdr3(img1.exposureTime, img1.byteBuffer, img1.length, img2.exposureTime, img2.byteBuffer, img2.length, outputPath + "HDR_" + HdrUtil.generateTimestamp() + ".jpg");
}
}).start();
}
};
}).start();
}
};
imagePair.setRunnable(runnable);
imagePair.setRunnable(runnable);
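// Flow note: two CaptureRequests are queued as a burst (a base exposure and a
// longer one); ImageSaver decodes each completed frame into a Bitmap and hands
// it to this ImagePair, whose runnable invokes the native makeHdr3 once both
// frames have arrived.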
for (int idx = 0; idx < 2; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
for (int idx = 0; idx < 2; idx++) {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
if (idx == 0) {
if (idx == 0) {
// captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, new Integer(4));
// Set the exposure time, e.g. to 1000 microseconds
// Set the exposure time, e.g. to 1000 microseconds
// long exposureTime = 1000 000000L; // 1000 microseconds
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
v = exposureTime;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
if (idx == 1) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
if (pic1 <= 0) {
v = exposureTime * 7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
} else {
v = exposureTime * pic1;
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
v = exposureTime;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
}
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
if (idx == 1) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
if (exposureTime > 0) {
if (pic1 <= 0) {
v = exposureTime * 7;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
} else {
v = exposureTime * pic1;
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) v);
}
}
if (sensitivity > 0) {
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
}
}
}
CaptureRequest request = captureBuilder.build();
CaptureRequest request = captureBuilder.build();
// ImageSaverBuilder jpegBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // save the capture parameters
rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
rawBuilder.setList(mlist);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics); // save the capture parameters
rawBuilder.setImagePair(imagePair);
rawBuilder.setCallback(new CompleteCallback() {
@Override
public void onResult() {
showToast("HDR拍摄成功");
}
});
rawBuilder.setList(mlist);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
requests.add(request);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
} else {
// Set request tag to easily track results in callbacks.
captureBuilder.setTag(mRequestCounter.getAndIncrement());
// captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 100);
captureBuilder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
captureBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR);
CaptureRequest request = captureBuilder.build();
// Create an ImageSaverBuilder in which to collect results, and add it to the queue
// of active requests.
// ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
ImageSaverBuilder rawBuilder = new ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
mRawResultQueue.put((int) request.getTag(), rawBuilder);
requests.add(request);
mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
}
mCaptureSession.captureBurst(requests, mCaptureCallback, mBackgroundHandler);
//
// // Set request tag to easily track results in callbacks.
// captureBuilder.setTag(mRequestCounter.getAndIncrement());
//
// CaptureRequest request = captureBuilder.build();
//
// // Create an ImageSaverBuilder in which to collect results, and add it to the queue
// // of active requests.
// ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
// ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity).setCharacteristics(mCharacteristics);
//
// mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
// mRawResultQueue.put((int) request.getTag(), rawBuilder);
//
// mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {

@@ -4,6 +4,8 @@ import static com.xypower.mppreview.Camera2RawFragment.makeHdr;
import static com.xypower.mppreview.HdrUtil.generateTimestamp;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageDecoder;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
@@ -15,6 +17,8 @@ import android.os.Environment;
import android.util.Log;
import android.widget.Toast;
import androidx.annotation.NonNull;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.bean.PngPhotoBean;
@@ -43,12 +47,12 @@ public class ImageSaver implements Runnable {
public static class ImageInfo {
public long exposureTime;
public int length;
public ByteBuffer byteBuffer;
public Bitmap bitmap;
ImageInfo(ByteBuffer bb, int length, long exp) {
ImageInfo(Bitmap bmp, int length, long exp) {
this.exposureTime = exp;
this.length = length;
byteBuffer = bb;
bitmap = bmp;
}
}
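// Note: ImageInfo now carries a decoded Bitmap instead of the raw DNG bytes;
// on this path 'length' is unused (addImage passes 0) and the native side
// reads the pixels through the bitmap's hardware buffer.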
@@ -67,9 +71,9 @@ public class ImageSaver implements Runnable {
mRunnable = runnable;
}
public void addImage(ByteBuffer byteBuffer, int length, long exp) {
public void addImage(Bitmap bitmap, int length, long exp) {
boolean isFull = false;
ImageInfo imageInfo = new ImageInfo(byteBuffer, length, exp);
ImageInfo imageInfo = new ImageInfo(bitmap, length, exp);
synchronized (mImages) {
mImages.add(imageInfo);
isFull = (mImages.size() == mExpectedCount);
@@ -136,26 +140,78 @@
case ImageFormat.RAW_SENSOR: {
DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
ByteBuffer byteBuffer = null;
ByteBufferOutputStream baos = null;
Long t = mCaptureResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
try {
byteBuffer = ByteBuffer.allocateDirect(mImage.getWidth() * mImage.getHeight() * 2 + 81768);
baos = new ByteBufferOutputStream(byteBuffer);
Log.d(TAG, "Before Saving DNG");
dngCreator.writeImage(baos, mImage);
Log.d(TAG, "After Saving DNG pos=" + byteBuffer.position());
mImagePair.addImage(byteBuffer, byteBuffer.position(), t.longValue());
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(baos);
if (mImagePair != null) {
ByteBuffer byteBuffer = null;
ByteBufferOutputStream baos = null;
Long t = mCaptureResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
try {
byteBuffer = ByteBuffer.allocateDirect(mImage.getWidth() * mImage.getHeight() * 2 + 81768);
baos = new ByteBufferOutputStream(byteBuffer);
Log.d(TAG, "Before Saving DNG");
dngCreator.writeImage(baos, mImage);
byteBuffer.limit(byteBuffer.position());
byteBuffer.flip();
Log.d(TAG, "After Saving DNG pos=" + byteBuffer.position());
/*
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
dngCreator.writeImage(output, mImage);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
*/
ImageDecoder.OnHeaderDecodedListener listener = new ImageDecoder.OnHeaderDecodedListener() {
@Override
public void onHeaderDecoded(@NonNull ImageDecoder decoder, @NonNull ImageDecoder.ImageInfo info, @NonNull ImageDecoder.Source source) {
decoder.setAllocator(ImageDecoder.ALLOCATOR_HARDWARE);
}
};
Log.i("HDR", "Start Hardware Decode");
ImageDecoder imageDecoder = null;
Bitmap bmp = null;
// ImageDecoder.Source source = ImageDecoder.createSource(mFile);
ImageDecoder.Source source = ImageDecoder.createSource(byteBuffer);
try {
bmp = ImageDecoder.decodeBitmap(source, listener);
} catch (Exception ex) {
ex.printStackTrace();
}
Log.i("HDR", "End Hardware Decode");
byteBuffer = null;
mImagePair.addImage(bmp, 0, t.longValue());
success = true;
} catch (Exception e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(baos);
}
} else {
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
dngCreator.writeImage(output, mImage);
success = true;
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
closeOutput(output);
}
}
break;
}
default: {

@@ -22,5 +22,6 @@
<item>X18</item>
<item>X19</item>
<item>X20</item>
<item>X0</item>
</string-array>
</resources>