Optimize photo capture code

TempBranch
Matthew 8 months ago
parent 823ad5a992
commit 9f4bf01718

@@ -195,7 +195,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
#endif
#if 0
{
struct sigaction sig_action = {};
sig_action.sa_sigaction = posix_signal_handler;
@@ -223,6 +223,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
// }
env->DeleteLocalRef(clazz);
#endif
return result;
}

@@ -46,6 +46,18 @@ extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
// are normalized to eight bits.
static const int kMaxChannelValue = 262143;
class ByteArraysPointer
{
public:
ByteArraysPointer()
{
}
~ByteArraysPointer()
{
byteArrays.clear();
}
std::vector<std::vector<uint8_t> > byteArrays;
};
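ByteArraysPointer gives the burst frame buffers shared ownership: the vectors are swapped in without copying and freed when the last std::shared_ptr reference, typically held by a detached worker lambda, is dropped. A minimal sketch of that hand-off (the helper name is hypothetical), assuming only the class above:

// Sketch: hand frame buffers to a detached worker via shared ownership.
#include <cstdint>
#include <memory>
#include <thread>
#include <vector>

void handOffFrames(std::vector<std::vector<uint8_t>>& frames)
{
    auto holder = std::make_shared<ByteArraysPointer>();
    holder->byteArrays.swap(frames);      // steal the buffers, no copy
    std::thread([holder]() {
        // ... consume holder->byteArrays (e.g. run the HDR+ pipeline) ...
    }).detach();                          // buffers live until the lambda's
}                                         // shared_ptr reference is dropped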
cv::Mat convert16bit2_8bit_(cv::Mat ans){
if(ans.type()==CV_16UC3){
@@ -181,6 +193,24 @@ bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb)
return false;
}
bool CPhoneDevice::CPhoneCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
{
if (m_dev != NULL)
{
return m_dev->onOneCapture(characteristics, result, ldr, rgb);
}
return false;
}
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
{
if (m_dev != NULL)
{
return m_dev->onBurstCapture(characteristics, results, ldr, frames);
}
return false;
}
bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
if (m_dev != NULL)
@@ -211,6 +241,24 @@ CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t
{
}
bool CPhoneDevice::CJpegCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
{
if (m_dev != NULL)
{
return m_dev->onOneCapture(characteristics, result, ldr, rgb);
}
return false;
}
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
{
if (m_dev != NULL)
{
m_dev->onBurstCapture(characteristics, results, ldr, frames);
}
return true;
}
bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
if (m_dev != NULL)
@@ -1512,6 +1560,323 @@ void DrawOutlineText(cv::Ptr<cv::ft::FreeType2> ft2, cv::Mat& mat, const std::st
}
}
bool CPhoneDevice::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics,
std::shared_ptr<ACameraMetadata> result,
uint32_t ldr, cv::Mat rgb)
{
time_t takingTime = time(NULL);
if (mPhotoInfo.remedy != 0)
{
if ((takingTime - mPhotoInfo.scheduleTime) > 30)
{
takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
}
}
mPhotoInfo.photoTime = takingTime;
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
PHOTO_INFO photoInfo = mPhotoInfo;
std::string path;
path.swap(mPath);
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
ACameraMetadata_const_entry e = { 0 };
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
if (status == ACAMERA_OK)
{
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
int sensorOrientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
if (status == ACAMERA_OK)
{
sensorOrientation = (int)e.data.i32[0];
}
}
bool turnOffOtg = (photoInfo.usbCamera != 0);
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
media_status_t mstatus;
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
m_threadClose.swap(closeThread);
if (closeThread.joinable())
{
closeThread.detach();
}
CPhoneDevice* pThis = this;
std::thread th([pThis, characteristics, result, photoInfo, osds, path, rgb, facing, sensorOrientation, ldr, takingTime]()mutable
{
std::string cameraInfo;
if (photoInfo.outputDbgInfo != 0)
{
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
NdkCamera::EnumCameraResult(result.get(), captureResult);
char extimeunit[4] = { 0 };
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
char str[128] = { 0 };
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
captureResult.autoExposure, captureResult.autoFocus,
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
(uint32_t)captureResult.duration, captureResult.frameDuration);
cameraInfo = str;
}
#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
bool shouldRetry = false;
if (ldr != ~0)
{
if (ldr < MIN_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
// photoInfo.usingRawFormat = 1;
}
}
else if (ldr > MAX_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
}
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
}
}
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO
// Notify to take next photo
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
if (res)
{
// TakePhotoCb(2, photoInfo, path, takingTime);
}
});
th.detach();
return true;
}
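The CloseCamera2 hand-off above depends on std::thread::swap ordering: after the swap, m_threadClose tracks the newly started close thread while the local variable holds the previously stored one, which must be detached because destroying a joinable std::thread terminates the process. A reduced sketch of the idiom, with hypothetical names:

// Sketch: swap-then-detach for a std::thread member (hypothetical names).
#include <thread>

struct Owner
{
    std::thread worker;
    void restart()
    {
        std::thread fresh([] { /* close hardware, flush logs, ... */ });
        worker.swap(fresh);     // 'worker' now tracks the new thread
        if (fresh.joinable())   // 'fresh' holds the previous one, if any
        {
            fresh.detach();     // detach it: ~thread() on a joinable
        }                       // thread calls std::terminate()
    }
};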
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
std::vector<std::shared_ptr<ACameraMetadata> >& results,
uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
{
time_t takingTime = time(NULL);
if (mPhotoInfo.remedy != 0)
{
if ((takingTime - mPhotoInfo.scheduleTime) > 30)
{
takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
}
}
mPhotoInfo.photoTime = takingTime;
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
PHOTO_INFO photoInfo = mPhotoInfo;
std::string path;
path.swap(mPath);
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
std::shared_ptr<ByteArraysPointer> pByteArrays = std::make_shared<ByteArraysPointer>();
pByteArrays.get()->byteArrays.swap(frames);
bool turnOffOtg = (photoInfo.usbCamera != 0);
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
m_threadClose.swap(closeThread);
if (closeThread.joinable())
{
closeThread.detach();
}
CPhoneDevice* pThis = this;
std::thread th([pThis, characteristics, results, photoInfo, osds, path, pByteArrays, ldr, takingTime]()mutable
{
cv::Mat rgb;
std::string cameraInfo;
media_status_t mstatus;
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
ACameraMetadata_const_entry e = { 0 };
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
if (status == ACAMERA_OK)
{
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
int sensorOrientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
if (status == ACAMERA_OK)
{
sensorOrientation = (int)e.data.i32[0];
}
}
if (photoInfo.outputDbgInfo != 0)
{
if (!results.empty())
{
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
NdkCamera::EnumCameraResult(results[0].get(), captureResult);
char extimeunit[4] = { 0 };
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
char str[128] = { 0 };
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
captureResult.autoExposure, captureResult.autoFocus,
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
(uint32_t)captureResult.duration, captureResult.frameDuration);
cameraInfo = str;
}
}
#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
bool shouldRetry = false;
if (ldr != ~0)
{
if (ldr < MIN_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
// photoInfo.usingRawFormat = 1;
}
}
else if (ldr > MAX_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
}
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
}
}
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO
// Notify to take next photo
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
hdrplus::hdrplus_pipeline pipeline;
std::vector<std::vector<uint8_t> > localFrames;
localFrames.swap(pByteArrays.get()->byteArrays);
pipeline.run_pipeline(localFrames, 0, rgb);
localFrames.clear();
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
{
cv::Mat tempPic = convert16bit2_8bit_(rgb);
rgb = tempPic;
}
if (photoInfo.orientation > 0)
{
if (photoInfo.orientation == 1)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
cv::flip(rgb, rgb, 1);
}
}
else if (photoInfo.orientation == 2)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 1);
}
else if (photoInfo.orientation == 3)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
flip(rgb, rgb, 0);
}
else
{
cv::flip(rgb, rgb, -1);
}
}
else if (photoInfo.orientation == 4)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 0);
}
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
}
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
if (res)
{
// TakePhotoCb(2, photoInfo, path, takingTime);
}
});
th.detach();
return true;
}
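The orientation branches above are the standard transpose/flip identities: with codes 1..4 standing for 0°, 90°, 180° and 270°, transpose followed by flip(1) is a 90° clockwise rotation and transpose followed by flip(0) a 90° counter-clockwise one. The same mapping written with cv::rotate, as a sketch assuming the 1..4 encoding used here:

// Sketch: cv::rotate equivalents of the transpose/flip pairs above.
#include <opencv2/core.hpp>

void applyOrientation(cv::Mat& img, int orientation)
{
    switch (orientation)
    {
    case 2: cv::rotate(img, img, cv::ROTATE_90_CLOCKWISE);        break; // transpose + flip(1)
    case 3: cv::rotate(img, img, cv::ROTATE_180);                 break; // flip(-1)
    case 4: cv::rotate(img, img, cv::ROTATE_90_COUNTERCLOCKWISE); break; // transpose + flip(0)
    default: break; // 1 = upright; the front camera additionally mirrors
    }
}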
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics,
std::vector<std::shared_ptr<ACameraMetadata> >& results,
uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
@@ -1532,7 +1897,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
std::string path;
path.swap(mPath);
std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
// std::string tmpPath = m_appPath + std::string(APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
ACameraMetadata_const_entry e = { 0 };
@@ -1556,38 +1921,37 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
std::thread th([=]()mutable
{
cv::Mat rgb;
std::vector<std::vector<uint8_t> > rawFiles;
std::vector<std::shared_ptr<hdrplus::MemFile> > rawFiles;
media_status_t mstatus;
std::string cameraInfo;
if (photoInfo.usingRawFormat != 0)
{
//
for (int idx = 0; idx < frames.size(); idx++)
{
std::shared_ptr<AImage> spImage = frames[idx];
std::shared_ptr<ACameraMetadata> result = results[idx];
std::shared_ptr<ACameraMetadata> spResult = results[idx];
auto it = rawFiles.insert(rawFiles.end(), std::vector<uint8_t>());
hdrplus::MemFile* rawImage = new hdrplus::MemFile();
rawFiles.push_back(std::shared_ptr<hdrplus::MemFile>(rawImage));
// rawImage->FromAImage(spImage.get(), characteristics.get(), spResult.get());
int32_t width;
int32_t height;
AImage_getWidth(spImage.get(), &width);
AImage_getHeight(spImage.get(), &height);
int32_t width = 0;
int32_t height = 0;
mstatus = AImage_getWidth(spImage.get(), &width);
mstatus = AImage_getHeight(spImage.get(), &height);
int planeCount;
media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
int32_t planeCount = 0;
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *planeData = NULL;
int planeDataLen = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
DngCreator dngCreator(characteristics.get(), result.get());
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
ALOGD("Start Converting Dng");
DngCreator dngCreator(characteristics.get(), spResult.get());
dngCreator.writeInputBuffer(rawImage->content, planeData, planeDataLen, width, height, 0);
ALOGD("End Converting Dng");
}
}
else
@@ -1680,6 +2044,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
}
frames.clear();
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
m_threadClose.swap(closeThread);
if (closeThread.joinable())
@@ -1687,6 +2052,10 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
closeThread.detach();
}
CPhoneDevice* pThis = this;
std::thread th([pThis, characteristics, results, photoInfo, osds, path, rgb, rawFiles, facing, sensorOrientation, ldr, takingTime]()mutable
{
std::string cameraInfo;
if (photoInfo.outputDbgInfo != 0)
{
if (!results.empty())
@@ -1749,14 +2118,16 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
#endif // OUTPUT_CAMERA_DBG_INFO
// Notify to take next photo
TakePhotoCb(1, photoInfo, "", takingTime);
pThis->TakePhotoCb(1, photoInfo, "", takingTime);
if (photoInfo.usingRawFormat != 0)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
hdrplus::hdrplus_pipeline pipeline;
pipeline.run_pipeline(rawFiles, 0, rgb);
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
rawFiles.clear();
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
#ifdef NDEBUG
for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it)
@@ -1806,7 +2177,7 @@ bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristi
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
}
bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
bool res = pThis->PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
if (res)
{
// TakePhotoCb(2, photoInfo, path, takingTime);

@@ -161,6 +161,8 @@ public:
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
protected:
@@ -174,6 +176,8 @@ public:
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
protected:
@@ -275,6 +279,8 @@ protected:
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat& mat);
bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, cv::Mat rgb);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
void onError(const std::string& msg);
void onDisconnected(ACameraDevice* device);

@@ -138,10 +138,12 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mPreviewImageReader = NULL;
mPreviewImageWindow = NULL;
mPreviewOutputTarget = NULL;
mPreviewSessionOutput = NULL;
mImageReader = NULL;
mImageWindow = NULL;
mOutputTarget = NULL;
mSessionOutput = NULL;
camera_device = 0;
@@ -587,7 +589,7 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 2, &mImageReader);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures, &mImageReader);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@@ -1170,7 +1172,7 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
return;
}
if (mLdr == ~0)
// if (mLdr == ~0)
{
uint8_t* y_data = 0;
int y_len = 0;
@@ -1182,13 +1184,138 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
#endif
avgY = avgY / (uint64_t)y_len;
m_locker.lock();
mLdr = avgY;
m_locker.unlock();
}
AImage_delete(image);
return;
}
else
{
uint32_t burstCaptures = m_params.burstCaptures;
if (burstCaptures == 0)
{
burstCaptures = 1;
}
if (burstCaptures == 1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < m_params.burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
}
return;
}
int32_t format;
mstatus = AImage_getFormat(image, &format);
if (format == AIMAGE_FORMAT_YUV_420_888)
{
int32_t width;
int32_t height;
mstatus = AImage_getWidth(image, &width);
mstatus = AImage_getHeight(image, &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(image, 0, &y_rowStride);
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
uint8_t* y_data = 0;
uint8_t* u_data = 0;
uint8_t* v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
{
// already nv21
ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
}
else
{
// construct nv21
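// NV21 layout: a full-resolution Y plane (width*height bytes) followed by one
// half-resolution interleaved VU plane (width*height/2 bytes): V0 U0 V1 U1 ...
// The fast path above detects buffers already laid out this way; this branch
// repacks arbitrary pixel/row strides into that layout before conversion.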
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
{
// Y
uint8_t* yptr = nv21;
for (int y = 0; y < height; y++)
{
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
for (int x = 0; x < width; x++)
{
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
uint8_t* uvptr = nv21 + width * height;
for (int y = 0; y < height / 2; y++)
{
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
for (int x = 0; x < width / 2; x++)
{
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
ConvertYUV21ToMat(nv21, width, height, mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
delete[] nv21;
}
}
m_photoTaken = true;
AImage_delete(image);
std::shared_ptr<ACameraMetadata> result;
bool captureCompleted = false;
m_locker.lock();
if (!mCaptureResults.empty())
{
captureCompleted = true;
result = mCaptureResults[0];
}
m_locker.unlock();
if (captureCompleted)
{
onOneCapture(mCharacteristics, result, mLdr, mOneFrame);
}
}
else
{
while (1)
{
@@ -1222,9 +1349,11 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
if (captureCompleted)
{
onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
FireBurstCapture();
}
}
}
}
void NdkCamera::on_error(const std::string& msg)
@@ -1240,6 +1369,16 @@ bool NdkCamera::on_image(cv::Mat& rgb)
return false;
}
bool NdkCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
return false;
@@ -1572,24 +1711,89 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
else
{
#ifdef _DEBUG
uint64_t tid = getThreadIdOfULL();
ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
if (expectedTimes == 1)
{
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = !mOneFrame.empty();
m_locker.unlock();
if (captureCompleted)
{
onOneCapture(mCharacteristics, captureResult, mLdr, mOneFrame);
}
}
else
{
m_locker.lock();
mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
m_locker.unlock();
if (captureCompleted)
{
onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
FireBurstCapture();
}
}
}
}
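onImageAvailable and onCaptureCompleted both evaluate the completion predicate under m_locker, so whichever callback arrives last sees frames and results complete and fires the burst, while the earlier one saw the predicate false and returned. A stripped-down sketch of that rendezvous, with hypothetical simplified types:

// Sketch: two producer callbacks meeting on a shared completion predicate.
#include <mutex>
#include <vector>

struct BurstRendezvous
{
    std::mutex m;
    std::vector<int> frames, results; // stand-ins for AImage / ACameraMetadata
    size_t expected = 3;

    // Returns true once both sides have reached 'expected'; with exactly
    // 'expected' calls per side, only the final call sees the transition.
    bool addFrame(int f)  { std::lock_guard<std::mutex> g(m); frames.push_back(f);  return ready(); }
    bool addResult(int r) { std::lock_guard<std::mutex> g(m); results.push_back(r); return ready(); }

private:
    bool ready() const { return frames.size() >= expected && results.size() >= expected; }
};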
void NdkCamera::FireBurstCapture()
{
size_t expectedTimes = mCaptureRequests.size() - 1;
std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
uint32_t ldr;
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mLdr;
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
m_locker.unlock();
media_status_t mstatus;
std::vector<std::vector<uint8_t> > frames;
for (int idx = 0; idx < expectedTimes; idx++)
{
std::shared_ptr<AImage> spImage = captureFrames[idx];
std::shared_ptr<ACameraMetadata> spResult = captureResults[idx];
auto it = frames.insert(frames.end(), std::vector<uint8_t>());
int32_t width = 0;
int32_t height = 0;
mstatus = AImage_getWidth(spImage.get(), &width);
mstatus = AImage_getHeight(spImage.get(), &height);
int32_t planeCount = 0;
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *planeData = NULL;
int planeDataLen = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
ALOGD("Start Converting Dng");
DngCreator dngCreator(mCharacteristics.get(), spResult.get());
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
ALOGD("End Converting Dng");
}
captureFrames.clear();
onBurstCapture(mCharacteristics, captureResults, ldr, frames);
}
void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult)
{
camera_status_t status = ACAMERA_ERROR_BASE;
@@ -1912,7 +2116,6 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
}
}
// on_image((unsigned ch
}
}

@@ -166,6 +166,9 @@ public:
virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
@@ -176,6 +179,8 @@ public:
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
void FireBurstCapture();
uint32_t GetLdr() const
{
return mLdr;
@@ -252,6 +257,9 @@ protected:
uint32_t mLdr;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
cv::Mat mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;

@@ -10,11 +10,29 @@
namespace hdrplus
{
class MemFile
{
public:
std::vector<uint8_t> content;
const std::vector<uint8_t>& GetConstData() const
{
return content;
}
std::vector<uint8_t>& GetData()
{
return content;
}
};
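MemFile is a minimal in-memory stand-in for a RAW file on disk, letting the pipeline consume DNG buffers produced by DngCreator without a filesystem round-trip. For offline testing it could be filled from a file; a sketch with a hypothetical helper:

// Hypothetical helper: load a DNG from disk into a MemFile for testing.
#include <fstream>
#include <iterator>
#include <memory>
#include <string>

std::shared_ptr<hdrplus::MemFile> loadMemFile(const std::string& path)
{
    auto mf = std::make_shared<hdrplus::MemFile>();
    std::ifstream in(path, std::ios::binary);
    mf->content.assign(std::istreambuf_iterator<char>(in),
                       std::istreambuf_iterator<char>());
    return mf;
}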
class bayer_image
{
public:
explicit bayer_image( const std::string& bayer_image_path );
explicit bayer_image( const std::vector<uint8_t>& bayer_image_content );
explicit bayer_image( std::shared_ptr<MemFile> bayer_image_file );
~bayer_image() = default;
std::pair<double, double> get_noise_params() const;

@@ -8,12 +8,14 @@
namespace hdrplus
{
class burst
{
public:
explicit burst( const std::string& burst_path, const std::string& reference_image_path );
explicit burst(const std::vector<std::string>& burst_paths, int reference_image_index);
explicit burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, int reference_image_index );
explicit burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index );
~burst() = default;

@@ -21,6 +21,7 @@ class hdrplus_pipeline
void run_pipeline( const std::string& burst_path, const std::string& reference_image_path );
bool run_pipeline( const std::vector<std::string>& burst_paths, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::vector<uint8_t> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::shared_ptr<MemFile> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
hdrplus_pipeline() = default;
~hdrplus_pipeline() = default;

@@ -141,6 +141,74 @@ bayer_image::bayer_image( const std::vector<uint8_t>& bayer_image_content )
#endif
}
bayer_image::bayer_image( std::shared_ptr<MemFile> bayer_image_file )
{
libraw_processor = std::make_shared<LibRaw>();
// Open RAW image file
int return_code;
{
std::vector<uint8_t>& fileData = bayer_image_file->content;
if ( ( return_code = libraw_processor->open_buffer( (void *)(&fileData[0]), fileData.size() ) ) != LIBRAW_SUCCESS )
{
libraw_processor->recycle();
#ifdef __ANDROID__
return;
#else
throw std::runtime_error("Error opening file " + bayer_image_path + " " + libraw_strerror( return_code ));
#endif
}
}
// Unpack the raw image
if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
{
#ifdef __ANDROID__
return;
#else
throw std::runtime_error("Error unpack file " + bayer_image_path + " " + libraw_strerror( return_code ));
#endif
}
// Get image basic info
width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
// Read exif tags
Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(&bayer_image_file->content[0], bayer_image_file->content.size());
assert(image.get() != 0);
image->readMetadata();
Exiv2::ExifData &exifData = image->exifData();
if (exifData.empty()) {
std::string error = "No Exif data found in the file";
std::cout << error << std::endl;
}
white_level = exifData["Exif.Image.WhiteLevel"].toLong();
black_level_per_channel.resize( 4 );
black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
// Create CV mat
// https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
// https://www.libraw.org/node/2141
raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
// 2x2 box filter
grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
#ifndef NDEBUG
printf("%s::%s read bayer image with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \
__FILE__, __func__, width, height, iso, white_level, \
black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
fflush( stdout );
#endif
}
std::pair<double, double> bayer_image::get_noise_params() const
{
// Set ISO to 100 if not positive

@@ -248,4 +248,74 @@ burst::burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, in
#endif
}
burst::burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index )
{
// Number of images
num_images = bayer_image_files.size();
// Validate the reference image index
reference_image_idx = -1;
if ( reference_image_index >= 0 && reference_image_index < bayer_image_files.size() )
{
reference_image_idx = reference_image_index;
}
if ( reference_image_idx == -1 )
{
return;
// throw std::runtime_error("Error reference image index is out of range " );
}
#ifndef NDEBUG
printf("%s::%s reference image idx %d\n", \
__FILE__, __func__, reference_image_idx );
#endif
// Get source bayer image
// Downsample original bayer image by 2x2 box filter
for ( const auto& bayer_image_file : bayer_image_files )
{
bayer_images.emplace_back( bayer_image_file );
}
// Pad information
int tile_size_bayer = 32;
int padding_top = tile_size_bayer / 2;
int padding_bottom = tile_size_bayer / 2 + \
( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
int padding_left = tile_size_bayer / 2;
int padding_right = tile_size_bayer / 2 + \
( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
// Pad bayer image
for ( const auto& bayer_image_i : bayer_images )
{
cv::Mat bayer_image_pad_i;
cv::copyMakeBorder( bayer_image_i.raw_image, \
bayer_image_pad_i, \
padding_top, padding_bottom, padding_left, padding_right, \
cv::BORDER_REFLECT );
// cv::Mat use internal reference count
bayer_images_pad.emplace_back( bayer_image_pad_i );
grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
}
#ifndef NDEBUG
printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
__FILE__, __func__, \
bayer_images[ 0 ].height, \
bayer_images[ 0 ].width, \
bayer_images_pad[ 0 ].size().height, \
bayer_images_pad[ 0 ].size().width );
printf("%s::%s pad top %d, buttom %d, left %d, right %d\n", \
__FILE__, __func__, \
padding_top, padding_bottom, padding_left, padding_right );
#endif
}
} // namespace hdrplus
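The padding above extends each side by half a bayer tile (16 px) and rounds the bottom and right out to the next multiple of the 32-px tile, so the padded dimensions are tile-aligned. A quick check of the formula on a hypothetical 4000x3000 frame:

// Sketch: the pad-to-tile computation, checked on a hypothetical 4000x3000 frame.
#include <cassert>

int padLow(int tile)           { return tile / 2; }
int padHigh(int dim, int tile) { int r = dim % tile; return tile / 2 + (r == 0 ? 0 : tile - r); }

int main()
{
    const int tile = 32;
    assert(padHigh(3000, tile) == 24);                               // 16 + (32 - 3000 % 32)
    assert((3000 + padLow(tile) + padHigh(3000, tile)) % tile == 0); // 3040 = 95 * 32
    assert((4000 + padLow(tile) + padHigh(4000, tile)) % tile == 0); // 4032 = 126 * 32
    return 0;
}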

@@ -102,4 +102,37 @@ bool hdrplus_pipeline::run_pipeline( \
return true;
}
bool hdrplus_pipeline::run_pipeline( \
const std::vector<std::shared_ptr<MemFile> >& burst_files, \
int reference_image_index, cv::Mat& finalImg )
{
// Create burst of images
burst burst_images( burst_files, reference_image_index );
std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
#ifdef __ANDROID__
ALOGI("Finish loading images");
#endif
// Run align
align_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish align");
#endif
// Run merging
merge_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish merging");
#endif
// Run finishing
finish_module.process( burst_images, finalImg);
#ifdef __ANDROID__
ALOGI("Finish process");
#endif
return true;
}
} // namespace hdrplus
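Taken together with FireBurstCapture above, the new overload completes the on-device flow: capture RAW frames, DNG-encode them into MemFile buffers, then merge. A condensed usage sketch (the wrapper function is hypothetical):

// Hypothetical wrapper showing the new MemFile-based overload in use.
#include <memory>
#include <vector>
#include <opencv2/core.hpp>

cv::Mat mergeBurst(const std::vector<std::shared_ptr<hdrplus::MemFile>>& burstFiles)
{
    hdrplus::hdrplus_pipeline pipeline;
    cv::Mat finalImg;
    pipeline.run_pipeline(burstFiles, /* reference_image_index = */ 0, finalImg);
    return finalImg; // 16-bit RGB upstream; see convert16bit2_8bit_ in the caller
}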
