NDK Implementation of RAW-Format Photo Capture

Branch: TempBranch
Author: Matthew, 8 months ago
parent 0779d47b36
commit cbf3dce87e

@@ -52,6 +52,7 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
debug {
+minifyEnabled false
jniDebuggable true
testCoverageEnabled false
}

@@ -8,6 +8,7 @@
#include "GPIOControl.h"
#include "CvText.h"
#include "PositionHelper.h"
+#include "DngCreator.h"
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
@@ -21,6 +22,7 @@
#include <android/thermal.h>
#include <android/imagedecoder.h>
#include <sys/system_properties.h>
+#include <media/NdkImage.h>
#include <mat.h>
#ifdef USING_HDRPLUS
@@ -29,6 +31,7 @@
#include <fcntl.h>
#include <filesystem>
+#include <cstdio>
namespace fs = std::filesystem;
#define CMD_SET_485_EN_STATE 131
@@ -159,7 +162,7 @@ static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
return 0xff000000 | (nR << 16) | (nG << 8) | nB;
}
-CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures) : NdkCamera(width, height, params, burstCaptures), m_dev(dev)
+CPhoneDevice::CPhoneCamera::CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params) : NdkCamera(width, height, params), m_dev(dev)
{
}
@@ -178,9 +181,13 @@ bool CPhoneDevice::CPhoneCamera::on_image(cv::Mat& rgb)
return false;
}
-bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames)
+bool CPhoneDevice::CPhoneCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
-return true;
+if (m_dev != NULL)
+{
+return m_dev->onBurstCapture(characteristics, results, ldr, frames);
+}
+return false;
}
void CPhoneDevice::CPhoneCamera::on_error(const std::string& msg)
@@ -204,8 +211,12 @@ CPhoneDevice::CJpegCamera::CJpegCamera(CPhoneDevice* dev, int32_t width, int32_t
{
}
-bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames)
+bool CPhoneDevice::CJpegCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
+if (m_dev != NULL)
+{
+m_dev->onBurstCapture(characteristics, results, ldr, frames);
+}
return true;
}
@@ -242,8 +253,7 @@ void CPhoneDevice::CJpegCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
#endif
avgY = avgY / (uint64_t)y_len;
-mResult.avgY = avgY;
-mFinalResult.avgY = avgY;
+mLdr = avgY;
#if 1
if (avgY < 50)
{
@@ -268,10 +278,6 @@
}
#endif
-XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState);
-mFinalResult.duration = GetMicroTimeStamp() - m_startTime;
int32_t format;
AImage_getFormat(image, &format);
@@ -1337,6 +1343,8 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
params.requestTemplate = mPhotoInfo.requestTemplate;
params.awbMode = mPhotoInfo.awbMode;
params.wait3ALocked = mPhotoInfo.wait3ALocked;
+params.burstRawCapture = mPhotoInfo.usingRawFormat;
+params.burstCaptures = mPhotoInfo.burstCaptures;
if (params.requestTemplate <= 0 || params.requestTemplate > 5)
{
params.requestTemplate = 2;
@@ -1363,7 +1371,7 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
TurnOnCameraPower(NULL);
res = true;
-if (mPhotoInfo.mediaType == 0 && mPhotoInfo.usingRawFormat == 0)
+if (mPhotoInfo.mediaType == 0/* && mPhotoInfo.usingRawFormat == 0*/)
{
mCamera = new CPhoneCamera(this, photoInfo.width, photoInfo.height, params);
// mCamera = new CJpegCamera(this, photoInfo.width, photoInfo.height, mPath, params);
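Note: the RAW path is driven entirely by the two new CAMERA_PARAMS fields set above. burstRawCapture flips the capture reader to RAW16 (see getOutputFormat() further down) and burstCaptures sets the burst depth. A minimal sketch of a caller enabling it, with illustrative values:

NdkCamera::CAMERA_PARAMS params = { 0 };
params.requestTemplate = 2;    // still-capture template
params.burstRawCapture = 1;    // capture reader becomes AIMAGE_FORMAT_RAW16
params.burstCaptures = 6;      // frames per RAW burst
CPhoneDevice::CPhoneCamera* camera = new CPhoneDevice::CPhoneCamera(dev, 4096, 3072, params);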
@@ -1507,6 +1515,330 @@ void DrawOutlineText(cv::Ptr<cv::ft::FreeType2> ft2, cv::Mat& mat, const std::st
}
}
bool CPhoneDevice::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
time_t takingTime = time(NULL);
if (mPhotoInfo.remedy != 0)
{
if ((takingTime - mPhotoInfo.scheduleTime) > 30)
{
takingTime = mPhotoInfo.scheduleTime + mPhotoInfo.channel * 2;
}
}
mPhotoInfo.photoTime = takingTime;
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
PHOTO_INFO photoInfo = mPhotoInfo;
std::string path;
path.swap(mPath);
std::string tmpPath = m_appPath + (APP_DIR_TMP DIR_SEP_STR) + std::to_string(photoInfo.photoId);
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
ACameraMetadata_const_entry e = { 0 };
camera_status_t status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_LENS_FACING, &e);
if (status == ACAMERA_OK)
{
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
int sensorOrientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(characteristics.get(), ACAMERA_SENSOR_ORIENTATION, &e);
if (status == ACAMERA_OK)
{
sensorOrientation = (int)e.data.i32[0];
}
}
bool turnOffOtg = (photoInfo.usbCamera != 0);
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
std::thread th([=]
{
cv::Mat rgb;
std::vector<std::vector<uint8_t> > rawFiles;
media_status_t mstatus;
std::string cameraInfo;
if (photoInfo.usingRawFormat != 0)
{
// RAW burst: wrap every frame of the burst in an in-memory DNG so the
// HDR+ pipeline below can consume calibrated raw data.
for (int idx = 0; idx < frames.size(); idx++)
{
std::shared_ptr<AImage> spImage = frames[idx];
std::shared_ptr<ACameraMetadata> result = results[idx];
auto it = rawFiles.insert(rawFiles.end(), std::vector<uint8_t>());
int32_t width;
int32_t height;
AImage_getWidth(spImage.get(), &width);
AImage_getHeight(spImage.get(), &height);
int planeCount;
media_status_t status = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(status == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *data = nullptr;
int len = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &data, &len);
DngCreator dngCreator(characteristics.get(), result.get());
dngCreator.writeInputBuffer(*it, data, len, width, height, 0);
}
}
else
{
if (results.size() == 1 && frames.size() == 1)
{
std::shared_ptr<ACameraMetadata> result = results[0];
std::shared_ptr<AImage> frame = frames[0];
if (photoInfo.outputDbgInfo != 0)
{
NdkCamera::CAPTURE_RESULT captureResult = { 0 };
NdkCamera::EnumCameraResult(result.get(), captureResult);
char extimeunit[4] = { 0 };
unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
char str[128] = { 0 };
snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
captureResult.autoExposure, captureResult.autoFocus,
extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
captureResult.sceneMode, GpioControl::getLightAdc(), ldr, captureResult.zoomRatio,
(uint32_t)captureResult.duration, captureResult.frameDuration);
cameraInfo = str;
}
int32_t format;
media_status_t mstatus = AImage_getFormat(frame.get(), &format);
if (format == AIMAGE_FORMAT_YUV_420_888)
{
int32_t width;
int32_t height;
mstatus = AImage_getWidth(frame.get(), &width);
mstatus = AImage_getHeight(frame.get(), &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(frame.get(), 0, &y_pixelStride);
AImage_getPlanePixelStride(frame.get(), 1, &u_pixelStride);
AImage_getPlanePixelStride(frame.get(), 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(frame.get(), 0, &y_rowStride);
AImage_getPlaneRowStride(frame.get(), 1, &u_rowStride);
AImage_getPlaneRowStride(frame.get(), 2, &v_rowStride);
uint8_t* y_data = 0;
uint8_t* u_data = 0;
uint8_t* v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(frame.get(), 0, &y_data, &y_len);
AImage_getPlaneData(frame.get(), 1, &u_data, &u_len);
AImage_getPlaneData(frame.get(), 2, &v_data, &v_len);
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
{
// already nv21
ConvertYUV21ToMat(y_data, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
}
else
{
// construct nv21
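// Note: the planes may carry row padding (rowStride > width) or be
// interleaved (pixelStride == 2), so Y is copied pixel by pixel and the
// V/U samples are packed in pairs to produce a tight NV21 buffer.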
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
{
// Y
uint8_t* yptr = nv21;
for (int y = 0; y < height; y++)
{
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
for (int x = 0; x < width; x++)
{
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
uint8_t* uvptr = nv21 + width * height;
for (int y = 0; y < height / 2; y++)
{
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
for (int x = 0; x < width / 2; x++)
{
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
ConvertYUV21ToMat(nv21, width, height, photoInfo.width, photoInfo.height, sensorOrientation, facing == ACAMERA_LENS_FACING_FRONT, photoInfo.orientation, rgb);
delete[] nv21;
}
if (photoInfo.outputDbgInfo != 0)
{
}
}
}
}
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, photoInfo.photoId, turnOffOtg);
m_threadClose.swap(closeThread);
if (closeThread.joinable())
{
closeThread.detach();
}
#ifdef OUTPUT_CAMERA_DBG_INFO
#if 0
bool shouldRetry = false;
if (ldr != ~0)
{
if (ldr < MIN_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
// photoInfo.usingRawFormat = 1;
}
}
else if (ldr > MAX_LIGHT_Y)
{
if (photoInfo.retries < (DEFAULT_TAKE_PHOTO_RETRIES - 1))
{
shouldRetry = true;
char presetBuf[16] = {0};
snprintf(presetBuf, sizeof(presetBuf), "%02X", photoInfo.retries);
// replaceAll(fullPath, ".jpg", std::string("-") + std::to_string(photoInfo.retries) + ".jpg");
replaceAll(fullPath, "_FF_", std::string("_") + presetBuf + std::string("_"));
XYLOG(XYLOG_SEVERITY_ERROR, "Photo is TOO dark or light(LDR=%u), will RETRY it",
(uint32_t) captureResult.avgY);
}
photoInfo.compensation = -2 * ((int16_t) ((uint16_t) captureResult.avgY));
}
}
#endif // 0
#endif // OUTPUT_CAMERA_DBG_INFO
// Notify to take next photo
TakePhotoCb(1, photoInfo, "", takingTime);
if (photoInfo.usingRawFormat != 0)
{
std::vector<std::string> rawFilePaths;
for (auto it = rawFiles.cbegin(); it != rawFiles.cend(); ++it)
{
std::string dngFilePath = tmpPath + std::to_string(std::distance(rawFiles.cbegin(), it)) + ".dng";
#ifdef _DEBUG
char log[256] = { 0 };
strcpy(log, dngFilePath.c_str());
#endif
FILE *file = fopen(dngFilePath.c_str(), "wb");
if (file) {
if (!(*it).empty())
{
fwrite(&((*it)[0]), 1, (*it).size(), file);
}
fclose(file);
rawFilePaths.push_back(dngFilePath);
}
}
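// The DNG burst on disk is handed to the hdrplus merge below; run_pipeline()
// produces a 16-bit image that convert16bit2_8bit_() narrows afterwards.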
XYLOG(XYLOG_SEVERITY_ERROR, "Start HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
hdrplus::hdrplus_pipeline pipeline;
pipeline.run_pipeline(rawFilePaths, 0, rgb);
XYLOG(XYLOG_SEVERITY_ERROR, "Finish HDR CH=%u IMGID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.photoId);
#ifdef NDEBUG
for (auto it = rawFilePaths.cbegin(); it != rawFilePaths.cend(); ++it)
{
std::remove((*it).c_str());
}
#endif
{
cv::Mat tempPic = convert16bit2_8bit_(rgb);
rgb = tempPic;
}
if (photoInfo.orientation > 0)
{
if (photoInfo.orientation == 1)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
cv::flip(rgb, rgb, 1);
}
} else if (photoInfo.orientation == 2)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 1);
}
else if (photoInfo.orientation == 3)
{
if (facing == ACAMERA_LENS_FACING_FRONT)
{
flip(rgb, rgb, 0);
}
else
{
cv::flip(rgb, rgb, -1);
}
}
else if (photoInfo.orientation == 4)
{
cv::Mat tempPic;
cv::transpose(rgb, tempPic);
cv::flip(tempPic, rgb, 0);
}
XYLOG(XYLOG_SEVERITY_ERROR, "Finish rotation CH=%u IMGID=%u", (uint32_t)photoInfo.channel, (uint32_t)photoInfo.photoId);
}
cv::cvtColor(rgb, rgb, cv::COLOR_RGB2BGR);
}
bool res = PostProcessPhoto(photoInfo, osds, path, cameraInfo, rgb);
if (res)
{
// TakePhotoCb(2, photoInfo, path, takingTime);
}
});
th.detach();
return true;
}
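The burst handler above relies on one idiom worth calling out: every AImage is wrapped in a std::shared_ptr with AImage_delete as its deleter, so the NDK buffer stays valid inside the detached worker thread and is returned to the reader only when the last reference drops. A condensed sketch of the pattern used throughout this commit:

AImage* image = NULL;
if (AImageReader_acquireNextImage(reader, &image) == AMEDIA_OK)
{
    // The deleter releases the buffer back to the AImageReader on whichever
    // thread drops the final reference.
    std::shared_ptr<AImage> frame(image, AImage_delete);
    frames.push_back(frame);
}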
bool CPhoneDevice::OnImageReady(cv::Mat& mat)
{
time_t takingTime = time(NULL);
@@ -1662,24 +1994,13 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
if (mCamera != NULL)
{
-NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
if (mPhotoInfo.outputDbgInfo != 0)
{
cv::Scalar scalarRed(0, 0, 255); // red
char extimeunit[4] = { 0 };
-unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
-strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "μs");
char str[128] = { 0 };
-snprintf(str, sizeof(str), "AE=%u AF=%u EXPS=%u%s(%d) ISO=%d AFS=%u AES=%u AWBS=%u SCENE=%d LDR=%d(%u) %0.1fx T=%u FD=%lld",
-captureResult.autoExposure, captureResult.autoFocus,
-extime, extimeunit, captureResult.compensation, captureResult.sensitivity,
-// isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
-(unsigned int)captureResult.afState, (unsigned int)captureResult.aeState, captureResult.awbState,
-captureResult.sceneMode, GpioControl::getLightAdc(), (unsigned int)captureResult.avgY, captureResult.zoomRatio,
-(uint32_t)captureResult.duration, captureResult.frameDuration);
-// cv::putText(mat, str, cv::Point(0, mat.rows - 20), cv::FONT_HERSHEY_COMPLEX, fontScale, scalarWhite, thickness1, cv::LINE_AA);
int fs = fontSize * 2 / 3;
textSize = ft2->getTextSize(str, fs, -1, &baseline);
@@ -1811,29 +2132,255 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
#ifdef OUTPUT_CAMERA_DBG_INFO
if (shouldRetry)
{
-TakePhotoCb(false, mPhotoInfo, fullPath, takingTime, objs);
+TakePhotoCb(0, mPhotoInfo, fullPath, takingTime, objs);
}
else
{
-TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs);
+TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
}
#else
-TakePhotoCb(res, mPhotoInfo, fullPath, takingTime, objs);
+TakePhotoCb(res ? 3 : 0, mPhotoInfo, fullPath, takingTime, objs);
#endif
}
else
{
ALOGI("Photo file exists: %s", mPath.c_str());
}
-CPhoneCamera* pCamera = mCamera;
-mCamera = NULL;
-bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
-std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
-m_threadClose.swap(closeThread);
-if (closeThread.joinable())
-{
-closeThread.detach();
-}
return res;
}

bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat)
{
int baseline = 0;
cv::Size textSize;
double height = mat.rows;
double width = mat.cols;
// double ratio = std::min(height / 1024, width / 1920);
double ratio = height / 1024.0;
int thickness = round(1.4 * ratio);
if (thickness < 1) thickness = 1;
else if (thickness > 5) thickness = 5;
cv::Scalar scalarWhite(255, 255, 255); // white
int fontSize = (int)(28.0 * ratio);
cv::Point pt;
std::string fontPath;
if (existsFile("/system/fonts/NotoSansCJK-Regular.ttc"))
{
fontPath = "/system/fonts/NotoSansCJK-Regular.ttc";
}
else if (existsFile("/system/fonts/NotoSerifCJK-Regular.ttc"))
{
fontPath = "/system/fonts/NotoSerifCJK-Regular.ttc";
}
else
{
fontPath = m_appPath + "fonts/Noto.otf";
}
cv::Ptr<cv::ft::FreeType2> ft2;
ft2 = cv::ft::createFreeType2();
ft2->loadFontData(fontPath.c_str(), 0);
// cv::Rect rc(0, 0, mat.cols, mat.rows);
// cv::rectangle (mat, rc, cv::Scalar(255, 255, 255), cv::FILLED);
std::vector<IDevice::RECOG_OBJECT> objs;
if ((m_pRecognizationCfg != NULL) && (m_pRecognizationCfg->enabled != 0) && (photoInfo.recognization != 0))
{
XYLOG(XYLOG_SEVERITY_INFO, "Channel AI Enabled");
// visualize(ncnnPath.c_str(), in);
#ifdef _DEBUG
double startTime = ncnn::get_current_time();
#endif // _DEBUG
bool detected = YoloV5NcnnDetect(mat, true, m_pRecognizationCfg->blobName8, m_pRecognizationCfg->blobName16, m_pRecognizationCfg->blobName32, objs);
#ifdef _DEBUG
double elapsed = ncnn::get_current_time() - startTime;
// __android_log_print(ANDROID_LOG_DEBUG, "YoloV5Ncnn", "%.2fms detect", elasped);
#endif // _DEBUG
#ifdef _DEBUG
ALOGI("NCNN recognization: %.2fms res=%d", elapsed, ((detected && !objs.empty()) ? 1 : 0));
#endif
if (detected && !objs.empty())
{
cv::Scalar borderColor(m_pRecognizationCfg->borderColor & 0xFF, (m_pRecognizationCfg->borderColor & 0xFF00) >> 8, (m_pRecognizationCfg->borderColor & 0xFF0000) >> 16);
cv::Scalar textColor(m_pRecognizationCfg->textColor & 0xFF, (m_pRecognizationCfg->textColor & 0xFF00) >> 8, (m_pRecognizationCfg->textColor & 0xFF0000) >> 16);
float minSizeW = m_pRecognizationCfg->minSize > 0 ? (photoInfo.width * m_pRecognizationCfg->minSize / 100) : 0;
float minSizeH = m_pRecognizationCfg->minSize > 0 ? (photoInfo.height * m_pRecognizationCfg->minSize / 100) : 0;
for (std::vector<IDevice::RECOG_OBJECT>::const_iterator it = objs.cbegin(); it != objs.cend();)
{
if (it->label >= m_pRecognizationCfg->items.size())
{
it = objs.erase(it);
continue;
}
const IDevice::CFG_RECOGNIZATION::ITEM& item = m_pRecognizationCfg->items[it->label];
if (item.enabled == 0 || it->prob < item.prob)
{
it = objs.erase(it);
continue;
}
if (m_pRecognizationCfg->minSize > 0)
{
if (it->w < minSizeW || it->h < minSizeH)
{
it = objs.erase(it);
continue;
}
}
if ((photoInfo.recognization & 0x2) != 0)
{
cv::Rect rc(it->x, it->y, it->w, it->h);
cv::rectangle(mat, rc, borderColor, m_pRecognizationCfg->thickness);
textSize = ft2->getTextSize(item.name, fontSize, thickness, &baseline);
textSize.height += baseline;
if (it->y > textSize.height)
{
pt.y = it->y - textSize.height - 4 - m_pRecognizationCfg->thickness;
}
else if (mat.rows - it->y - it->h > textSize.height)
{
pt.y = it->y + it->h + 4 + m_pRecognizationCfg->thickness;
}
else
{
// Inner
pt.y = it->y + 4 + m_pRecognizationCfg->thickness;
}
if (mat.cols - it->x > textSize.width)
{
pt.x = it->x;
}
else
{
pt.x = it->x + it->w - textSize.width;
}
#ifdef OUTPUT_CAMERA_DBG_INFO
char buf[128];
snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
XYLOG(XYLOG_SEVERITY_DEBUG, buf);
#endif
ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
}
++it;
}
}
}
else
{
XYLOG(XYLOG_SEVERITY_WARNING, "Channel AI Disabled");
}
// #ifdef OUTPUT_CAMERA_DBG_INFO
if (!cameraInfo.empty())
{
// NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
if (photoInfo.outputDbgInfo != 0)
{
cv::Scalar scalarRed(0, 0, 255); // red
int fs = fontSize * 2 / 3;
textSize = ft2->getTextSize(cameraInfo, fs, -1, &baseline);
cv::Point lt(0, mat.rows - fs - 20 * ratio);
cv::Point lt2(0, lt.y - 2 * ratio);
cv::Point rb(0 + textSize.width + 2 * ratio, lt2.y + textSize.height + 8 * ratio);
if (rb.x > (int)width - 1)
{
rb.x = (int)width - 1;
}
if (rb.y > (int)height - 1)
{
rb.y = (int)height - 1;
}
cv::Mat roi = mat(cv::Rect(lt2, rb));
cv::Mat clrMat(roi.size(), CV_8UC3, scalarWhite);
double alpha = 0.5;
cv::addWeighted(clrMat, alpha, roi, 1.0 - alpha, 0.0, roi);
// cv::rectangle(mat, lt2, rb,cv::Scalar(255, 255, 255), -1);
ft2->putText(mat, cameraInfo, lt, fs, scalarRed, -1, cv::LINE_AA, false);
// DrawOutlineText(ft2, mat, str, cv::Point(0, mat.rows - fs - 20 * ratio), fs, scalarWhite, 1);
}
}
// #endif // OUTPUT_CAMERA_DBG_INFO
for (vector<OSD_INFO>::const_iterator it = osds.cbegin(); it != osds.cend(); ++it)
{
if (it->text.empty())
{
continue;
}
#ifdef _DEBUG
if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
{
int aa = 0;
}
#endif
textSize = ft2->getTextSize(it->text, fontSize, thickness, &baseline);
XYLOG(XYLOG_SEVERITY_DEBUG, "%s font Size=%d height: %d baseline=%d", it->text.c_str(), fontSize, textSize.height, baseline);
if (it->alignment == OSD_ALIGNMENT_TOP_LEFT)
{
pt.x = it->x * ratio;
pt.y = it->y * ratio;
}
else if (it->alignment == OSD_ALIGNMENT_TOP_RIGHT)
{
pt.x = width - textSize.width - it->x * ratio;
pt.y = it->y * ratio;
}
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_RIGHT)
{
pt.x = width - textSize.width - it->x * ratio;
pt.y = height - it->y * ratio - textSize.height - baseline;
}
else if (it->alignment == OSD_ALIGNMENT_BOTTOM_LEFT)
{
pt.x = it->x * ratio;
pt.y = height - it->y * ratio - textSize.height - baseline;
}
// cv::Rect rc(pt.x, pt.y, textSize.width, textSize.height);
// cv::rectangle(mat, rc, cv::Scalar(0,255,255), 2);
DrawOutlineText(ft2, mat, it->text, pt, fontSize, scalarWhite, thickness);
}
std::vector<int> params;
params.push_back(cv::IMWRITE_JPEG_QUALITY);
params.push_back((int)((uint32_t)photoInfo.quality));
bool res = false;
std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo));
if (!std::filesystem::exists(std::filesystem::path(fullPath)))
{
res = cv::imwrite(fullPath.c_str(), mat, params);
if (!res)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to Write File: %s", fullPath.c_str() + m_appPath.size());
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Succeeded to Write File: %s", fullPath.c_str() + m_appPath.size());
}
TakePhotoCb(res ? 2 : 0, photoInfo, fullPath, photoInfo.photoTime, objs);
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Photo File Exists: %s", fullPath.c_str() + m_appPath.size());
} }
return res;
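TakePhotoCb now takes an int state instead of a bool. Reading the call sites in this diff: 0 is used on failure, 1 when the burst frames have been received (onBurstCapture), 2 when PostProcessPhoto has written the JPEG, and 3 when processing has fully finished. The codes are passed as bare literals; a named enum such as the following (hypothetical, not part of this commit) would make the protocol explicit:

// Hypothetical names for the TakePhotoCb codes, inferred from call sites.
enum PhotoCbState {
    PHOTO_CB_FAILED = 0,    // error, disconnect, or retry paths
    PHOTO_CB_CAPTURED = 1,  // burst received; next shot may start
    PHOTO_CB_WRITTEN = 2,   // JPEG written by PostProcessPhoto
    PHOTO_CB_COMPLETED = 3  // pipeline finished
};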
@@ -1851,7 +2398,7 @@ bool CPhoneDevice::OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat,
else
{
std::vector<IDevice::RECOG_OBJECT> objs;
-TakePhotoCb(result, mPhotoInfo, "", time(NULL), objs);
+TakePhotoCb(0, mPhotoInfo, "", time(NULL), objs);
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
@@ -1885,7 +2432,7 @@ bool CPhoneDevice::OnVideoReady(bool photoOrVideo, bool result, const char* path
{
std::rename(path, fullPath.c_str());
}
-TakePhotoCb(result, mPhotoInfo, fullPath, time(NULL), objs);
+TakePhotoCb(result ? 3 : 0, mPhotoInfo, fullPath, time(NULL), objs);
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
@@ -1907,7 +2454,7 @@ void CPhoneDevice::onError(const std::string& msg)
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
-TakePhotoCb(false, mPhotoInfo, mPath, 0);
+TakePhotoCb(0, mPhotoInfo, mPath, 0);
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
@@ -1926,7 +2473,7 @@ void CPhoneDevice::onDisconnected(ACameraDevice* device)
CPhoneCamera* pCamera = mCamera;
mCamera = NULL;
-TakePhotoCb(false, mPhotoInfo, mPath, 0);
+TakePhotoCb(0, mPhotoInfo, mPath, 0);
bool turnOffOtg = (mPhotoInfo.usbCamera != 0);
std::thread closeThread(&CPhoneDevice::CloseCamera2, this, pCamera, mPhotoInfo.photoId, turnOffOtg);
@@ -2157,6 +2704,7 @@ int CPhoneDevice::GetWData(IDevice::WEATHER_INFO *weatherInfo)
return true;
}
#ifdef USING_N938
bool CPhoneDevice::OpenSensors()
{
@@ -2235,4 +2783,4 @@ bool CPhoneDevice::CloseSensors()
{
return false;
}
#endif

@@ -156,12 +156,12 @@ public:
class CPhoneCamera : public NdkCamera
{
public:
-CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures = 1);
+CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
virtual ~CPhoneCamera();
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
-virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames);
+virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
protected:
CPhoneDevice* m_dev;
@@ -174,7 +174,7 @@ public:
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
-virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames);
+virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
protected:
std::string m_path;
@@ -250,7 +250,8 @@ protected:
bool SendBroadcastMessage(std::string action, int value);
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
-inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
+bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat);
+inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{
if (m_listener != NULL)
{
@@ -259,13 +260,12 @@ protected:
return false;
}
-inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const
+inline bool TakePhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const
{
if (m_listener != NULL)
{
std::vector<IDevice::RECOG_OBJECT> objects;
-return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects);
+return m_listener->OnPhotoTaken(result, photoInfo, path, photoTime, objects);
}
return false;
@@ -275,6 +275,7 @@ protected:
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat& mat);
+bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
void onError(const std::string& msg);
void onDisconnected(ACameraDevice* device);

@@ -17,6 +17,11 @@
#ifndef __CAMERA2_HELPER_H__
#define __CAMERA2_HELPER_H__
+#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui.hpp>
+#include "mat.h"
template <typename T>
class RangeValue {
@@ -103,4 +108,107 @@
};
inline void ConvertYUV21ToMat(const uint8_t* nv21, int nv21_width, int nv21_height, int orgWidth, int orgHeight,
int sensorOrientation, bool front, int rotation, cv::Mat& rgb)
{
int w = 0;
int h = 0;
int rotate_type = 0;
cv::Mat nv21_rotated;
const unsigned char* yuv420data = nv21;
if (rotation != 0)
{
int co = 0;
if (front)
{
co = (sensorOrientation + (rotation - 1) * 90) % 360;
co = (360 - co) % 360;
}
else
{
co = (sensorOrientation - (rotation - 1) * 90 + 360) % 360;
}
// XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing);
// int co = 0;
if (co == 0)
{
w = nv21_width;
h = nv21_height;
rotate_type = front ? 2 : 1;
}
else if (co == 90)
{
w = nv21_height;
h = nv21_width;
int tmp = orgWidth;
orgWidth = orgHeight;
orgHeight = tmp;
rotate_type = front ? 5 : 6;
}
else if (co == 180)
{
w = nv21_width;
h = nv21_height;
rotate_type = front ? 4 : 3;
}
else if (co == 270)
{
w = nv21_height;
h = nv21_width;
int tmp = orgWidth;
orgWidth = orgHeight;
orgHeight = tmp;
rotate_type = front ? 7 : 8;
}
nv21_rotated.create(h + h / 2, w, CV_8UC1);
ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
yuv420data = nv21_rotated.data;
}
else
{
w = nv21_width;
h = nv21_height;
}
// nv21_rotated to rgb
if (w == orgWidth && h == orgHeight)
{
rgb.create(h, w, CV_8UC3);
// ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data);
}
else
{
cv::Mat org(h, w, CV_8UC3);
ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data);
if (w * orgHeight == h * orgWidth) // Same Ratio
{
cv::resize(org, rgb, cv::Size(orgWidth, orgHeight));
}
else
{
// Crop image
if (w > orgWidth && h >= orgHeight)
{
int left = (w - orgWidth) / 2;
int top = (h - orgHeight) / 2;
rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth));
}
else
{
rgb = org;
}
}
}
}
#endif /* __CAMERA2_HELPER_H__ */
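ConvertYUV21ToMat above bundles rotation (ncnn::kanna_rotate_yuv420sp), YUV-to-RGB conversion, and resize/crop into one helper. A minimal usage sketch, with illustrative dimensions:

cv::Mat rgb;
ConvertYUV21ToMat(nv21,        // tightly packed Y plane + interleaved chroma
                  4096, 3072,  // buffer dimensions
                  4096, 3072,  // requested output dimensions
                  90,          // ACAMERA_SENSOR_ORIENTATION of the module
                  false,       // facing != ACAMERA_LENS_FACING_FRONT
                  0,           // rotation 0: keep the native orientation
                  rgb);        // receives a CV_8UC3 image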

@@ -1,3 +1,4 @@
/*
 * Copyright (C) 2017 The Android Open Source Project
 *

@@ -100,7 +100,7 @@ void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureR
((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
-NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params, int burstCaptures) : mBurstCaptures(burstCaptures)
+NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
camera_facing = 0;
camera_orientation = 0;
@@ -153,7 +153,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
lightDetected = false;
mResult = { 0 };
-mResult.avgY = ~0;
+mLdr = ~0;
}
NdkCamera::~NdkCamera()
@@ -567,6 +567,12 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutputContainer_create(&capture_session_output_container);
+uint32_t burstCaptures = getBurstCaptures();
+if (burstCaptures == 0)
+{
+burstCaptures = 1;
+}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 5, &mPreviewImageReader);
if (mstatus == AMEDIA_OK)
@@ -579,7 +585,7 @@ int NdkCamera::open(const std::string& cameraId) {
ANativeWindow_acquire(mPreviewImageWindow);
}
-mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), mBurstCaptures, &mImageReader);
+mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures, &mImageReader);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@@ -593,16 +599,19 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget);
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
-for (int idx = 0; idx <= mBurstCaptures; idx++)
+for (int idx = 0; idx <= burstCaptures; idx++)
{
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
+bool isPreviewReqest = (idx == PREVIEW_REQUEST_IDX);
request->pThis = this;
-request->imageReader = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageReader : mImageReader;
-request->imageWindow = (idx == PREVIEW_REQUEST_IDX) ? mPreviewImageWindow : mImageWindow;
-request->imageTarget = (idx == PREVIEW_REQUEST_IDX) ? mPreviewOutputTarget : mOutputTarget;
-request->templateId = (idx == PREVIEW_REQUEST_IDX) ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
+request->imageReader = isPreviewReqest ? mPreviewImageReader : mImageReader;
+request->imageWindow = isPreviewReqest ? mPreviewImageWindow : mImageWindow;
+request->imageTarget = isPreviewReqest ? mPreviewOutputTarget : mOutputTarget;
+request->templateId = isPreviewReqest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
mCaptureRequests.push_back(request);
@@ -709,27 +718,30 @@ int NdkCamera::open(const std::string& cameraId) {
}
}
+if (isPreviewReqest)
+{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
mResult.aeLockSetted = 1;
}
else
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
}
uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
m_precaptureStartTime = m_startTime;
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
+}
}
else
{
@@ -818,6 +830,8 @@ void NdkCamera::close()
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
camera_status_t res = ACAMERA_OK;
+mCaptureFrames.clear();
if ((ACameraManager *)camera_manager != NULL)
{
// res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
@@ -928,22 +942,22 @@ void NdkCamera::close()
void NdkCamera::onImageAvailable(AImageReader* reader)
{
AImage* image = 0;
-media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);
-if (mstatus != AMEDIA_OK)
-{
-// error
-// https://stackoverflow.com/questions/67063562
-if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
-{
-XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
-}
-return;
-}
+media_status_t mstatus = AMEDIA_OK;
if (reader == mPreviewImageReader)
{
-if (mResult.avgY == ~0)
+mstatus = AImageReader_acquireLatestImage(reader, &image);
+if (mstatus != AMEDIA_OK)
+{
+// https://stackoverflow.com/questions/67063562
+if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
+{
+XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
+}
+return;
+}
+if (mLdr == ~0)
{
uint8_t* y_data = 0;
int y_len = 0;
@@ -955,54 +969,34 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
#endif
avgY = avgY / (uint64_t)y_len;
-mResult.avgY = avgY;
-mFinalResult.avgY = avgY;
+mLdr = avgY;
}
AImage_delete(image);
return;
}
-#if 0
-if (!lightDetected)
-{
-AImage_getPlaneData(image, 0, &y_data, &y_len);
-lightDetected = true;
-#if 1
-if (avgY < 50)
-{
-if (m_params.autoExposure)
-{
-uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
-camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
-int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
-status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
-int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
-status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
-XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY,
-mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
-}
-AImage_delete(image);
-return;
-}
-#endif
-}
-#endif
-m_photoTaken = true;
-XYLOG(XYLOG_SEVERITY_INFO, "Photo Taken: AES=%u AFS=%u AWBS=%u", (uint32_t)mFinalResult.aeState, (uint32_t)mFinalResult.awbState, (uint32_t)mFinalResult.afState);
-mFinalResult.duration = GetMicroTimeStamp() - m_startTime;
-mCaptureFrames.push_back(std::shared_ptr<AImage>(image, AImage_delete));
+else
+{
+while (1)
+{
+mstatus = AImageReader_acquireNextImage(reader, &image);
+if (mstatus != AMEDIA_OK)
+{
+// https://stackoverflow.com/questions/67063562
+if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
+{
+if (mCaptureFrames.size() < m_params.burstCaptures)
+{
+XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
+}
+}
+break;
+}
+m_photoTaken = true;
+mCaptureFrames.push_back(std::shared_ptr<AImage>(image, AImage_delete));
+}
+}
}
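The two readers are deliberately drained differently: the preview reader uses AImageReader_acquireLatestImage because only the newest frame matters for light metering, while the capture reader loops on AImageReader_acquireNextImage so that no frame of the burst is dropped. In outline (previewReader/captureReader stand in for the members above):

AImage* latest = NULL;
AImageReader_acquireLatestImage(previewReader, &latest);   // discards older frames
AImage* next = NULL;
while (AImageReader_acquireNextImage(captureReader, &next) == AMEDIA_OK)
{
    // keep every frame of the burst
}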
void NdkCamera::on_error(const std::string& msg)
@@ -1011,7 +1005,6 @@ void NdkCamera::on_error(const std::string& msg)
void NdkCamera::onDisconnected(ACameraDevice* device)
{
}
bool NdkCamera::on_image(cv::Mat& rgb)
@@ -1019,7 +1012,7 @@ bool NdkCamera::on_image(cv::Mat& rgb)
return false;
}
-bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames)
+bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
{
return false;
}
@@ -1146,46 +1139,23 @@ void NdkCamera::onSessionReady(ACameraCaptureSession *session)
{
if (m_photoTaken)
{
-AASSERT(mCaptureFrames.size() == mCaptureResults.size(), "Frame size %u doesn't equal to result size %u",
-(uint32_t)mCaptureFrames.size(), (uint32_t)mCaptureResults.size());
-#ifndef NDEBUG
-for (int idx = 0; idx < mCaptureFrames.size(); idx++)
-{
-std::shared_ptr<AImage> spImage = mCaptureFrames[idx];
-int32_t format;
-AImage_getFormat(spImage.get(), &format);
-if (format == AIMAGE_FORMAT_YUV_420_888)
-{
-}
-else
-{
-ALOGW("Capture Available TID=%lld", (long long)getThreadIdOfULL());
-uint32_t frameNumber = mFrameNumber.fetch_add(1);
-std::string path = "/sdcard/com.xypower.mpapp/tmp/" + std::to_string(frameNumber);
-if (format == AIMAGE_FORMAT_JPEG)
-{
-path += ".jpg";
-writeJpegFile(spImage.get(), path.c_str());
-}
-else
-{
-path += ".dng";
-writeRawFile(spImage.get(), mCharacteristics.get(), mCaptureResults[idx].get(), path.c_str());
-}
-}
-}
-#endif // NDEBUG
+for (int idx = 0; idx < 10; idx++)
+{
+if (mCaptureFrames.size() >= m_params.burstCaptures && mCaptureResults.size() >= m_params.burstCaptures)
+{
+break;
+}
+std::this_thread::sleep_for(std::chrono::milliseconds(16));
+}
+AASSERT(mCaptureFrames.size() == mCaptureResults.size(), "Frame size %u doesn't equal to result size %u",
+(uint32_t)mCaptureFrames.size(), (uint32_t)mCaptureResults.size());
+onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
}
}
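onSessionReady now waits up to 10 x 16 ms for the burst to complete before dispatching. A condition variable signalled from onImageAvailable/onCaptureCompleted would remove the fixed ceiling; a sketch of that alternative (m_burstReady is hypothetical, not in this commit):

std::unique_lock<std::mutex> lock(m_locker);
m_burstReady.wait_for(lock, std::chrono::milliseconds(500), [this] {
    return mCaptureFrames.size() >= m_params.burstCaptures &&
           mCaptureResults.size() >= m_params.burstCaptures;
});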
void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
}
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
@@ -1448,9 +1418,13 @@ bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
int32_t NdkCamera::getOutputFormat() const
{
-return AIMAGE_FORMAT_YUV_420_888;
+return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888;
}
+int32_t NdkCamera::getBurstCaptures() const
+{
+return m_params.burstRawCapture ? m_params.burstCaptures : 1;
+}
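getOutputFormat() and getBurstCaptures() jointly size the capture-side AImageReader created in open(): a RAW burst needs both the RAW16 format and a queue deep enough for every frame, otherwise acquireNextImage() starves. Condensed from the open() code above:

int32_t format = getOutputFormat();       // AIMAGE_FORMAT_RAW16 for RAW bursts
uint32_t burst = getBurstCaptures();      // burst depth, clamped to >= 1 in open()
if (burst == 0) burst = 1;
AImageReader* reader = NULL;
media_status_t st = AImageReader_new(width, height, format, (int32_t)burst, &reader);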
void NdkCamera::CreateSession(ANativeWindow* previewWindow,
ANativeWindow* jpgWindow, bool manualPreview,
@@ -1582,8 +1556,6 @@ void NdkCamera::writeJpegFile(AImage *image, const char* path)
void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path)
{
// dngCreator.
int32_t width;
int32_t height;
@@ -1639,9 +1611,9 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
-uint8_t* y_data = 0;
-uint8_t* u_data = 0;
-uint8_t* v_data = 0;
+uint8_t *y_data = 0;
+uint8_t *u_data = 0;
+uint8_t *v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
@@ -1649,23 +1621,20 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
-if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
-{
+if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 &&
+u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width &&
+v_rowStride == width) {
// already nv21 :)
// on_image((unsigned char*)y_data, (int)width, (int)height);
-}
-else
-{
+} else {
// construct nv21
-unsigned char* nv21 = new unsigned char[width * height + width * height / 2];
+unsigned char *nv21 = new unsigned char[width * height + width * height / 2];
{
// Y
-unsigned char* yptr = nv21;
-for (int y = 0; y < height; y++)
-{
-const unsigned char* y_data_ptr = y_data + y_rowStride * y;
-for (int x = 0; x < width; x++)
-{
+unsigned char *yptr = nv21;
+for (int y = 0; y < height; y++) {
+const unsigned char *y_data_ptr = y_data + y_rowStride * y;
+for (int x = 0; x < width; x++) {
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
@@ -1673,13 +1642,11 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
}
// UV
-unsigned char* uvptr = nv21 + width * height;
-for (int y = 0; y < height / 2; y++)
-{
-const unsigned char* v_data_ptr = v_data + v_rowStride * y;
-const unsigned char* u_data_ptr = u_data + u_rowStride * y;
-for (int x = 0; x < width / 2; x++)
-{
+unsigned char *uvptr = nv21 + width * height;
+for (int y = 0; y < height / 2; y++) {
+const unsigned char *v_data_ptr = v_data + v_rowStride * y;
+const unsigned char *u_data_ptr = u_data + u_rowStride * y;
+for (int x = 0; x < width / 2; x++) {
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
@@ -1689,8 +1656,71 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
}
}
// on_image((unsigned char*)nv21, (int)width, (int)height);
-delete[] nv21;
}
}

void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult)
{
camera_status_t status = ACAMERA_ERROR_BASE;
ACameraMetadata_const_entry val = { 0 };
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
captureResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
captureResult.awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
captureResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
captureResult.exposureTime = exTime;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val);
captureResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val);
uint8_t aeMode = (status == ACAMERA_OK) ? val.data.u8[0] : 0;
captureResult.autoExposure = aeMode;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_FRAME_DURATION, &val);
int64_t frameDuration = (status == ACAMERA_OK) ? val.data.i64[0] : 0;
captureResult.frameDuration = frameDuration;
val = { 0 };
float focusDistance = NAN;
status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val);
if (status == ACAMERA_OK)
{
focusDistance = *val.data.f;
}
captureResult.FocusDistance = focusDistance;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &val);
if (status == ACAMERA_OK)
{
captureResult.zoomRatio = *val.data.f;
}
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
captureResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val);
captureResult.sceneMode = (status == ACAMERA_OK) ? *(val.data.u8) : 0;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val);
captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
}
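With EnumCameraResult exposed as a static helper, any holder of an ACameraMetadata result can decode the fields used for the on-image debug line; the new CPhoneDevice::onBurstCapture does exactly this. A minimal usage sketch:

NdkCamera::CAPTURE_RESULT cr = { 0 };
NdkCamera::EnumCameraResult(result.get(), cr);
// cr.exposureTime, cr.sensitivity, cr.aeState, ... are now populated, with
// INACTIVE/0 fallbacks for any key the vendor HAL does not report.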

@@ -81,6 +81,7 @@ public:
unsigned int orientation:3;
unsigned int zoom : 1;
unsigned int wait3ALocked : 3;
+unsigned int burstRawCapture : 1;
unsigned int reserved : 18;
int64_t exposureTime;
unsigned int sensitivity;
@@ -88,6 +89,7 @@ public:
float zoomRatio;
uint8_t requestTemplate;
uint8_t awbMode;
+uint8_t burstCaptures;
unsigned short focusTimeout; // milli-seconds 65535
};
@@ -135,7 +137,7 @@ public:
int sequenceId;
};
-NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params, int burstCaptures = 1);
+NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params);
virtual ~NdkCamera();
// facing 0=front 1=back
@@ -143,13 +145,14 @@ public:
void close();
int selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY);
-void writeJpegFile(AImage *image, const char* path);
-void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path);
+static void writeJpegFile(AImage *image, const char* path);
+static void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path);
void onAvailabilityCallback(const char* cameraId);
void onUnavailabilityCallback(const char* cameraId);
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
+virtual int32_t getBurstCaptures() const;
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow);
@@ -160,7 +163,7 @@ public:
virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device);
-virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, const std::vector<std::shared_ptr<ACameraMetadata> >& results, const std::vector<std::shared_ptr<AImage> >& frames);
+virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
void onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
@@ -168,14 +171,15 @@ public:
void onSessionReady(ACameraCaptureSession *session);
void onError(ACameraDevice* device, int error);
-const CAPTURE_RESULT& getCaptureResult() const
+uint32_t GetLdr() const
{
-return mFinalResult;
+return mLdr;
}
bool IsCameraAvailable(const std::string& cameraId);
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
+static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
std::mutex m_locker;
@@ -183,10 +187,8 @@ protected:
std::atomic<uint32_t> mFrameNumber;
protected:
CAMERA_PARAMS m_params;
-int mBurstCaptures;
int camera_facing;
int camera_orientation;
bool m_firstFrame;
@@ -218,7 +220,6 @@ protected:
bool mCaptureTriggered;
CAPTURE_RESULT mResult;
-CAPTURE_RESULT mFinalResult;
unsigned long long m_startTime;
protected:
@@ -241,7 +242,9 @@ protected:
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
+std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
+uint32_t mLdr;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
ACameraCaptureSession* capture_session;
