调整回预览+静态抓拍模式

针对iso为200-300之间的粉色问题,做了补丁,强制走预览模式,临时绕过这个问题
nx2024
Matthew 2 months ago
parent c5dadae819
commit 0639f23f3f

@ -32,22 +32,54 @@
#include "mtk_platform_metadata_tag.h"
#include "mtk_metadata_tag.h"
#define MTK_MFBMODE_TAG "com.mediatek.mfbmode"
#define MTK_MFNR_ENABLE_TAG "com.mediatek.3afeature.mfnrenable"
#define MTK_VENDOR_TAG_SECTION 0x8000
#define MTK_MFNR_FEATURE 4
#define MTK_MFNR_FEATURE_START ((MTK_MFNR_FEATURE + MTK_VENDOR_TAG_SECTION) << 16)
#define MTK_MFNR_FEATURE_MFB_MODE MTK_MFNR_FEATURE_START
#define MTK_MFNR_FEATURE_MFB_RESULT (MTK_MFNR_FEATURE_START + 1)
#define MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES (MTK_MFNR_FEATURE_START + 2)
// NOTE(review): these #define lines were interleaved into this span by the
// surrounding diff; they are file-scope constants referenced later in the
// file, so they are kept here (hoisted above the function, which is safe
// since every use site appears after this span).
#define MTK_MFNR_FEATURE_MFB_AUTO 0xFF
#define MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR 1
// #define MTK_3A_MFNR_ENABLE "com.mediatek.3afeature.mfnrenable"
#define MTK_3A_MFNR_INTENSITY_TAG "com.mediatek.3afeature.mfnrintensity"

// Dumps a YUV_420_888 AImage to a raw planar I420 file: full-resolution Y
// plane first, then the quarter-resolution U plane, then the V plane.
//
// @param image    source frame; assumed AIMAGE_FORMAT_YUV_420_888 (3 planes)
//                 — TODO confirm callers never pass other formats.
// @param filePath destination path; silently returns if it cannot be opened.
//
// Fix vs. previous version: the chroma planes of an Android YUV_420_888
// image frequently carry pixelStride == 2 (semi-planar layout where U and V
// samples interleave in memory). The old code wrote width/2 contiguous
// bytes per chroma row, which emitted interleaved UV bytes instead of a
// planar dump. We now query AImage_getPlanePixelStride and de-interleave
// when needed.
void saveYuvToFile(AImage* image, const std::string& filePath) {
    int32_t width = 0, height = 0;
    AImage_getWidth(image, &width);
    AImage_getHeight(image, &height);

    // Gather per-plane buffers and layout info (row stride = bytes per row
    // including padding; pixel stride = bytes between consecutive samples).
    uint8_t* planeData[3] = { nullptr, nullptr, nullptr };
    int planeLen[3] = { 0, 0, 0 };
    int32_t rowStride[3] = { 0, 0, 0 };
    int32_t pixStride[3] = { 0, 0, 0 };
    for (int p = 0; p < 3; p++) {
        AImage_getPlaneData(image, p, &planeData[p], &planeLen[p]);
        AImage_getPlaneRowStride(image, p, &rowStride[p]);
        AImage_getPlanePixelStride(image, p, &pixStride[p]);
    }

    std::ofstream file(filePath, std::ios::binary);
    if (!file.is_open()) {
        // Best-effort debug dump: failing to open the file is not fatal.
        return;
    }

    // Y plane: pixel stride is 1 by definition for this format; copy row by
    // row so row padding (rowStride > width) is not written to the file.
    for (int32_t row = 0; row < height; row++) {
        file.write(reinterpret_cast<const char*>(planeData[0] + row * rowStride[0]), width);
    }

    // Chroma planes (U then V): half resolution in both dimensions. When
    // pixStride == 1 the plane is already tightly packed per row; otherwise
    // de-interleave sample by sample.
    const int32_t chromaWidth = width / 2;
    const int32_t chromaHeight = height / 2;
    for (int p = 1; p < 3; p++) {
        for (int32_t row = 0; row < chromaHeight; row++) {
            const uint8_t* src = planeData[p] + row * rowStride[p];
            if (pixStride[p] == 1) {
                file.write(reinterpret_cast<const char*>(src), chromaWidth);
            } else {
                for (int32_t col = 0; col < chromaWidth; col++) {
                    file.put(static_cast<char>(src[col * pixStride[p]]));
                }
            }
        }
    }
    // std::ofstream closes itself on destruction (RAII); no explicit close.
}
#ifdef _DEBUG
@ -225,16 +257,22 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
camera_device = 0;
mImageReader = NULL;
mImageWindow = NULL;
mOutputTarget = NULL;
mSessionOutput = NULL;
capture_session_output_container = 0;
capture_session = 0;
lightDetected = false;
mStableFrameCount = 0;
mResult = { 0 };
mLdr = ~0;
mFinalLdr = 0;
mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
m_delayFrames = 0;
}
NdkCamera::~NdkCamera()
@ -692,7 +730,7 @@ int NdkCamera::open(const std::string& cameraId) {
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures + 1, &mPreviewImageReader);
media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new preview, status=%d", status);
if (mstatus == AMEDIA_OK)
{
@ -708,13 +746,26 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
CaptureRequest *request = CreateRequest(true);
if (m_params.burstRawCapture == 2)
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new, status=%d", status);
if (mstatus == AMEDIA_OK)
{
SetupMFNR(mCharacteristics.get(), request->request);
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader, &listener);
mstatus = AImageReader_getWindow(mImageReader, &mImageWindow);
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status);
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
CaptureRequest *request = CreateRequest(true);
mCaptureRequests.push_back(request);
// capture session
@ -747,7 +798,7 @@ int NdkCamera::open(const std::string& cameraId) {
return status == ACAMERA_OK ? 0 : 1;
}
NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest, int32_t sensitivity/* = -1*/)
{
camera_status_t status = ACAMERA_OK;
@ -755,12 +806,22 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
std::memset(request, 0, sizeof(CaptureRequest));
request->pThis = this;
request->imageReader = mPreviewImageReader;
request->imageWindow = mPreviewImageWindow;
request->imageTarget = mPreviewOutputTarget;
request->sessionOutput = mPreviewSessionOutput;
// request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
request->templateId = (ACameraDevice_request_template)m_params.requestTemplate;
request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader;
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow;
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput;
request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
// request->templateId = (ACameraDevice_request_template)m_params.requestTemplate;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
if (!isPreviewRequest && sensitivity >= 200 && sensitivity <= 300 && (m_params.burstRawCapture == 2 || m_params.burstRawCapture == 3))
{
if (request->templateId == TEMPLATE_STILL_CAPTURE)
{
request->templateId = TEMPLATE_PREVIEW;
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
}
}
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
@ -771,8 +832,6 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
// uint8_t captureIntent = GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
uint8_t captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@ -874,6 +933,8 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
}
}
if (isPreviewRequest)
{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
@ -896,6 +957,7 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
}
}
else
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
@ -949,6 +1011,35 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
// status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
// status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
if (!isPreviewRequest)
{
#if 0
uint8_t colorMode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_COLOR_CORRECTION_MODE, 1, &colorMode);
// 设置均衡的RGGB增益
float rggbGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_GAINS, 4, rggbGains);
// 设置单位色彩变换矩阵
float colorMatrix[9] = {
1.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 1.0f
};
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_TRANSFORM, 9, colorMatrix);
#endif
if (m_params.burstRawCapture == 2)
{
SetupMFNR(mCharacteristics.get(), request->request, false, sensitivity);
}
else if (m_params.burstRawCapture == 3)
{
SetupMFNR(mCharacteristics.get(), request->request, true, sensitivity);
}
}
return request;
}
@ -977,6 +1068,8 @@ void NdkCamera::close()
}
*/
mPreviewResults.reset();
mCaptureResults.clear();
mCaptureFrames.clear();
mCaptureResultMap.clear();
@ -1049,6 +1142,42 @@ void NdkCamera::close()
ALOGD("After Free mPreviewImageReader");
#endif
}
if (mOutputTarget != NULL)
{
ACameraOutputTarget_free(mOutputTarget);
mOutputTarget = 0;
}
if (mImageWindow != NULL)
{
ANativeWindow_release(mImageWindow);
mImageWindow = 0;
}
if (mImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mImageReader");
#endif
AImage* image = NULL;
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
AImageReader_setImageListener(mImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mImageReader");
#endif
}
if (mPreviewSessionOutput != NULL)
{
if (capture_session_output_container)
@ -1059,6 +1188,16 @@ void NdkCamera::close()
mPreviewSessionOutput = 0;
}
if (mSessionOutput != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput);
}
ACaptureSessionOutput_free(mSessionOutput);
mSessionOutput = 0;
}
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_free(capture_session_output_container);
@ -1080,9 +1219,22 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
{
AImage* image = 0;
media_status_t mstatus = AMEDIA_OK;
int64_t minTs;
#if 0
if (reader == mPreviewImageReader)
{
mstatus = AImageReader_acquireLatestImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus);
}
return;
}
if (!mCaptureTriggered)
{
uint8_t* y_data = 0;
int y_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
@ -1093,11 +1245,25 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
#endif
avgY = avgY / (uint64_t)y_len;
#endif
m_locker.lock();
mLdr = (uint8_t)avgY;
m_locker.unlock();
}
AImage_delete(image);
return;
}
else
{
uint32_t burstCaptures = getBurstCaptures();
uint32_t numberOfFrames = 0;
uint64_t ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
if (burstCaptures == 0)
{
burstCaptures = 1;
}
if (true)
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
@ -1114,34 +1280,6 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
break;
}
m_locker.lock();
minTs = m_minTimestamp;
numberOfFrames = mOneFrame.size();
m_locker.unlock();
if (minTs == 0 || numberOfFrames >= burstCaptures)
{
AImage_delete(image);
continue;
}
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
bool valid = false;
if (frameTs >= minTs)
{
m_locker.lock();
auto it = mCaptureResultMap.find(frameTs);
valid = (it != mCaptureResultMap.end());
m_locker.unlock();
}
if (!valid)
{
AImage_delete(image);
continue;
}
int32_t format;
mstatus = AImage_getFormat(image, &format);
@ -1226,21 +1364,34 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
}
m_photoTaken = true;
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
AImage_delete(image);
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
if (!frame.empty())
{
mOneFrame.push_back(std::make_pair<>(frameTs, frame));
numberOfFrames = mOneFrame.size();
}
if (numberOfFrames >= burstCaptures)
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
@ -1253,11 +1404,54 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
uint64_t ts = GetMicroTimeStamp();
FireOneCapture(ts);
// onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
break;
}
}
}
else
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
}
m_photoTaken = true;
m_locker.lock();
mCaptureFrames.push_back(std::shared_ptr<AImage>(image, Auto_AImage_delete));
m_locker.unlock();
break;
ALOGD("Capture Image Received");
}
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
@ -1418,56 +1612,27 @@ void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequ
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
camera_status_t status = ACAMERA_ERROR_BASE;
m_delayFrames++;
if (m_delayFrames <= 4)
{
if (m_delayFrames == 4)
{
uint8_t captureIntent = GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
if (captureIntent != ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW)
{
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
void* context = NULL;
ACaptureRequest_getUserContext(request, &context);
CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);
if (m_params.burstRawCapture == 2 || m_params.burstRawCapture == 3)
if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request)
{
int32_t stillCaptureHint = 1;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &stillCaptureHint);
}
}
mCaptureTriggered = true;
}
return;
}
m_locker.lock();
int64_t minTs = m_minTimestamp;
m_locker.unlock();
int64_t resultTimestamp = GetTimestamp(result);
if (minTs > 0)
if (mCaptureTriggered)
{
uint32_t burstCaptures = getBurstCaptures();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
mCaptureResultMap[resultTimestamp] = captureResult;
return;
}
void* context = NULL;
ACaptureRequest_getUserContext(request, &context);
CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);
bool readyForCapture = true;
uint64_t ts = GetMicroTimeStamp();
camera_status_t status = ACAMERA_ERROR_BASE;
unsigned long long ts = GetMicroTimeStamp();
uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE;
uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE;
uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE;
int32_t sensitivity = -1;
ACameraMetadata_const_entry val = { 0 };
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
@ -1481,6 +1646,11 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : -1;
// XYLOG(XYLOG_SEVERITY_DEBUG, "Preview State AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
// Check if timeout
@ -1598,9 +1768,19 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
#endif
}
if (readyForCapture/* && mCaptureRequests.size() > 1*/)
if (readyForCapture)
{
mStableFrameCount++;
if (mStableFrameCount >= 3) { // 确保连续3帧稳定
// 进行实际的静态抓拍
mStableFrameCount = 0;
} else {
readyForCapture = false; // 继续等待
}
}
if (readyForCapture/* && mCaptureRequests.size() > 1*/)
{
// Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr
if (mLdr != ~0)
{
@ -1612,11 +1792,17 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
uint32_t burstCaptures = getBurstCaptures();
std::vector<ACaptureRequest*> requests;
int sequenceId = 0;
requests.reserve(burstCaptures);
m_locker.lock();
m_minTimestamp = resultTimestamp;
m_locker.unlock();
for (int idx = 0; idx < burstCaptures; idx++)
{
CaptureRequest* request = CreateRequest(false, sensitivity);
mCaptureRequests.push_back(request);
// CopyPreviewRequest(mCaptureRequests[idx]->request, result);
requests.push_back(request->request);
}
#if 0
if (m_params.customHdr && burstCaptures > 1)
@ -1651,6 +1837,110 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
#endif
// ALOGW("Will Stop Repeating Request");
// status = ACameraCaptureSession_stopRepeating(capture_session);
// ALOGW("Finished Repeating Request");
ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
capture_session_capture_cb.context = this;
capture_session_capture_cb.onCaptureStarted = 0;
capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed;
capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted;
capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed;
capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted;
capture_session_capture_cb.onCaptureBufferLost = 0;
status = ACameraCaptureSession_stopRepeating(capture_session);
std::this_thread::sleep_for(std::chrono::milliseconds(50));
int numberOfRequests = requests.size();
status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
numberOfRequests, &requests[0], &sequenceId);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status);
ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
for (int idx = 1; idx < mCaptureRequests.size(); idx++)
{
mCaptureRequests[idx]->sessionSequenceId = sequenceId;
}
mCaptureTriggered = true;
}
}
else
{
#ifdef _DEBUG
uint64_t tid = getThreadIdOfULL();
ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
unsigned long long ts = GetMicroTimeStamp();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
bool captureDispatchable = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
int64_t resultTimestamp = GetTimestamp(result);
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
if (true)
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
mCaptureResultMap[resultTimestamp] = captureResult;
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
FireOneCapture(ts);
}
}
else
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
@ -1680,18 +1970,18 @@ void NdkCamera::FireOneCapture(uint64_t ts)
{
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
size_t idx = std::distance(mOneFrame.cbegin(), it);
auto itResult = mCaptureResultMap.find(it->first);
std::shared_ptr<ACameraMetadata> result = mCaptureResults[idx];
CAPTURE_RESULT captureResult = { 0 };
EnumCameraResult(itResult->second.get(), captureResult);
EnumCameraResult(result.get(), captureResult);
fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg";
cv::imwrite(fileName, it->second, params);
}
}
#endif
auto it = mOneFrame.back();
auto itResult = mCaptureResultMap.find(it.first);
onOneCapture(mCharacteristics, itResult->second, mFinalLdr, ts - m_startTime, it.second);
onOneCapture(mCharacteristics, mCaptureResults.back(), mFinalLdr, ts - m_startTime, mOneFrame.back().second);
}
void NdkCamera::FireBurstCapture()
@ -1700,29 +1990,19 @@ void NdkCamera::FireBurstCapture()
unsigned long long ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
size_t expectedTimes = getBurstCaptures();
std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
uint32_t ldr;
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mFinalLdr;
if (ldr == 0 && mLdr != ~0)
{
ldr = mLdr;
}
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
for (auto it = captureFrames.cbegin(); it != captureFrames.cend(); ++it)
{
int64_t imgTs = 0;
AImage_getTimestamp(it->get(), &imgTs);
auto itResult = mCaptureResultMap.find(imgTs);
if (itResult != mCaptureResultMap.end())
{
captureResults.push_back(itResult->second);
}
}
m_locker.unlock();
media_status_t mstatus;
@ -1789,6 +2069,16 @@ void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetada
focusDistance = *val.data.f;
}
*/
// 添加AWB和色彩校正参数的复制
ACameraMetadata_const_entry entry;
if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_GAINS, &entry) == ACAMERA_OK) {
ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, entry.count, entry.data.f);
}
if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_TRANSFORM, &entry) == ACAMERA_OK) {
ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_TRANSFORM, entry.count, entry.data.f);
}
}
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
@ -2160,15 +2450,11 @@ void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captur
captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
}
void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request)
void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity)
{
// 1. 设置基础的相机参数
// ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_MODE, 1, ACAMERA_CONTROL_MODE_AUTO);
camera_status_t status;
// __system_property_set("vendor.mfll.force", "1");
#if 0
int32_t tagCount = 0;
const uint32_t* tags = nullptr;
@ -2190,6 +2476,15 @@ void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* req
}
}
#endif
ACameraMetadata_const_entry entry = { 0 };
status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
}
}
uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
@ -2198,25 +2493,29 @@ void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* req
ALOGE("Failed to set ACAMERA_NOISE_REDUCTION_MODE, status: %d", status);
}
uint8_t reqRemosaicEnable = 1;
status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
// int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR;
// status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
// 2. 设置 MediaTek 特定的 MFNR 参数
// 使用 vendor tag 描述符
// int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR
int32_t mfbMode = 1; // 1 Enable MFNR
int32_t mfbMode = ais ? 2 : 1; // 1 Enable MFNR
uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status);
}
int32_t ispTuning = (mfbMode != 0) ? MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR : MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_DEFAULT_NONE;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
uint8_t reqRemosaicEnable = (mfbMode != 0) ? 1 : 0;
status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
}
if (m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;

@ -40,6 +40,9 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
#define WAIT_AF_LOCKED 4
#define PREVIEW_REQUEST_IDX 0
#define CAPTURE_REQUEST_IDX 1
#define DEFAULT_WARMUP_TIME 250 // 250ms
class CameraManager
{
@ -161,7 +164,7 @@ public:
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow);
CaptureRequest* CreateRequest(bool isPreviewRequest);
CaptureRequest* CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1);
void DestroyRequest(CaptureRequest* request);
void DestroySession();
@ -203,7 +206,7 @@ public:
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request);
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity);
protected:
std::mutex m_locker;
@ -243,10 +246,10 @@ protected:
bool mCaptureTriggered;
bool mFocusTriggered;
bool mCaptureDispatched;
uint32_t mStableFrameCount;
CAPTURE_RESULT mResult;
uint64_t m_startTime;
uint64_t m_delayFrames;
protected:
@ -262,14 +265,22 @@ protected:
ACameraOutputTarget* mPreviewOutputTarget;
ACaptureSessionOutput* mPreviewSessionOutput;
AImageReader* mImageReader;
ANativeWindow* mImageWindow;
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
@ -278,12 +289,6 @@ protected:
std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;
// ACameraOutputTarget* image_reader_target;
// ACaptureRequest* capture_request;
// ACaptureSessionOutput* capture_session_output;
};
#endif // NDKCAMERA_H

Loading…
Cancel
Save