调整拍照的实现,改成一个request实现测光和拍照

nx2024
Matthew 2 months ago
parent f2ce7f3563
commit 8eac1a54b4

@ -49,62 +49,6 @@
// #define MTK_3A_MFNR_ENABLE "com.mediatek.3afeature.mfnrenable"
#define MTK_3A_MFNR_INTENSITY_TAG "com.mediatek.3afeature.mfnrintensity"
// Configure MediaTek MFNR (Multi-Frame Noise Reduction) vendor tags on a
// capture request: high-quality noise reduction, remosaic enable, AE on,
// and the MFB (multi-frame blending) mode that turns MFNR on.
// Every setEntry result is checked and logged so a rejected vendor tag on a
// given platform is visible in the log instead of failing silently.
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request) {
    // 1. Set up the basic camera parameters.
    // ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_MODE, 1, ACAMERA_CONTROL_MODE_AUTO);
    camera_status_t status;
    // __system_property_set("vendor.mfll.force", "1");
#if 0
    // Debug helper: dump the available MFB modes advertised by the HAL.
    int32_t tagCount = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
    for (int32_t i = 0; i < tagCount; i++) {
        if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i])
        {
            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]);
        }
    }
    ACameraMetadata_const_entry entry;
    status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
    if (status == ACAMERA_OK)
    {
        for (int i = 0; i < entry.count; i++)
        {
            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
        }
    }
#endif
    uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    status = ACaptureRequest_setEntry_u8(request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
    if (status != ACAMERA_OK)
    {
        ALOGE("Failed to set ACAMERA_NOISE_REDUCTION_MODE, status: %d", status);
    }
    uint8_t reqRemosaicEnable = 1;
    status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
    if (status != ACAMERA_OK)
    {
        // Previously unchecked: surface vendor-tag rejection instead of dropping it.
        ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
    }
    // int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR;
    // status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
    uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
    status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
    if (status != ACAMERA_OK)
    {
        // Previously unchecked: MFNR needs AE enabled, so log when this fails.
        ALOGE("Failed to set MTK_CONTROL_AE_MODE, status: %d", status);
    }
    // 2. Set the MediaTek-specific MFNR parameters (via vendor tag descriptors).
    // int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR
    int32_t mfbMode = 1; // 1 Enable MFNR
    status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode);
    if (status != ACAMERA_OK)
    {
        ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status);
    }
}
#ifdef _DEBUG
void Auto_AImage_delete(AImage* image)
@ -236,6 +180,10 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
camera_orientation = 0;
m_params = params;
if (m_params.burstCaptures == 0)
{
m_params.burstCaptures = 1;
}
m_firstFrame = true;
m_photoTaken = false;
mWidth = width;
@ -257,6 +205,8 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
numberOfPrecaptures = 0;
m_precaptureStartTime = 0;
m_minTimestamp = 0;
activeArraySize[0] = 0;
activeArraySize[1] = 0;
@ -273,16 +223,6 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mPreviewOutputTarget = NULL;
mPreviewSessionOutput = NULL;
mImageReader = NULL;
mImageWindow = NULL;
mOutputTarget = NULL;
mSessionOutput = NULL;
mImageReader2 = NULL;
mImageWindow2 = NULL;
mOutputTarget2 = NULL;
mSessionOutput2 = NULL;
camera_device = 0;
capture_session_output_container = 0;
@ -293,12 +233,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mResult = { 0 };
mLdr = ~0;
mFinalLdr = 0;
mFinalBurstCaptures = m_params.burstRawCapture == 0 ? m_params.burstCaptures : m_params.burstCaptures;
if (mFinalBurstCaptures == 0)
{
mFinalBurstCaptures = 1;
}
mFinalOutputFormat = (m_params.burstRawCapture == 0) ? AIMAGE_FORMAT_YUV_420_888 : AIMAGE_FORMAT_RAW16;
mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
}
NdkCamera::~NdkCamera()
@ -756,7 +691,7 @@ int NdkCamera::open(const std::string& cameraId) {
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures + 1, &mPreviewImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new preview, status=%d", status);
if (mstatus == AMEDIA_OK)
{
@ -772,50 +707,13 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new, status=%d", status);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader, &listener);
mstatus = AImageReader_getWindow(mImageReader, &mImageWindow);
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status);
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
CaptureRequest *request = CreateRequest(true);
if (m_params.burstRawCapture == 1) // Auto
{
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures, &mImageReader2);
if (mstatus == AMEDIA_OK)
if (m_params.burstRawCapture == 2)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader2, &listener);
mstatus = AImageReader_getWindow(mImageReader2, &mImageWindow2);
ANativeWindow_acquire(mImageWindow2);
}
status = ACameraOutputTarget_create(mImageWindow2, &mOutputTarget2);
status = ACaptureSessionOutput_create(mImageWindow2, &mSessionOutput2);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput2);
SetupMFNR(mCharacteristics.get(), request->request);
}
CaptureRequest *request = CreateRequest(true);
if (m_params.burstRawCapture == 0 && m_params.customHdr != 0)
{
// SetupMFNR(mCharacteristics.get(), request->request);
}
mCaptureRequests.push_back(request);
// capture session
@ -855,17 +753,14 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
bool autoSwitchToOneFrame = (m_params.burstRawCapture == 1) && (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888);
request->pThis = this;
request->imageReader = isPreviewRequest ? mPreviewImageReader : (autoSwitchToOneFrame ? mImageReader2 : mImageReader);
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : (autoSwitchToOneFrame ? mImageWindow2 : mImageWindow);
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : (autoSwitchToOneFrame ? mOutputTarget2 : mOutputTarget);
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : (autoSwitchToOneFrame ? mSessionOutput2 : mSessionOutput);
request->imageReader = mPreviewImageReader;
request->imageWindow = mPreviewImageWindow;
request->imageTarget = mPreviewOutputTarget;
request->sessionOutput = mPreviewSessionOutput;
// request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
request->templateId = (ACameraDevice_request_template)m_params.requestTemplate;
// mCaptureRequests.push_back(request);
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureRequest, status=%d", status);
@ -977,8 +872,6 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
}
}
if (isPreviewRequest)
{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
@ -1001,7 +894,6 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
}
}
else
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
@ -1083,8 +975,6 @@ void NdkCamera::close()
}
*/
mPreviewResults.reset();
mCaptureResults.clear();
mCaptureFrames.clear();
mCaptureResultMap.clear();
@ -1156,61 +1046,6 @@ void NdkCamera::close()
#ifdef _DEBUG
ALOGD("After Free mPreviewImageReader");
#endif
}
if (mOutputTarget != NULL)
{
ACameraOutputTarget_free(mOutputTarget);
mOutputTarget = 0;
}
if (mImageWindow != NULL)
{
ANativeWindow_release(mImageWindow);
mImageWindow = 0;
}
if (mImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mImageReader");
#endif
AImage* image = NULL;
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
AImageReader_setImageListener(mImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mImageReader");
#endif
}
if (mOutputTarget2 != NULL)
{
ACameraOutputTarget_free(mOutputTarget2);
mOutputTarget2 = 0;
}
if (mImageWindow2 != NULL)
{
ANativeWindow_release(mImageWindow2);
mImageWindow2 = 0;
}
if (mImageReader2 != NULL)
{
AImageReader_setImageListener(mImageReader2, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader2);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader2 = 0;
}
if (mPreviewSessionOutput != NULL)
{
@ -1222,25 +1057,6 @@ void NdkCamera::close()
mPreviewSessionOutput = 0;
}
if (mSessionOutput != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput);
}
ACaptureSessionOutput_free(mSessionOutput);
mSessionOutput = 0;
}
if (mSessionOutput2 != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput2);
}
ACaptureSessionOutput_free(mSessionOutput2);
mSessionOutput2 = 0;
}
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_free(capture_session_output_container);
@ -1262,22 +1078,9 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
{
AImage* image = 0;
media_status_t mstatus = AMEDIA_OK;
int64_t minTs;
if (reader == mPreviewImageReader)
{
mstatus = AImageReader_acquireLatestImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus);
}
return;
}
if (!mCaptureTriggered)
{
#if 0
uint8_t* y_data = 0;
int y_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
@ -1288,25 +1091,11 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
#endif
avgY = avgY / (uint64_t)y_len;
m_locker.lock();
mLdr = (uint8_t)avgY;
m_locker.unlock();
}
#endif
AImage_delete(image);
return;
}
else
{
uint32_t burstCaptures = getBurstCaptures();
uint64_t ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
if (burstCaptures == 0)
{
burstCaptures = 1;
}
if (m_params.burstRawCapture == 0)
{
uint32_t numberOfFrames = 0;
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
@ -1323,6 +1112,34 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
break;
}
m_locker.lock();
minTs = m_minTimestamp;
numberOfFrames = mOneFrame.size();
m_locker.unlock();
if (minTs == 0 || numberOfFrames >= burstCaptures)
{
AImage_delete(image);
continue;
}
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
bool valid = false;
if (frameTs >= minTs)
{
m_locker.lock();
auto it = mCaptureResultMap.find(frameTs);
valid = (it != mCaptureResultMap.end());
m_locker.unlock();
}
if (!valid)
{
AImage_delete(image);
continue;
}
int32_t format;
mstatus = AImage_getFormat(image, &format);
@ -1407,33 +1224,21 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
}
m_photoTaken = true;
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
AImage_delete(image);
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
if (!frame.empty())
{
mOneFrame.push_back(std::make_pair<>(frameTs, frame));
numberOfFrames = mOneFrame.size();
}
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
if (numberOfFrames >= burstCaptures)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
@ -1446,54 +1251,11 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
uint64_t ts = GetMicroTimeStamp();
FireOneCapture(ts);
// onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
break;
}
}
}
else
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
}
m_photoTaken = true;
m_locker.lock();
mCaptureFrames.push_back(std::shared_ptr<AImage>(image, Auto_AImage_delete));
m_locker.unlock();
ALOGD("Capture Image Received");
}
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
break;
}
}
}
@ -1654,20 +1416,28 @@ void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequ
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
void* context = NULL;
ACaptureRequest_getUserContext(request, &context);
CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);
m_locker.lock();
int64_t minTs = m_minTimestamp;
m_locker.unlock();
int64_t resultTimestamp = GetTimestamp(result);
if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request)
{
if (mCaptureTriggered)
if (minTs > 0)
{
uint32_t burstCaptures = getBurstCaptures();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
mCaptureResultMap[resultTimestamp] = captureResult;
return;
}
void* context = NULL;
ACaptureRequest_getUserContext(request, &context);
CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);
bool readyForCapture = true;
camera_status_t status = ACAMERA_ERROR_BASE;
unsigned long long ts = GetMicroTimeStamp();
uint64_t ts = GetMicroTimeStamp();
uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE;
uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE;
@ -1805,6 +1575,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
if (readyForCapture/* && mCaptureRequests.size() > 1*/)
{
// Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr
if (mLdr != ~0)
{
@ -1813,37 +1584,14 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u",
(unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime));
if (m_params.burstRawCapture == 1)
{
if (mFinalLdr > 50)
{
XYLOG(XYLOG_SEVERITY_WARNING, "Switch to OneFrame Capture(YUV) As LDR=%u", mFinalLdr);
mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
mFinalBurstCaptures = 1;
}
}
uint32_t burstCaptures = getBurstCaptures();
if (burstCaptures == 0)
{
burstCaptures = 1;
}
std::vector<ACaptureRequest*> requests;
int sequenceId = 0;
requests.reserve(burstCaptures);
for (int idx = 0; idx < burstCaptures; idx++)
{
CaptureRequest* request = CreateRequest(false);
if (m_params.burstRawCapture == 0 && m_params.customHdr != 0)
{
SetupMFNR(mCharacteristics.get(), request->request);
}
mCaptureRequests.push_back(request);
// CopyPreviewRequest(mCaptureRequests[idx]->request, result);
requests.push_back(request->request);
}
m_locker.lock();
m_minTimestamp = resultTimestamp;
m_locker.unlock();
#if 0
if (m_params.customHdr && burstCaptures > 1)
@ -1878,108 +1626,8 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
#endif
// ALOGW("Will Stop Repeating Request");
// status = ACameraCaptureSession_stopRepeating(capture_session);
// ALOGW("Finished Repeating Request");
ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
capture_session_capture_cb.context = this;
capture_session_capture_cb.onCaptureStarted = 0;
capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed;
capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted;
capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed;
capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted;
capture_session_capture_cb.onCaptureBufferLost = 0;
int numberOfRequests = requests.size();
status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
numberOfRequests, &requests[0], &sequenceId);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status);
ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
for (int idx = 1; idx < mCaptureRequests.size(); idx++)
{
mCaptureRequests[idx]->sessionSequenceId = sequenceId;
}
mCaptureTriggered = true;
}
}
else
{
#ifdef _DEBUG
uint64_t tid = getThreadIdOfULL();
ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
unsigned long long ts = GetMicroTimeStamp();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
bool captureDispatchable = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
int64_t resultTimestamp = GetTimestamp(result);
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
if (m_params.burstRawCapture == 0)
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
mCaptureResultMap[resultTimestamp] = captureResult;
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
FireOneCapture(ts);
}
}
else
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
int64_t NdkCamera::GetTimestamp(const ACameraMetadata* result)
@ -2008,16 +1656,18 @@ void NdkCamera::FireOneCapture(uint64_t ts)
{
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
size_t idx = std::distance(mOneFrame.cbegin(), it);
std::shared_ptr<ACameraMetadata> result = mCaptureResults[idx];
auto itResult = mCaptureResultMap.find(it->first);
CAPTURE_RESULT captureResult = { 0 };
EnumCameraResult(result.get(), captureResult);
EnumCameraResult(itResult->second.get(), captureResult);
fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg";
cv::imwrite(fileName, it->second, params);
}
}
#endif
onOneCapture(mCharacteristics, mCaptureResults.back(), mFinalLdr, ts - m_startTime, mOneFrame.back().second);
auto it = mOneFrame.back();
auto itResult = mCaptureResultMap.find(it.first);
onOneCapture(mCharacteristics, itResult->second, mFinalLdr, ts - m_startTime, it.second);
}
void NdkCamera::FireBurstCapture()
@ -2032,13 +1682,23 @@ void NdkCamera::FireBurstCapture()
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mFinalLdr;
if (ldr == 0 && mLdr != ~0)
{
ldr = mLdr;
}
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
for (auto it = captureFrames.cbegin(); it != captureFrames.cend(); ++it)
{
int64_t imgTs = 0;
AImage_getTimestamp(it->get(), &imgTs);
auto itResult = mCaptureResultMap.find(imgTs);
if (itResult != mCaptureResultMap.end())
{
captureResults.push_back(itResult->second);
}
}
m_locker.unlock();
media_status_t mstatus;
@ -2173,7 +1833,7 @@ int32_t NdkCamera::getOutputFormat() const
// Number of frames to capture in one burst.
// Defect fixed: the body contained two consecutive return statements, making
// the second unreachable. The surviving source of truth is the configured
// parameter (mFinalBurstCaptures is removed from the class in this change).
int32_t NdkCamera::getBurstCaptures() const
{
    return m_params.burstCaptures;
}
void NdkCamera::CreateSession(ANativeWindow* previewWindow,
@ -2475,3 +2135,73 @@ void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captur
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val);
captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
}
// Configure MediaTek MFNR (Multi-Frame Noise Reduction) vendor tags on a
// capture request: high-quality noise reduction, remosaic enable, AE on,
// the MFB mode that enables MFNR, and (when configured) AE exposure
// compensation from m_params. Every setEntry result is checked and logged so
// a rejected vendor tag is visible instead of failing silently.
void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request)
{
    // 1. Set up the basic camera parameters.
    // ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_MODE, 1, ACAMERA_CONTROL_MODE_AUTO);
    camera_status_t status;
    // __system_property_set("vendor.mfll.force", "1");
#if 0
    // Debug helper: dump the available MFB modes advertised by the HAL.
    int32_t tagCount = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
    for (int32_t i = 0; i < tagCount; i++) {
        if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i])
        {
            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]);
        }
    }
    ACameraMetadata_const_entry entry;
    status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
    if (status == ACAMERA_OK)
    {
        for (int i = 0; i < entry.count; i++)
        {
            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
        }
    }
#endif
    uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    status = ACaptureRequest_setEntry_u8(request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
    if (status != ACAMERA_OK)
    {
        ALOGE("Failed to set ACAMERA_NOISE_REDUCTION_MODE, status: %d", status);
    }
    uint8_t reqRemosaicEnable = 1;
    status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
    if (status != ACAMERA_OK)
    {
        // Previously unchecked: surface vendor-tag rejection instead of dropping it.
        ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
    }
    // int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR;
    // status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
    uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
    status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
    if (status != ACAMERA_OK)
    {
        // Previously unchecked: MFNR needs AE enabled, so log when this fails.
        ALOGE("Failed to set MTK_CONTROL_AE_MODE, status: %d", status);
    }
    // 2. Set the MediaTek-specific MFNR parameters (via vendor tag descriptors).
    // int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR
    int32_t mfbMode = 1; // 1 Enable MFNR
    status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode);
    if (status != ACAMERA_OK)
    {
        ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status);
    }
    // 3. Apply configured exposure compensation, if any (0 means "leave default").
    if (m_params.compensation != 0)
    {
        int32_t compensation = m_params.compensation;
        status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
        if (status != ACAMERA_OK)
        {
            ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status);
        }
    }
}

@ -202,6 +202,9 @@ public:
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request);
protected:
std::mutex m_locker;
std::set<std::string> m_availableCameras;
@ -258,33 +261,21 @@ protected:
ACameraOutputTarget* mPreviewOutputTarget;
ACaptureSessionOutput* mPreviewSessionOutput;
AImageReader* mImageReader;
ANativeWindow* mImageWindow;
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
AImageReader* mImageReader2;
ANativeWindow* mImageWindow2;
ACameraOutputTarget* mOutputTarget2;
ACaptureSessionOutput* mSessionOutput2;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
// cv::Mat mOneFrame;
std::vector<std::pair<int64_t, cv::Mat> > mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;

Loading…
Cancel
Save