From 7a51b9610073845fbe18207c5615e814bdc197c8 Mon Sep 17 00:00:00 2001 From: Matthew Date: Tue, 15 Apr 2025 16:10:20 +0800 Subject: [PATCH] =?UTF-8?q?=E8=B0=83=E6=95=B4=E6=8B=8D=E7=85=A7=E7=9A=84?= =?UTF-8?q?=E5=AE=9E=E7=8E=B0=EF=BC=8C=E6=94=B9=E6=88=90=E4=B8=80=E4=B8=AA?= =?UTF-8?q?request=E5=AE=9E=E7=8E=B0=E6=B5=8B=E5=85=89=E5=92=8C=E6=8B=8D?= =?UTF-8?q?=E7=85=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/src/main/cpp/camera2/ndkcamera.cpp | 1113 +++++++++--------------- app/src/main/cpp/camera2/ndkcamera.h | 17 +- 2 files changed, 423 insertions(+), 707 deletions(-) diff --git a/app/src/main/cpp/camera2/ndkcamera.cpp b/app/src/main/cpp/camera2/ndkcamera.cpp index cf529e6e..20eebf68 100644 --- a/app/src/main/cpp/camera2/ndkcamera.cpp +++ b/app/src/main/cpp/camera2/ndkcamera.cpp @@ -50,62 +50,6 @@ // #define MTK_3A_MFNR_ENABLE "com.mediatek.3afeature.mfnrenable" #define MTK_3A_MFNR_INTENSITY_TAG "com.mediatek.3afeature.mfnrintensity" -void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request) { - // 1. 设置基础的相机参数 - // ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_MODE, 1, ACAMERA_CONTROL_MODE_AUTO); - camera_status_t status; - - // __system_property_set("vendor.mfll.force", "1"); - - -#if 0 - int32_t tagCount = 0; - const uint32_t* tags = nullptr; - ACameraMetadata_getAllTags(characteristics, &tagCount, &tags); - for (int32_t i = 0; i < tagCount; i++) { - if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i]) - { - ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]); - } - } - - ACameraMetadata_const_entry entry; - status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry); - if (status == ACAMERA_OK) - { - for (int i = 0; i < entry.count; i++) - { - ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]); - } - } -#endif - - uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY; - status = ACaptureRequest_setEntry_u8(request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode); - if (status != ACAMERA_OK) - { - ALOGE("Failed to set ACAMERA_NOISE_REDUCTION_MODE, status: %d", status); - } - - uint8_t reqRemosaicEnable = 1; - status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable); - - // int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR; - // status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning); - - uint8_t aeMode = MTK_CONTROL_AE_MODE_ON; - status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode); - - // 2. 
设置 MediaTek 特定的 MFNR 参数 - // 使用 vendor tag 描述符 - // int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR - int32_t mfbMode = 1; // 1 Enable MFNR - status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode); - if (status != ACAMERA_OK) - { - ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status); - } -} void saveYuvToFile(AImage* image, const std::string& filePath) { @@ -128,8 +72,6 @@ void saveYuvToFile(AImage* image, const std::string& filePath) { AImage_getPlaneRowStride(image, 1, &uStride); // U 分量的 Stride AImage_getPlaneRowStride(image, 2, &vStride); // V 分量的 Stride - - // 打开文件 std::ofstream file(filePath, std::ios::binary); if (!file.is_open()) { @@ -286,6 +228,10 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA camera_orientation = 0; m_params = params; + if (m_params.burstCaptures == 0) + { + m_params.burstCaptures = 1; + } m_firstFrame = true; m_photoTaken = false; mWidth = width; @@ -307,6 +253,8 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA numberOfPrecaptures = 0; m_precaptureStartTime = 0; + m_minTimestamp = 0; + activeArraySize[0] = 0; activeArraySize[1] = 0; @@ -323,16 +271,6 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA mPreviewOutputTarget = NULL; mPreviewSessionOutput = NULL; - mImageReader = NULL; - mImageWindow = NULL; - mOutputTarget = NULL; - mSessionOutput = NULL; - - mImageReader2 = NULL; - mImageWindow2 = NULL; - mOutputTarget2 = NULL; - mSessionOutput2 = NULL; - camera_device = 0; capture_session_output_container = 0; @@ -343,12 +281,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA mResult = { 0 }; mLdr = ~0; mFinalLdr = 0; - mFinalBurstCaptures = m_params.burstRawCapture == 0 ? m_params.burstCaptures : m_params.burstCaptures; - if (mFinalBurstCaptures == 0) - { - mFinalBurstCaptures = 1; - } - mFinalOutputFormat = (m_params.burstRawCapture == 0) ? 
AIMAGE_FORMAT_YUV_420_888 : AIMAGE_FORMAT_RAW16; + mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888; } NdkCamera::~NdkCamera() @@ -797,7 +730,7 @@ int NdkCamera::open(const std::string& cameraId) { } // setup imagereader and its surface - media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader); + media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures + 1, &mPreviewImageReader); AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new preview, status=%d", status); if (mstatus == AMEDIA_OK) { @@ -813,50 +746,13 @@ int NdkCamera::open(const std::string& cameraId) { status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput); status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput); - mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader); - AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new, status=%d", status); - if (mstatus == AMEDIA_OK) - { - AImageReader_ImageListener listener; - listener.context = this; - listener.onImageAvailable = ::onImageAvailable; - mstatus = AImageReader_setImageListener(mImageReader, &listener); - mstatus = AImageReader_getWindow(mImageReader, &mImageWindow); - ANativeWindow_acquire(mImageWindow); - } - status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget); - AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status); - - status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput); - AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status); - - status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput); - + CaptureRequest *request = CreateRequest(true); - if (m_params.burstRawCapture == 1) // Auto + if (m_params.burstRawCapture == 2) { - mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures, &mImageReader2); - if (mstatus == AMEDIA_OK) - { - AImageReader_ImageListener listener; - listener.context = this; - listener.onImageAvailable = ::onImageAvailable; - mstatus = AImageReader_setImageListener(mImageReader2, &listener); - mstatus = AImageReader_getWindow(mImageReader2, &mImageWindow2); - ANativeWindow_acquire(mImageWindow2); - } - status = ACameraOutputTarget_create(mImageWindow2, &mOutputTarget2); - - status = ACaptureSessionOutput_create(mImageWindow2, &mSessionOutput2); - status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput2); + SetupMFNR(mCharacteristics.get(), request->request); } - - CaptureRequest *request = CreateRequest(true); - if (m_params.burstRawCapture == 0 && m_params.customHdr != 0) - { - // SetupMFNR(mCharacteristics.get(), request->request); - } mCaptureRequests.push_back(request); // capture session @@ -896,17 +792,14 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) CaptureRequest *request = new CaptureRequest(); std::memset(request, 0, sizeof(CaptureRequest)); - bool autoSwitchToOneFrame = (m_params.burstRawCapture == 1) && (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888); - request->pThis = this; - request->imageReader = isPreviewRequest ? mPreviewImageReader : (autoSwitchToOneFrame ? mImageReader2 : mImageReader); - request->imageWindow = isPreviewRequest ? 
mPreviewImageWindow : (autoSwitchToOneFrame ? mImageWindow2 : mImageWindow); - request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : (autoSwitchToOneFrame ? mOutputTarget2 : mOutputTarget); - request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : (autoSwitchToOneFrame ? mSessionOutput2 : mSessionOutput); + request->pThis = this; + request->imageReader = mPreviewImageReader; + request->imageWindow = mPreviewImageWindow; + request->imageTarget = mPreviewOutputTarget; + request->sessionOutput = mPreviewSessionOutput; // request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate; request->templateId = (ACameraDevice_request_template)m_params.requestTemplate; - // mCaptureRequests.push_back(request); - // capture request status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request); AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureRequest, status=%d", status); @@ -1018,30 +911,27 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest) } } - if (isPreviewRequest) - { - if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) - { - uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); - XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); - mResult.aeLockSetted = 1; - } - else - { - uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); - XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); - } + XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE"); + mResult.aeLockSetted = 1; + } + else + { + uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock); + XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported"); + } - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); - XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); - m_precaptureStartTime = m_startTime; + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); + XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status); + m_precaptureStartTime = m_startTime; - // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); - } + // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff); } else { @@ -1124,8 +1014,6 @@ void NdkCamera::close() } */ - mPreviewResults.reset(); - mCaptureResults.clear(); mCaptureFrames.clear(); mCaptureResultMap.clear(); @@ -1198,61 +1086,6 @@ void NdkCamera::close() ALOGD("After Free mPreviewImageReader"); #endif } - - if (mOutputTarget != NULL) - { - ACameraOutputTarget_free(mOutputTarget); - mOutputTarget = 0; - } - - if (mImageWindow != NULL) - { - ANativeWindow_release(mImageWindow); - mImageWindow = 0; - } - - if (mImageReader != NULL) - { -#ifdef _DEBUG - ALOGD("Will Free mImageReader"); -#endif - AImage* image = NULL; - 
media_status_t mstatus; - while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK) - { - AImage_delete(image); - image = NULL; - } - AImageReader_setImageListener(mImageReader, NULL); - - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); - AImageReader_delete(mImageReader); - - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); - mImageReader = 0; -#ifdef _DEBUG - ALOGD("After Free mImageReader"); -#endif - } - if (mOutputTarget2 != NULL) - { - ACameraOutputTarget_free(mOutputTarget2); - mOutputTarget2 = 0; - } - if (mImageWindow2 != NULL) - { - ANativeWindow_release(mImageWindow2); - mImageWindow2 = 0; - } - if (mImageReader2 != NULL) - { - AImageReader_setImageListener(mImageReader2, NULL); - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str()); - AImageReader_delete(mImageReader2); - //XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str()); - - mImageReader2 = 0; - } if (mPreviewSessionOutput != NULL) { if (capture_session_output_container) @@ -1263,25 +1096,6 @@ void NdkCamera::close() mPreviewSessionOutput = 0; } - if (mSessionOutput != NULL) - { - if (capture_session_output_container) - { - ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput); - } - ACaptureSessionOutput_free(mSessionOutput); - mSessionOutput = 0; - } - if (mSessionOutput2 != NULL) - { - if (capture_session_output_container) - { - ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput2); - } - ACaptureSessionOutput_free(mSessionOutput2); - mSessionOutput2 = 0; - } - if (capture_session_output_container) { ACaptureSessionOutputContainer_free(capture_session_output_container); @@ -1303,250 +1117,195 @@ void NdkCamera::onImageAvailable(AImageReader* reader) { AImage* image = 0; media_status_t mstatus = AMEDIA_OK; + int64_t minTs; - if (reader == mPreviewImageReader) - { - mstatus = AImageReader_acquireLatestImage(reader, &image); +#if 0 + uint8_t* y_data = 0; + int y_len = 0; + AImage_getPlaneData(image, 0, &y_data, &y_len); + +#if __cplusplus >= 201703L + uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); +#else + uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); +#endif + avgY = avgY / (uint64_t)y_len; +#endif + + uint32_t burstCaptures = getBurstCaptures(); + uint32_t numberOfFrames = 0; + + while (1) + { + mstatus = AImageReader_acquireNextImage(reader, &image); if (mstatus != AMEDIA_OK) { // https://stackoverflow.com/questions/67063562 if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) { - XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus); + if (mCaptureFrames.size() < burstCaptures) + { + XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus); + } } - return; + break; } - if (!mCaptureTriggered) - { - uint8_t* y_data = 0; - int y_len = 0; - AImage_getPlaneData(image, 0, &y_data, &y_len); + m_locker.lock(); + minTs = m_minTimestamp; + numberOfFrames = mOneFrame.size(); + m_locker.unlock(); -#if __cplusplus >= 201703L - uint64_t avgY = std::reduce(y_data, y_data + y_len, 0); -#else - uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0); -#endif - avgY = avgY / (uint64_t)y_len; + if (minTs == 0 || numberOfFrames >= burstCaptures) + { + AImage_delete(image); + continue; + } + + int64_t frameTs = 0; + mstatus = AImage_getTimestamp(image, &frameTs); + bool valid = false; + if 
(frameTs >= minTs) + { m_locker.lock(); - mLdr = (uint8_t)avgY; + auto it = mCaptureResultMap.find(frameTs); + valid = (it != mCaptureResultMap.end()); m_locker.unlock(); - } - - AImage_delete(image); - return; - } - else - { - uint32_t burstCaptures = getBurstCaptures(); - uint64_t ts = GetMicroTimeStamp(); - size_t expectedTimes = mCaptureRequests.size() - 1; - if (burstCaptures == 0) - { - burstCaptures = 1; - } - if (m_params.burstRawCapture == 0) - { - while (1) - { - mstatus = AImageReader_acquireNextImage(reader, &image); - if (mstatus != AMEDIA_OK) - { - // https://stackoverflow.com/questions/67063562 - if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) - { - if (mCaptureFrames.size() < burstCaptures) - { - XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus); - } - } - break; - } - - int32_t format; - mstatus = AImage_getFormat(image, &format); - - cv::Mat frame; - if (format == AIMAGE_FORMAT_YUV_420_888) - { - int32_t width; - int32_t height; - mstatus = AImage_getWidth(image, &width); - mstatus = AImage_getHeight(image, &height); - - int32_t y_pixelStride = 0; - int32_t u_pixelStride = 0; - int32_t v_pixelStride = 0; - AImage_getPlanePixelStride(image, 0, &y_pixelStride); - AImage_getPlanePixelStride(image, 1, &u_pixelStride); - AImage_getPlanePixelStride(image, 2, &v_pixelStride); - - int32_t y_rowStride = 0; - int32_t u_rowStride = 0; - int32_t v_rowStride = 0; - AImage_getPlaneRowStride(image, 0, &y_rowStride); - AImage_getPlaneRowStride(image, 1, &u_rowStride); - AImage_getPlaneRowStride(image, 2, &v_rowStride); - - uint8_t* y_data = 0; - uint8_t* u_data = 0; - uint8_t* v_data = 0; - int y_len = 0; - int u_len = 0; - int v_len = 0; - AImage_getPlaneData(image, 0, &y_data, &y_len); - AImage_getPlaneData(image, 1, &u_data, &u_len); - AImage_getPlaneData(image, 2, &v_data, &v_len); - - if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) - { - // already nv21 - ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation, - camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); - } - else - { - // construct nv21 - uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; - { - // Y - uint8_t* yptr = nv21; - for (int y = 0; y < height; y++) - { - const uint8_t* y_data_ptr = y_data + y_rowStride * y; - for (int x = 0; x < width; x++) - { - yptr[0] = y_data_ptr[0]; - yptr++; - y_data_ptr += y_pixelStride; - } - } - - // UV - uint8_t* uvptr = nv21 + width * height; - for (int y = 0; y < height / 2; y++) - { - const uint8_t* v_data_ptr = v_data + v_rowStride * y; - const uint8_t* u_data_ptr = u_data + u_rowStride * y; - for (int x = 0; x < width / 2; x++) - { - uvptr[0] = v_data_ptr[0]; - uvptr[1] = u_data_ptr[0]; - uvptr += 2; - v_data_ptr += v_pixelStride; - u_data_ptr += u_pixelStride; - } - } - } - - ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation, - camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); - - delete[] nv21; - } - } - m_photoTaken = true; + } - int64_t frameTs = 0; - mstatus = AImage_getTimestamp(image, &frameTs); + if (!valid) + { + AImage_delete(image); + continue; + } -#ifdef OUTPUT_DBG_INFO - if (mWidth == 1920) - { - std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", time(NULL)); - std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt; - fileName += "_" + mCameraId + 
std::to_string(frameTs) + ".yuv"; - saveYuvToFile(image, fileName.c_str()); - } -#endif - AImage_delete(image); + int32_t format; + mstatus = AImage_getFormat(image, &format); - bool captureCompleted = false; - bool captureDispatchable = false; - m_locker.lock(); - if (!frame.empty()) - { - mOneFrame.push_back(std::make_pair<>(frameTs, frame)); - } - if (mOneFrame.size() >= expectedTimes) + cv::Mat frame; + if (format == AIMAGE_FORMAT_YUV_420_888) + { + int32_t width; + int32_t height; + mstatus = AImage_getWidth(image, &width); + mstatus = AImage_getHeight(image, &height); + + int32_t y_pixelStride = 0; + int32_t u_pixelStride = 0; + int32_t v_pixelStride = 0; + AImage_getPlanePixelStride(image, 0, &y_pixelStride); + AImage_getPlanePixelStride(image, 1, &u_pixelStride); + AImage_getPlanePixelStride(image, 2, &v_pixelStride); + + int32_t y_rowStride = 0; + int32_t u_rowStride = 0; + int32_t v_rowStride = 0; + AImage_getPlaneRowStride(image, 0, &y_rowStride); + AImage_getPlaneRowStride(image, 1, &u_rowStride); + AImage_getPlaneRowStride(image, 2, &v_rowStride); + + uint8_t* y_data = 0; + uint8_t* u_data = 0; + uint8_t* v_data = 0; + int y_len = 0; + int u_len = 0; + int v_len = 0; + AImage_getPlaneData(image, 0, &y_data, &y_len); + AImage_getPlaneData(image, 1, &u_data, &u_len); + AImage_getPlaneData(image, 2, &v_data, &v_len); + + if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) + { + // already nv21 + ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation, + camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); + } + else + { + // construct nv21 + uint8_t* nv21 = new uint8_t[width * height + width * height / 2]; { - bool allExisted = true; - for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame) + // Y + uint8_t* yptr = nv21; + for (int y = 0; y < height; y++) { - if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend()) + const uint8_t* y_data_ptr = y_data + y_rowStride * y; + for (int x = 0; x < width; x++) { - allExisted = false; - break; + yptr[0] = y_data_ptr[0]; + yptr++; + y_data_ptr += y_pixelStride; } } - if (allExisted) + + // UV + uint8_t* uvptr = nv21 + width * height; + for (int y = 0; y < height / 2; y++) { - captureCompleted = true; + const uint8_t* v_data_ptr = v_data + v_rowStride * y; + const uint8_t* u_data_ptr = u_data + u_rowStride * y; + for (int x = 0; x < width / 2; x++) + { + uvptr[0] = v_data_ptr[0]; + uvptr[1] = u_data_ptr[0]; + uvptr += 2; + v_data_ptr += v_pixelStride; + u_data_ptr += u_pixelStride; + } } } - if (captureCompleted && !mCaptureDispatched) - { - mCaptureDispatched = true; - captureDispatchable = true; - } - m_locker.unlock(); + ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation, + camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame); - if (captureCompleted && captureDispatchable) - { - XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable"); - camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session); - FireOneCapture(ts); - // onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame); - break; - } - } - } - else - { - while (1) - { - mstatus = AImageReader_acquireNextImage(reader, &image); - if (mstatus != AMEDIA_OK) - { - // https://stackoverflow.com/questions/67063562 - if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE) - 
{ - if (mCaptureFrames.size() < burstCaptures) - { - XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus); - } - } - break; - } + delete[] nv21; + } + } + m_photoTaken = true; - m_photoTaken = true; - m_locker.lock(); - mCaptureFrames.push_back(std::shared_ptr(image, Auto_AImage_delete)); - m_locker.unlock(); +#ifdef OUTPUT_DBG_INFO + if (mWidth == 1920) + { + std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", time(NULL)); + std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt; + fileName += "_" + mCameraId + std::to_string(frameTs) + ".yuv"; + saveYuvToFile(image, fileName.c_str()); + } +#endif + AImage_delete(image); - ALOGD("Capture Image Received"); - } + bool captureCompleted = false; + bool captureDispatchable = false; - bool captureCompleted = false; - bool captureDispatchable = false; + m_locker.lock(); + if (!frame.empty()) + { + mOneFrame.push_back(std::make_pair<>(frameTs, frame)); + numberOfFrames = mOneFrame.size(); + } + if (numberOfFrames >= burstCaptures) + { + captureCompleted = true; + } - m_locker.lock(); - captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes; - if (captureCompleted && !mCaptureDispatched) - { - mCaptureDispatched = true; - captureDispatchable = true; - } - m_locker.unlock(); + if (captureCompleted && !mCaptureDispatched) + { + mCaptureDispatched = true; + captureDispatchable = true; + } + m_locker.unlock(); - if (captureCompleted && captureDispatchable) - { - FireBurstCapture(); - } - } - } + if (captureCompleted && captureDispatchable) + { + XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable"); + camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session); + uint64_t ts = GetMicroTimeStamp(); + FireOneCapture(ts); + + + break; + } + } } void NdkCamera::on_error(const std::string& msg) @@ -1705,77 +1464,85 @@ void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) { + m_locker.lock(); + int64_t minTs = m_minTimestamp; + m_locker.unlock(); + int64_t resultTimestamp = GetTimestamp(result); + + if (minTs > 0) + { + uint32_t burstCaptures = getBurstCaptures(); + ACameraMetadata* pCopy = ACameraMetadata_copy(result); + std::shared_ptr captureResult(pCopy, ACameraMetadata_free); + + mCaptureResultMap[resultTimestamp] = captureResult; + return; + } + void* context = NULL; ACaptureRequest_getUserContext(request, &context); CaptureRequest* pCaptureRequest = reinterpret_cast(context); - if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request) - { - if (mCaptureTriggered) - { - return; - } - - bool readyForCapture = true; - camera_status_t status = ACAMERA_ERROR_BASE; - unsigned long long ts = GetMicroTimeStamp(); + bool readyForCapture = true; + camera_status_t status = ACAMERA_ERROR_BASE; + uint64_t ts = GetMicroTimeStamp(); - uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE; - uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE; - uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE; + uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE; + uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE; + uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE; - ACameraMetadata_const_entry val = { 0 }; - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); - aeState = (status == ACAMERA_OK) ? 
*(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; + ACameraMetadata_const_entry val = { 0 }; + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val); + aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE; - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val); - awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val); + awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE; - val = { 0 }; - status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); - afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; + val = { 0 }; + status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val); + afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE; - // XYLOG(XYLOG_SEVERITY_DEBUG, "Preview State AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); + // XYLOG(XYLOG_SEVERITY_DEBUG, "Preview State AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); - // Check if timeout - if (ts - m_startTime < m_params.focusTimeout) - { - if (afSupported && (m_params.autoFocus != 0)) - { - /* - if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED) - { - // Will lock it - if (mResult.afLockSetted == 0) - { - uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; - status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); + // Check if timeout + if (ts - m_startTime < m_params.focusTimeout) + { + if (afSupported && (m_params.autoFocus != 0)) + { + /* + if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED) + { + // Will lock it + if (mResult.afLockSetted == 0) + { + uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START; + status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig); - mResult.afLockSetted = 1; - //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState); - readyForCapture = false; - } - } - */ + mResult.afLockSetted = 1; + //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState); + readyForCapture = false; + } + } + */ - if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED && - afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED/* && + if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED && + afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED/* && afState != ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED*/) - // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE) - { - //XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused"); - readyForCapture = false; - } - } + // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE) + { + //XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused"); + readyForCapture = false; + } + } - if (m_params.autoExposure != 0) - { - if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE) - { + if (m_params.autoExposure != 0) + { + if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE) + { #if 0 - uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; + uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger); 
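                 // Disabled (#if 0): this would cancel and then re-issue the AE precapture
                 // trigger from the result callback; in this patch the precapture trigger is
                 // set once when the shared preview/capture request is built in CreateRequest().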
aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START; @@ -1784,120 +1551,98 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque AASSERT(status == ACAMERA_OK, "Failed to call PRECAPTURE_TRIGGER, status=%d", status); #endif - readyForCapture = false; - numberOfPrecaptures = 0; - m_precaptureStartTime = ts; - } + readyForCapture = false; + numberOfPrecaptures = 0; + m_precaptureStartTime = ts; + } - if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) - { - if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { - readyForCapture = false; - } - else - { + if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED)) + { + if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { + readyForCapture = false; + } + else + { #if 0 - //XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked"); + //XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked"); #endif - } - } - else - { - if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED && - aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED && - aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { - readyForCapture = false; - } - else { + } + } + else + { + if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED && + aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED && + aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) { + readyForCapture = false; + } + else { #if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked"); + XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked"); #endif - } - } - } + } + } + } - if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) - { - if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) { - if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) - { - readyForCapture = false; - } - else - { + if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) + { + if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) { + if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) + { + readyForCapture = false; + } + else + { #if 0 - //XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked"); + //XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked"); #endif - } - } - else - { - if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED && - awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) - { - readyForCapture = false; - } - else - { + } + } + else + { + if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED && + awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED) + { + readyForCapture = false; + } + else + { #if 0 - XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked"); + XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked"); #endif - } - } - } - } - else - { + } + } + } + } + else + { #if 0 - XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u", + XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime)); #endif - } + } - if (readyForCapture/* && mCaptureRequests.size() > 1*/) - { - // Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr - if (mLdr != ~0) - { - mFinalLdr = mLdr; - } + if (readyForCapture/* && mCaptureRequests.size() > 1*/) + { - XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u", - (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime)); - if (m_params.burstRawCapture == 1) - { - if (mFinalLdr > 50) - { - XYLOG(XYLOG_SEVERITY_WARNING, "Switch to OneFrame Capture(YUV) As LDR=%u", mFinalLdr); - mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888; - 
mFinalBurstCaptures = 1; - } - } + // Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr + if (mLdr != ~0) + { + mFinalLdr = mLdr; + } - uint32_t burstCaptures = getBurstCaptures(); - if (burstCaptures == 0) - { - burstCaptures = 1; - } + XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u", + (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime)); - std::vector requests; - int sequenceId = 0; - requests.reserve(burstCaptures); + uint32_t burstCaptures = getBurstCaptures(); + + + m_locker.lock(); + m_minTimestamp = resultTimestamp; + m_locker.unlock(); - for (int idx = 0; idx < burstCaptures; idx++) - { - CaptureRequest* request = CreateRequest(false); - if (m_params.burstRawCapture == 0 && m_params.customHdr != 0) - { - SetupMFNR(mCharacteristics.get(), request->request); - } - mCaptureRequests.push_back(request); - // CopyPreviewRequest(mCaptureRequests[idx]->request, result); - requests.push_back(request->request); - } #if 0 - if (m_params.customHdr && burstCaptures > 1) + if (m_params.customHdr && burstCaptures > 1) { int32_t hdrStep = m_params.hdrStep; if (hdrStep == 0) @@ -1939,108 +1684,8 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque } #endif - // ALOGW("Will Stop Repeating Request"); - // status = ACameraCaptureSession_stopRepeating(capture_session); - // ALOGW("Finished Repeating Request"); - - ACameraCaptureSession_captureCallbacks capture_session_capture_cb; - capture_session_capture_cb.context = this; - capture_session_capture_cb.onCaptureStarted = 0; - capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed; - capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted; - capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed; - capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted; - capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted; - capture_session_capture_cb.onCaptureBufferLost = 0; - - int numberOfRequests = requests.size(); - status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb, - numberOfRequests, &requests[0], &sequenceId); - AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status); - - ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId); - for (int idx = 1; idx < mCaptureRequests.size(); idx++) - { - mCaptureRequests[idx]->sessionSequenceId = sequenceId; - } - - mCaptureTriggered = true; - } - } - else - { -#ifdef _DEBUG - uint64_t tid = getThreadIdOfULL(); - ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid); -#endif - - unsigned long long ts = GetMicroTimeStamp(); - - ACameraMetadata* pCopy = ACameraMetadata_copy(result); - bool captureCompleted = false; - bool captureDispatchable = false; - size_t expectedTimes = mCaptureRequests.size() - 1; - - int64_t resultTimestamp = GetTimestamp(result); - std::shared_ptr captureResult(pCopy, ACameraMetadata_free); - - if (m_params.burstRawCapture == 0) - { - m_locker.lock(); - mCaptureResults.push_back(captureResult); - mCaptureResultMap[resultTimestamp] = captureResult; - - if (mOneFrame.size() >= expectedTimes) - { - bool allExisted = true; - for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame) - { - if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend()) - { - allExisted = false; - break; - } - } - if 
(allExisted) - { - captureCompleted = true; - } - } - - if (captureCompleted && !mCaptureDispatched) - { - mCaptureDispatched = true; - captureDispatchable = true; - } - m_locker.unlock(); - - if (captureCompleted && captureDispatchable) - { - XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted"); - camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session); - - FireOneCapture(ts); - } - } - else - { - m_locker.lock(); - mCaptureResults.push_back(captureResult); - captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes; - if (captureCompleted && !mCaptureDispatched) - { - mCaptureDispatched = true; - captureDispatchable = true; - } - m_locker.unlock(); - - if (captureCompleted && captureDispatchable) - { - FireBurstCapture(); - } - } - - } + mCaptureTriggered = true; + } } int64_t NdkCamera::GetTimestamp(const ACameraMetadata* result) @@ -2069,18 +1714,18 @@ void NdkCamera::FireOneCapture(uint64_t ts) { std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt; size_t idx = std::distance(mOneFrame.cbegin(), it); - std::shared_ptr result = mCaptureResults[idx]; + auto itResult = mCaptureResultMap.find(it->first); CAPTURE_RESULT captureResult = { 0 }; - EnumCameraResult(result.get(), captureResult); + EnumCameraResult(itResult->second.get(), captureResult); fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg"; cv::imwrite(fileName, it->second, params); } } - - #endif - onOneCapture(mCharacteristics, mCaptureResults.back(), mFinalLdr, ts - m_startTime, mOneFrame.back().second); + auto it = mOneFrame.back(); + auto itResult = mCaptureResultMap.find(it.first); + onOneCapture(mCharacteristics, itResult->second, mFinalLdr, ts - m_startTime, it.second); } void NdkCamera::FireBurstCapture() @@ -2095,13 +1740,23 @@ void NdkCamera::FireBurstCapture() std::vector > captureFrames; m_locker.lock(); + ldr = mFinalLdr; if (ldr == 0 && mLdr != ~0) { ldr = mLdr; } - captureResults.swap(mCaptureResults); captureFrames.swap(mCaptureFrames); + for (auto it = captureFrames.cbegin(); it != captureFrames.cend(); ++it) + { + int64_t imgTs = 0; + AImage_getTimestamp(it->get(), &imgTs); + auto itResult = mCaptureResultMap.find(imgTs); + if (itResult != mCaptureResultMap.end()) + { + captureResults.push_back(itResult->second); + } + } m_locker.unlock(); media_status_t mstatus; @@ -2236,7 +1891,7 @@ int32_t NdkCamera::getOutputFormat() const int32_t NdkCamera::getBurstCaptures() const { - return mFinalBurstCaptures; + return m_params.burstCaptures; } void NdkCamera::CreateSession(ANativeWindow* previewWindow, @@ -2537,4 +2192,74 @@ void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captur val = {0}; status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val); captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0; -} \ No newline at end of file +} + +void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request) +{ + // 1. 
Set basic camera parameters
+    // ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_MODE, 1, ACAMERA_CONTROL_MODE_AUTO);
+    camera_status_t status;
+
+    // __system_property_set("vendor.mfll.force", "1");
+
+
+#if 0
+    int32_t tagCount = 0;
+    const uint32_t* tags = nullptr;
+    ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
+    for (int32_t i = 0; i < tagCount; i++) {
+        if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i])
+        {
+            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]);
+        }
+    }
+
+    ACameraMetadata_const_entry entry;
+    status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
+    if (status == ACAMERA_OK)
+    {
+        for (int i = 0; i < entry.count; i++)
+        {
+            ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
+        }
+    }
+#endif
+
+    uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
+    status = ACaptureRequest_setEntry_u8(request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
+    if (status != ACAMERA_OK)
+    {
+        ALOGE("Failed to set ACAMERA_NOISE_REDUCTION_MODE, status: %d", status);
+    }
+
+    uint8_t reqRemosaicEnable = 1;
+    status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
+
+    // int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR;
+    // status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
+
+    uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
+    status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
+
+    // 2. Set the MediaTek-specific MFNR parameters
+    // using the vendor tag descriptors
+    // int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR
+    int32_t mfbMode = 1; // 1 Enable MFNR
+    status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode);
+    if (status != ACAMERA_OK)
+    {
+        ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status);
+    }
+
+    if (m_params.compensation != 0)
+    {
+        int32_t compensation = m_params.compensation;
+        status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
+        if (status != ACAMERA_OK)
+        {
+            ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status);
+        }
+    }
+
+
+}
diff --git a/app/src/main/cpp/camera2/ndkcamera.h b/app/src/main/cpp/camera2/ndkcamera.h
index fe4384eb..99cfd891 100644
--- a/app/src/main/cpp/camera2/ndkcamera.h
+++ b/app/src/main/cpp/camera2/ndkcamera.h
@@ -202,6 +202,9 @@ public:
     static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
     static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
 
+protected:
+    void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request);
+
 protected:
     std::mutex m_locker;
     std::set m_availableCameras;
@@ -258,33 +261,21 @@ protected:
     ACameraOutputTarget* mPreviewOutputTarget;
     ACaptureSessionOutput* mPreviewSessionOutput;
 
-    AImageReader* mImageReader;
-    ANativeWindow* mImageWindow;
-    ACameraOutputTarget* mOutputTarget;
-    ACaptureSessionOutput* mSessionOutput;
-
-    AImageReader* mImageReader2;
-    ANativeWindow* mImageWindow2;
-    ACameraOutputTarget* mOutputTarget2;
-    ACaptureSessionOutput* mSessionOutput2;
-
     std::shared_ptr<ACameraMetadata> mCharacteristics;
     std::vector<CaptureRequest*> mCaptureRequests;
     ACameraCaptureSession* capture_session;
 
-    std::shared_ptr<ACameraMetadata> mPreviewResults;
-    std::vector<std::shared_ptr<ACameraMetadata>> mCaptureResults;
     std::map<int64_t, std::shared_ptr<ACameraMetadata>> mCaptureResultMap;
 
     uint32_t mLdr;
     uint32_t mFinalLdr;
-    uint32_t mFinalBurstCaptures;
     int32_t mFinalOutputFormat;
     std::vector<std::shared_ptr<AImage>> mCaptureFrames;
     // cv::Mat mOneFrame;
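     // Members driving the reworked single-request flow: mOneFrame (below) collects
     // (sensor timestamp, converted frame) pairs once 3A has converged, mCaptureResultMap
     // (above) keeps capture results keyed by sensor timestamp so metadata can be paired
     // with frames, and m_minTimestamp marks the first eligible timestamp; frames taken
     // before it belong to the metering phase and are dropped.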
std::vector<std::pair<int64_t, cv::Mat>> mOneFrame;
     std::vector > mRawFrames;
 
+    int64_t m_minTimestamp;
 
     // AImageReader* image_reader;
     // ANativeWindow* image_reader_surface;
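The change set above removes the dedicated still-capture AImageReaders and serves both metering and capture from the single repeating preview request: once onCaptureCompleted sees a result that passes the 3A readiness checks (or the focus timeout), it records that result's sensor timestamp in m_minTimestamp; later results are stored in mCaptureResultMap, and onImageAvailable keeps only frames whose timestamps are at or after that mark and whose metadata has already arrived, until burstCaptures frames are collected and FireOneCapture runs. The standalone sketch below illustrates only that gating idea under simplified assumptions; TimestampGate, onResult and onFrame are illustrative names, and the int payload stands in for ACameraMetadata. It is not code from this patch.

#include <cstdint>
#include <map>
#include <mutex>
#include <vector>

// Minimal sketch of the timestamp gate used by the single-request capture flow.
struct TimestampGate {
    std::mutex lock;
    int64_t minTimestamp = 0;               // 0 means 3A has not converged yet
    std::map<int64_t, int> resultsByTs;     // stand-in for ACameraMetadata per timestamp
    std::vector<int64_t> keptFrames;        // stand-in for the converted frames
    size_t burst = 3;                       // stand-in for m_params.burstCaptures

    // Called from the capture-result path (cf. onCaptureCompleted).
    void onResult(int64_t ts, bool ready, int result) {
        std::lock_guard<std::mutex> g(lock);
        if (minTimestamp == 0) {
            if (ready) minTimestamp = ts;   // open the gate at the converged result
            return;
        }
        resultsByTs[ts] = result;           // keep metadata for frames still to come
    }

    // Called from the image path (cf. onImageAvailable); returns true when the burst is complete.
    bool onFrame(int64_t frameTs) {
        std::lock_guard<std::mutex> g(lock);
        if (minTimestamp == 0 || keptFrames.size() >= burst) return false;
        if (frameTs < minTimestamp) return false;                          // metering-phase frame
        if (resultsByTs.find(frameTs) == resultsByTs.end()) return false;  // metadata not here yet
        keptFrames.push_back(frameTs);
        return keptFrames.size() >= burst;
    }
};

As in the patch, both paths take the same lock, since the capture-result callback and the image-available callback may be delivered on different threads.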