You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
TermApp/app/src/main/cpp/camera2/ndkcamera.cpp

2942 lines
95 KiB
C++

This file contains ambiguous Unicode characters!

This file contains ambiguous Unicode characters that may be confused with others in your current locale. If your use case is intentional and legitimate, you can safely ignore this warning. Use the Escape button to highlight these characters.

// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include "ndkcamera.h"
#include <string>
#include <thread>
#include <numeric>
#include <fstream>
#include <android/log.h>
#include <sys/system_properties.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui.hpp>
#include "mat.h"
#include "gpu.h"
#include "Camera2Helper.h"
#include <AndroidHelper.h>
#include <LogThread.h>
#include "DngCreator.h"
#include "mtk_platform_metadata_tag.h"
#include "mtk_metadata_tag.h"
void saveYuvToFile(AImage* image, const std::string& filePath) {
int32_t width, height;
AImage_getWidth(image, &width);
AImage_getHeight(image, &height);
// 获取 YUV 数据
uint8_t* yPlane = nullptr;
uint8_t* uPlane = nullptr;
uint8_t* vPlane = nullptr;
int yLength, uLength, vLength;
AImage_getPlaneData(image, 0, &yPlane, &yLength); // Y 分量
AImage_getPlaneData(image, 1, &uPlane, &uLength); // U 分量
AImage_getPlaneData(image, 2, &vPlane, &vLength); // V 分量
int32_t yStride, uStride, vStride;
AImage_getPlaneRowStride(image, 0, &yStride); // Y 分量的 Stride
AImage_getPlaneRowStride(image, 1, &uStride); // U 分量的 Stride
AImage_getPlaneRowStride(image, 2, &vStride); // V 分量的 Stride
// 打开文件
std::ofstream file(filePath, std::ios::binary);
if (!file.is_open()) {
// 文件打开失败
return;
}
// 写入 Y 分量(逐行复制,处理 Stride
for (int i = 0; i < height; i++) {
file.write(reinterpret_cast<const char*>(yPlane + i * yStride), width);
}
// 写入 U 分量(逐行复制,处理 Stride
for (int i = 0; i < height / 2; i++) {
file.write(reinterpret_cast<const char*>(uPlane + i * uStride), width / 2);
}
// 写入 V 分量(逐行复制,处理 Stride
for (int i = 0; i < height / 2; i++) {
file.write(reinterpret_cast<const char*>(vPlane + i * vStride), width / 2);
}
// 关闭文件
file.close();
}
#ifdef _DEBUG
void Auto_AImage_delete(AImage* image)
{
XYLOG(XYLOG_SEVERITY_DEBUG,"delete image");
AImage_delete(image);
}
#else
#define Auto_AImage_delete AImage_delete
#endif
static void onAvailabilityCallback(void* context, const char* cameraId)
{
((NdkCamera*)context)->onAvailabilityCallback(cameraId);
// ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId);
}
static void onUnavailabilityCallback(void* context, const char* cameraId)
{
((NdkCamera*)context)->onUnavailabilityCallback(cameraId);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId);
}
static void onDisconnected(void* context, ACameraDevice* device)
{
((NdkCamera*)context)->onDisconnected(device);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device));
}
static void onError(void* context, ACameraDevice* device, int error)
{
((NdkCamera*)context)->onError(device, error);
}
static void onImageAvailable(void* context, AImageReader* reader)
{
NdkCamera* pThis = reinterpret_cast<NdkCamera*>(context);
pThis->onImageAvailable(reader);
}
static void onSessionActive(void* context, ACameraCaptureSession *session)
{
ALOGD("onSessionActive %p", session);
}
static void onSessionReady(void* context, ACameraCaptureSession *session)
{
ALOGD("onSessionReady %p", session);
((NdkCamera*)context)->onSessionReady(session);
}
static void onSessionClosed(void* context, ACameraCaptureSession *session)
{
XYLOG(XYLOG_SEVERITY_DEBUG, "onSessionClosed %p", session);
}
void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
// XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);
((NdkCamera*)context)->onCaptureFailed(session, request, failure);
}
void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber)
{
ALOGD("onCaptureSequenceCompleted %p sequenceId=%d frameNumber=%ld", session, sequenceId, frameNumber);
}
void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId)
{
ALOGD("onCaptureSequenceAborted %p sequenceId=%d", session, sequenceId);
}
void onCaptureProgressed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
((NdkCamera*)context)->onCaptureProgressed(session, request, result);
}
void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
inline uint8_t GetCaptureIntent(ACameraDevice_request_template templateId)
{
/*
ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM = 0,
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW = 1,
ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2,
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3,
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4,
ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5,
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
*/
uint8_t captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
switch (templateId)
{
case TEMPLATE_PREVIEW: // = 1,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
break;
case TEMPLATE_STILL_CAPTURE: // = 2,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
break;
case TEMPLATE_RECORD: // = 3,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
break;
case TEMPLATE_VIDEO_SNAPSHOT: // = 4,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
break;
case TEMPLATE_ZERO_SHUTTER_LAG: // = 5,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
break;
case TEMPLATE_MANUAL: // = 6,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL;
break;
default:
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
break;
}
return captureIntent;
}
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
camera_facing = 0;
camera_orientation = 0;
m_params = params;
if (m_params.burstCaptures == 0)
{
m_params.burstCaptures = 1;
}
m_firstFrame = true;
m_photoTaken = false;
mWidth = width;
mHeight = height;
mCaptureTriggered = false;
mFocusTriggered = false;
mCaptureDispatched = false;
maxFrameDuration = 0;
afSupported = false;
awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
aeLockAvailable = false;
awbLockAvailable = false;
m_fatalError = false;
sceneModeSupported = false;
numberOfPrecaptures = 0;
m_precaptureStartTime = 0;
m_minTimestamp = 0;
activeArraySize[0] = 0;
activeArraySize[1] = 0;
maxRegions[0] = 0;
maxRegions[1] = 0;
maxRegions[2] = 0;
camera_manager_cb.context = this;
camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback;
camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback;
mPreviewImageReader = NULL;
mPreviewImageWindow = NULL;
mPreviewOutputTarget = NULL;
mPreviewSessionOutput = NULL;
camera_device = 0;
mImageReader = NULL;
mImageWindow = NULL;
mOutputTarget = NULL;
mSessionOutput = NULL;
capture_session_output_container = 0;
capture_session = 0;
lightDetected = false;
mStableFrameCount = 0;
mResult = { 0 };
mLdr = ~0;
mFinalLdr = 0;
mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
}
NdkCamera::~NdkCamera()
{
XYLOG(XYLOG_SEVERITY_DEBUG, "NdkCamera::~NdkCamera %s", mCameraId.c_str());
close();
}
int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY)
{
camera_manager.Create();
// ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);
// find camera
bool foundIt = false;
// DisplayDimension disp(mWidth, mHeight);
// DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
ACameraIdList* cameraIdList = NULL;
status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList);
if (status != ACAMERA_OK)
{
return 1;
}
for (int i = 0; i < cameraIdList->numCameras; ++i)
{
const char *id = cameraIdList->cameraIds[i];
if (cameraId.compare(id) == 0) {
foundIt = true;
break;
}
}
ACameraManager_deleteCameraIdList(cameraIdList);
if (!foundIt)
{
return 2;
}
ACameraMetadata * camera_metadata = 0;
status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
if (status != ACAMERA_OK)
{
return 3;
}
{
ACameraMetadata_const_entry e = { 0 };
camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
// format of the data: format, width, height, input?, type int32
maxResolutionX = 0;
maxResolutionY = 0;
for (int i = 0; i < e.count; i += 4)
{
int32_t input = e.data.i32[i + 3];
int32_t format = e.data.i32[i + 0];
if (input) continue;
if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
if (e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY))
{
maxResolutionX = e.data.i32[i + 1];
maxResolutionY = e.data.i32[i + 2];
}
}
}
}
ACameraMetadata_free(camera_metadata);
return 0;
}
int NdkCamera::open(const std::string& cameraId) {
// XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str());
// camera_facing = _camera_facing;
camera_manager.Create();
// ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);
// find camera
bool foundIt = false;
DisplayDimension disp(mWidth, mHeight);
DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
int32_t previewWidth = 0;
int32_t previewHeight = 0;
ALOGD("Start ACameraManager_getCameraIdList");
{
ACameraIdList *camera_id_list = 0;
for (int retry = 0; retry < 100; retry++)
{
status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list);
AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status);
for (int i = 0; i < camera_id_list->numCameras; ++i) {
const char *id = camera_id_list->cameraIds[i];
if (cameraId.compare(id) == 0) {
foundIt = true;
break;
}
}
ACameraManager_deleteCameraIdList(camera_id_list);
if (foundIt)
{
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(16));
}
ALOGD("End ACameraManager_getCameraIdList");
// ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
if (!foundIt)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not Found on ID: %s", cameraId.c_str());
return 1;
}
mCameraId = cameraId;
ACameraMetadata * camera_metadata = 0;
status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status);
mCharacteristics = std::shared_ptr<ACameraMetadata>(camera_metadata, ACameraMetadata_free);
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
// format of the data: format, width, height, input?, type int32
// DisplayDimension foundRes(4000, 4000);
// DisplayDimension maxJPG(0, 0);
foundIt = false;
DisplayDimension temp;
for (int i = 0; i < e.count; i += 4)
{
int32_t input = e.data.i32[i + 3];
if (input) continue;
int32_t format = e.data.i32[i + 0];
if (format == AIMAGE_FORMAT_RAW16)
{
if (mFinalOutputFormat == AIMAGE_FORMAT_RAW16)
{
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
else if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
if (previewWidth == 0 || previewHeight == 0)
{
previewWidth = e.data.i32[i + 1];
previewHeight = e.data.i32[i + 2];
}
if (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888)
{
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
}
if (!foundIt)
{
foundRes = temp;
foundIt = true;
}
}
if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0)
{
// ACameraMetadata_free(camera_metadata);
XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str());
return 1;
}
// foundRes.Flip();
// query faceing
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e);
AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status);
if (status == ACAMERA_OK)
{
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
}
camera_facing = facing;
// query orientation
int orientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e);
AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status);
if (status == ACAMERA_OK)
{
orientation = (int)e.data.i32[0];
}
}
camera_orientation = orientation;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e);
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
#ifdef _DEBUG
std::string afModes;
for (int idx = 0; idx < e.count; idx++)
{
afModes += std::to_string(e.data.u8[idx]) + " ";
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: ", afModes.c_str());
#endif
afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF));
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
if (status == ACAMERA_OK)
{
for (int idx = 0; idx < e.count; idx++)
{
if (m_params.awbMode == e.data.u8[idx])
{
awbMode = m_params.awbMode;
break;
}
// unsigned int m = e.data.u8[idx];
// XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m);
}
}
// awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF));
}
if (!afSupported)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported");
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status);
if (status == ACAMERA_OK)
{
exposureRange.min_ = val.data.i64[0];
exposureRange.max_ = val.data.i64[1];
}
else
{
ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
exposureRange.min_ = exposureRange.max_ = 0l;
// exposureTime_ = 0l;
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
aeLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false;
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e);
awbLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false;
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val);
if (status == ACAMERA_OK)
{
float zoomRatioMin = val.data.f[0];
float zoomRatioMax = val.data.f[1];
ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax);
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val);
if (status == ACAMERA_OK)
{
aeCompensationRange.min_ = val.data.i32[0];
aeCompensationRange.max_ = val.data.i32[1];
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_);
}
else
{
ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE");
aeCompensationRange.min_ = aeCompensationRange.max_ = 0l;
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val);
if (status == ACAMERA_OK)
{
aeCompensationStep = val.data.r[0];
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator);
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e);
maxFrameDuration = (status == ACAMERA_OK) ? *e.data.i64 : 0;
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
if (status == ACAMERA_OK)
{
sensitivityRange.min_ = val.data.i32[0];
sensitivityRange.max_ = val.data.i32[1];
}
else
{
ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
sensitivityRange.min_ = sensitivityRange.max_ = 0;
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val);
if (status == ACAMERA_OK)
{
activeArraySize[0] = val.data.i32[2];
activeArraySize[1] = val.data.i32[3];
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val);
if (status == ACAMERA_OK)
{
maxRegions[0] = val.data.i32[0];
maxRegions[1] = val.data.i32[1];
maxRegions[2] = val.data.i32[2];
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e);
if (status == ACAMERA_OK)
{
for (int i = 0; i < e.count; i++)
{
if (m_params.sceneMode == e.data.u8[i])
{
sceneModeSupported = true;
break;
}
}
}
}
// ACameraMetadata_free(camera_metadata);
}
// open camera
{
ACameraDevice_StateCallbacks camera_device_state_callbacks;
camera_device_state_callbacks.context = this;
camera_device_state_callbacks.onDisconnected = ::onDisconnected;
camera_device_state_callbacks.onError = ::onError;
status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device);
if (status != ACAMERA_OK)
{
if (status == ACAMERA_ERROR_MAX_CAMERA_IN_USE)
{
m_fatalError = true;
}
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status);
return 1;
}
}
XYLOG(XYLOG_SEVERITY_DEBUG, "CAM Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height());
status = ACaptureSessionOutputContainer_create(&capture_session_output_container);
uint32_t burstCaptures = getBurstCaptures();
if (burstCaptures == 0)
{
burstCaptures = 1;
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new preview, status=%d", status);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mPreviewImageReader, &listener);
mstatus = AImageReader_getWindow(mPreviewImageReader, &mPreviewImageWindow);
ANativeWindow_acquire(mPreviewImageWindow);
}
status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget);
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
AASSERT(status == ACAMERA_OK, "Failed to call AImageReader_new, status=%d", status);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader, &listener);
mstatus = AImageReader_getWindow(mImageReader, &mImageWindow);
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status);
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
CaptureRequest *request = CreateRequest(true);
mCaptureRequests.push_back(request);
// capture session
ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
camera_capture_session_state_callbacks.context = this;
camera_capture_session_state_callbacks.onActive = onSessionActive;
camera_capture_session_state_callbacks.onReady = ::onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
if (status != ACAMERA_OK)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to call ACameraDevice_createCaptureSession, status=%d", status);
return 1;
}
ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks;
capture_session_capture_callbacks.context = this;
capture_session_capture_callbacks.onCaptureStarted = 0;
capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed;
capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted;
capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed;
capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
capture_session_capture_callbacks.onCaptureBufferLost = 0;
status = ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId));
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_setRepeatingRequest, status=%d", status);
ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId);
m_startTime = GetMicroTimeStamp();
m_precaptureStartTime = m_startTime;
return status == ACAMERA_OK ? 0 : 1;
}
NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest, int32_t sensitivity/* = -1*/)
{
camera_status_t status = ACAMERA_OK;
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
request->pThis = this;
request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader;
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow;
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput;
request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
// request->templateId = (ACameraDevice_request_template)m_params.requestTemplate;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
#if 0
bool forceToPreview = false;
if (!isPreviewRequest && sensitivity >= 150 && sensitivity <= 400 && (m_params.burstRawCapture == 2 || m_params.burstRawCapture == 3))
{
if (request->templateId == TEMPLATE_STILL_CAPTURE)
{
request->templateId = TEMPLATE_PREVIEW;
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
forceToPreview = true;
XYLOG(XYLOG_SEVERITY_WARNING, "Force to use preview mode to avoid pink issue ISO=%d CameraId=%s", sensitivity, mCameraId.c_str());
}
}
#endif
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureRequest, status=%d", status);
ACaptureRequest_setUserContext(request->request, request);
// uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode);
uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode);
if (afSupported && m_params.autoFocus)
{
if (!m_params.zoom)
{
if (maxRegions[2] > 0)
{
int32_t centerX = activeArraySize[0] >> 1;
int32_t centerY = activeArraySize[1] >> 1;
int32_t sizeX = activeArraySize[0] >> 4;
int32_t sizeY = activeArraySize[1] >> 4;
int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 };
// status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
#if 0
// For ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE, we SHOULD NOT call ACAMERA_CONTROL_AF_TRIGGER_START
// Refer to: https://source.android.google.cn/docs/core/camera/camera3_3Amodes?hl=zh-cn
if (isPreviewRequest)
{
// uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
// trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
#endif
}
}
else
{
uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
if (m_params.sceneMode != 0)
{
uint8_t sceneMode = m_params.sceneMode;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
}
if (m_params.autoExposure)
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
// ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);
if (m_params.minFps != 0)
{
int32_t fpsRange[2] = {m_params.minFps, 60};
// status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set ACAMERA_CONTROL_AE_TARGET_FPS_RANGE: %d", status);
}
}
if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
if (compensation < aeCompensationRange.min_)
{
compensation = aeCompensationRange.min_;
}
if (compensation > aeCompensationRange.max_)
{
compensation = aeCompensationRange.max_;
}
// int32_t aeCompensation = aeCompensationRange.max_;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (status != ACAMERA_OK)
{
int aa = 0;
}
}
if (maxRegions[0] > 0)
{
int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 };
// status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
if (isPreviewRequest)
{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
mResult.aeLockSetted = 1;
}
else
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
}
uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
m_precaptureStartTime = m_startTime;
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
}
}
else
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
if (m_params.sensitivity > 0)
{
int32_t sensitivity = m_params.sensitivity;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
}
if (m_params.exposureTime > 0)
{
int64_t exposureTime = m_params.exposureTime;
status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
}
int64_t frameDuration = maxFrameDuration / 2;
// status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration);
}
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
{
uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
mResult.awbLockSetted = 1;
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState);
}
#if 0
uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);
uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
#endif
if (m_params.zoom)
{
float zoomRatio = m_params.zoomRatio;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio);
if (status != ACAMERA_OK)
{
}
}
status = ACaptureRequest_addTarget(request->request, request->imageTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureRequest_addTarget, status=%d", status);
// status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
// status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
{
#if 0
uint8_t colorMode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_COLOR_CORRECTION_MODE, 1, &colorMode);
// 设置均衡的RGGB增益
float rggbGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_GAINS, 4, rggbGains);
// 设置单位色彩变换矩阵
float colorMatrix[9] = {
1.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 1.0f
};
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_COLOR_CORRECTION_TRANSFORM, 9, colorMatrix);
#endif
if (m_params.burstRawCapture == 1)
{
SetupHDR(mCharacteristics.get(), request->request, sensitivity);
}
if (m_params.burstRawCapture == 2)
{
SetupMFNR(mCharacteristics.get(), request->request, false, sensitivity);
}
else if (m_params.burstRawCapture == 3)
{
SetupMFNR(mCharacteristics.get(), request->request, true, sensitivity);
}
else if (m_params.burstRawCapture == 4)
{
Setup3DNR(mCharacteristics.get(), request->request, sensitivity);
}
else if (m_params.burstRawCapture == 5)
{
SetupTonemapCurve(mCharacteristics.get(), request->request);
}
}
return request;
}
void NdkCamera::DestroyRequest(CaptureRequest* request)
{
}
void NdkCamera::close()
{
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
camera_status_t res = ACAMERA_OK;
/*
if (mPreviewImageReader != NULL)
{
AImageReader_setImageListener(mPreviewImageReader, NULL);
}
if (mImageReader != NULL)
{
AImageReader_setImageListener(mImageReader, NULL);
}
if (mImageReader2 != NULL)
{
AImageReader_setImageListener(mImageReader2, NULL);
}
*/
mPreviewResults.reset();
mCaptureResults.clear();
mCaptureFrames.clear();
mCaptureResultMap.clear();
if ((ACameraManager *)camera_manager != NULL)
{
// res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
}
if (capture_session)
{
res = ACameraCaptureSession_stopRepeating(capture_session);
std::this_thread::sleep_for(std::chrono::milliseconds(512));
ACameraCaptureSession_close(capture_session);
capture_session = 0;
}
for (auto it = mCaptureRequests.begin(); it != mCaptureRequests.end(); ++it)
{
CaptureRequest* request = *it;
if (request->request)
{
res = ACaptureRequest_removeTarget(request->request, request->imageTarget);
ACaptureRequest_free(request->request);
request->request = 0;
}
/*
if (request->imageTarget)
{
ACameraOutputTarget_free(request->imageTarget);
request->imageTarget = 0;
}
*/
delete request;
}
mCaptureRequests.clear();
if (mPreviewOutputTarget != NULL)
{
ACameraOutputTarget_free(mPreviewOutputTarget);
mPreviewOutputTarget = 0;
}
if (mPreviewImageWindow != NULL)
{
ANativeWindow_release(mPreviewImageWindow);
mPreviewImageWindow = 0;
}
if (mPreviewImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mPreviewImageReader");
#endif
AImage* image = NULL;
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mPreviewImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
AImageReader_setImageListener(mPreviewImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mPreviewImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mPreviewImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mPreviewImageReader");
#endif
}
if (mOutputTarget != NULL)
{
ACameraOutputTarget_free(mOutputTarget);
mOutputTarget = 0;
}
if (mImageWindow != NULL)
{
ANativeWindow_release(mImageWindow);
mImageWindow = 0;
}
if (mImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mImageReader");
#endif
AImage* image = NULL;
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
AImageReader_setImageListener(mImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mImageReader");
#endif
}
if (mPreviewSessionOutput != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mPreviewSessionOutput);
}
ACaptureSessionOutput_free(mPreviewSessionOutput);
mPreviewSessionOutput = 0;
}
if (mSessionOutput != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput);
}
ACaptureSessionOutput_free(mSessionOutput);
mSessionOutput = 0;
}
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_free(capture_session_output_container);
capture_session_output_container = 0;
}
if (camera_device)
{
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::close device %s, %p", mCameraId.c_str(), camera_device);
ACameraDevice_close(camera_device);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed device %s, %p", mCameraId.c_str(), camera_device);
camera_device = 0;
}
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::closed %s", mCameraId.c_str());
}
void NdkCamera::onImageAvailable(AImageReader* reader)
{
AImage* image = 0;
media_status_t mstatus = AMEDIA_OK;
if (reader == mPreviewImageReader)
{
mstatus = AImageReader_acquireLatestImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus);
}
return;
}
if (!mCaptureTriggered)
{
uint8_t* y_data = 0;
int y_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
#if __cplusplus >= 201703L
uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
#else
uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
#endif
avgY = avgY / (uint64_t)y_len;
m_locker.lock();
mLdr = (uint8_t)avgY;
m_locker.unlock();
}
AImage_delete(image);
return;
}
else
{
uint32_t burstCaptures = getBurstCaptures();
uint64_t ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
if (burstCaptures == 0)
{
burstCaptures = 1;
}
if (true)
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
}
int32_t format;
mstatus = AImage_getFormat(image, &format);
cv::Mat frame;
if (format == AIMAGE_FORMAT_YUV_420_888)
{
int32_t width;
int32_t height;
mstatus = AImage_getWidth(image, &width);
mstatus = AImage_getHeight(image, &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(image, 0, &y_rowStride);
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
uint8_t* y_data = 0;
uint8_t* u_data = 0;
uint8_t* v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
{
// already nv21
ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame);
}
else
{
// construct nv21
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
{
// Y
uint8_t* yptr = nv21;
for (int y = 0; y < height; y++)
{
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
for (int x = 0; x < width; x++)
{
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
uint8_t* uvptr = nv21 + width * height;
for (int y = 0; y < height / 2; y++)
{
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
for (int x = 0; x < width / 2; x++)
{
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
ConvertYUV21ToMat(nv21, width, height,mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, frame);
delete[] nv21;
}
}
m_photoTaken = true;
int64_t frameTs = 0;
mstatus = AImage_getTimestamp(image, &frameTs);
#ifdef OUTPUT_DBG_INFO
#if 0
if (mWidth == 1920)
{
std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", time(NULL));
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
fileName += "_" + mCameraId + std::to_string(frameTs) + ".yuv";
saveYuvToFile(image, fileName.c_str());
}
#endif
#endif
AImage_delete(image);
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
if (!frame.empty())
{
mOneFrame.push_back(std::make_pair<>(frameTs, frame));
}
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
FireOneCapture(ts);
// onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
break;
}
}
}
else
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
}
m_photoTaken = true;
m_locker.lock();
mCaptureFrames.push_back(std::shared_ptr<AImage>(image, Auto_AImage_delete));
m_locker.unlock();
ALOGD("Capture Image Received");
}
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
void NdkCamera::on_error(const std::string& msg)
{
}
void NdkCamera::onDisconnected(ACameraDevice* device)
{
}
bool NdkCamera::on_image(cv::Mat rgb)
{
return false;
}
bool NdkCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
{
return false;
}
void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height)
{
// ALOGW("nv21 size: %d x %d", nv21_width, nv21_height);
// rotate nv21
int w = 0;
int h = 0;
int rotate_type = 0;
cv::Mat nv21_rotated;
const unsigned char* yuv420data = nv21;
// TODO !!!???
/*
if (camera_->GetSensorOrientation(&facing, &angle)) {
if (facing == ACAMERA_LENS_FACING_FRONT) {
imageRotation = (angle + rotation_) % 360;
imageRotation = (360 - imageRotation) % 360;
} else {
imageRotation = (angle - rotation_ + 360) % 360;
}
}
*/
int orgWidth = mWidth;
int orgHeight = mHeight;
// int co = camera_orientation > 0 ? camera_orientation + 90 : camera_orientation;
if (m_params.orientation != 0)
{
int co = 0;
if (camera_facing == ACAMERA_LENS_FACING_FRONT)
{
co = (camera_orientation + (m_params.orientation - 1) * 90) % 360;
co = (360 - co) % 360;
}
else
{
co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360;
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing);
// int co = 0;
if (co == 0)
{
w = nv21_width;
h = nv21_height;
rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1;
}
else if (co == 90)
{
w = nv21_height;
h = nv21_width;
orgWidth = mHeight;
orgHeight = mWidth;
rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6;
}
else if (co == 180)
{
w = nv21_width;
h = nv21_height;
rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3;
}
else if (co == 270)
{
w = nv21_height;
h = nv21_width;
orgWidth = mHeight;
orgHeight = mWidth;
rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 7 : 8;
}
nv21_rotated.create(h + h / 2, w, CV_8UC1);
ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
yuv420data = nv21_rotated.data;
}
else
{
w = nv21_width;
h = nv21_height;
XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing);
}
// nv21_rotated to rgb
cv::Mat rgb;
if (w == orgWidth && h == orgHeight)
{
rgb.create(h, w, CV_8UC3);
// ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data);
}
else
{
cv::Mat org(h, w, CV_8UC3);
ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data);
if (w * orgHeight == h * orgWidth) // Same Ratio
{
cv::resize(org, rgb, cv::Size(orgWidth, orgHeight));
}
else
{
// Crop image
if (w > orgWidth && h >= orgHeight)
{
int left = (w - orgWidth) / 2;
int top = (h - orgHeight) / 2;
rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth));
}
else
{
rgb = org;
}
}
}
on_image(rgb);
}
void NdkCamera::onSessionReady(ACameraCaptureSession *session)
{
}
void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
}
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
void* context = NULL;
ACaptureRequest_getUserContext(request, &context);
CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);
if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request)
{
if (mCaptureTriggered)
{
return;
}
bool readyForCapture = true;
camera_status_t status = ACAMERA_ERROR_BASE;
unsigned long long ts = GetMicroTimeStamp();
uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE;
uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE;
uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE;
int32_t sensitivity = -1;
ACameraMetadata_const_entry val = { 0 };
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : -1;
// XYLOG(XYLOG_SEVERITY_DEBUG, "Preview State AFS=%u AES=%u AWBS=%u Time=%u", (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
// Check if timeout
if (ts - m_startTime < m_params.focusTimeout)
{
if (afSupported && (m_params.autoFocus != 0))
{
/*
if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED)
{
// Will lock it
if (mResult.afLockSetted == 0)
{
uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
mResult.afLockSetted = 1;
//XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState);
readyForCapture = false;
}
}
*/
if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED/* &&
afState != ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED*/)
// if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE)
{
//XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused");
readyForCapture = false;
}
}
if (m_params.autoExposure != 0)
{
if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE)
{
#if 0
uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
//XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)aeState);
AASSERT(status == ACAMERA_OK, "Failed to call PRECAPTURE_TRIGGER, status=%d", status);
#endif
readyForCapture = false;
numberOfPrecaptures = 0;
m_precaptureStartTime = ts;
}
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) {
readyForCapture = false;
}
else
{
#if 0
//XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked");
#endif
}
}
else
{
if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED &&
aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED &&
aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) {
readyForCapture = false;
}
else {
#if 0
XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked");
#endif
}
}
}
if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO)
{
if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) {
if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
{
readyForCapture = false;
}
else
{
#if 0
//XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked");
#endif
}
}
else
{
if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED &&
awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
{
readyForCapture = false;
}
else
{
#if 0
XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked");
#endif
}
}
}
}
else
{
#if 0
XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u",
(unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
#endif
}
if (readyForCapture)
{
mStableFrameCount++;
if (mStableFrameCount >= 3) { // 确保连续3帧稳定
// 进行实际的静态抓拍
mStableFrameCount = 0;
} else {
readyForCapture = false; // 继续等待
}
}
if (readyForCapture/* && mCaptureRequests.size() > 1*/)
{
// Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr
if (mLdr != ~0)
{
mFinalLdr = mLdr;
}
XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u ISO=%d Time=%u",
(unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, sensitivity, (unsigned int)(ts - m_startTime));
uint32_t burstCaptures = getBurstCaptures();
std::vector<ACaptureRequest*> requests;
int sequenceId = 0;
requests.reserve(burstCaptures);
for (int idx = 0; idx < burstCaptures; idx++)
{
CaptureRequest* request = CreateRequest(false, sensitivity);
mCaptureRequests.push_back(request);
// CopyPreviewRequest(mCaptureRequests[idx]->request, result);
requests.push_back(request->request);
}
#if 0
if (m_params.customHdr && burstCaptures > 1)
{
int32_t hdrStep = m_params.hdrStep;
if (hdrStep == 0)
{
hdrStep = 1;
}
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
int32_t sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
XYLOG(XYLOG_SEVERITY_INFO, "HDR: Base Exp=%lld ISO=%d", exTime / 1000, sensitivity);
if (exTime != -1 && sensitivity > 0)
{
uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
// for (int idx = 0; idx < burstCaptures; idx++)
{
ACaptureRequest_setEntry_u8(requests[burstCaptures - 1], ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff);
// int64_t expt = (idx == 0) ? exTime : (exTime * (hdrStep + idx));
int64_t expt = exTime + ((exTime * hdrStep) >> 1);
ACaptureRequest_setEntry_i64(requests[burstCaptures - 1], ACAMERA_SENSOR_EXPOSURE_TIME, 1, &expt);
int32_t newSensitivity = sensitivity - ((sensitivity * hdrStep) >> 2);
if (m_params.sensitivity != 0)
{
newSensitivity = m_params.sensitivity;
}
if (newSensitivity < sensitivityRange.min_)
{
newSensitivity = sensitivityRange.min_;
}
ACaptureRequest_setEntry_i32(requests[burstCaptures - 1], ACAMERA_SENSOR_SENSITIVITY, 1, &newSensitivity);
}
}
}
#endif
// ALOGW("Will Stop Repeating Request");
// status = ACameraCaptureSession_stopRepeating(capture_session);
// ALOGW("Finished Repeating Request");
ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
capture_session_capture_cb.context = this;
capture_session_capture_cb.onCaptureStarted = 0;
capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed;
capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted;
capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed;
capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted;
capture_session_capture_cb.onCaptureBufferLost = 0;
int numberOfRequests = requests.size();
status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
numberOfRequests, &requests[0], &sequenceId);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status);
ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
for (int idx = 1; idx < mCaptureRequests.size(); idx++)
{
mCaptureRequests[idx]->sessionSequenceId = sequenceId;
}
mCaptureTriggered = true;
}
}
else
{
#ifdef _DEBUG
uint64_t tid = getThreadIdOfULL();
ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
unsigned long long ts = GetMicroTimeStamp();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
bool captureDispatchable = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
int64_t resultTimestamp = GetTimestamp(result);
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
if (true)
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
mCaptureResultMap[resultTimestamp] = captureResult;
if (mOneFrame.size() >= expectedTimes)
{
bool allExisted = true;
for (auto itFrame = mOneFrame.cbegin(); itFrame != mOneFrame.cend(); ++itFrame)
{
if (mCaptureResultMap.find(itFrame->first) == mCaptureResultMap.cend())
{
allExisted = false;
break;
}
}
if (allExisted)
{
captureCompleted = true;
}
}
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
FireOneCapture(ts);
}
}
else
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
int64_t NdkCamera::GetTimestamp(const ACameraMetadata* result)
{
ACameraMetadata_const_entry entry;
camera_status_t status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_TIMESTAMP, &entry);
if (status == ACAMERA_OK && entry.count > 0) {
return entry.data.i64[0];
}
return 0;
}
void NdkCamera::FireOneCapture(uint64_t ts)
{
#ifdef OUTPUT_DBG_INFO
#if 0
if (mWidth == 1920 && mOneFrame.size() > 1)
{
std::string dt = FormatLocalDateTime("%d%02d%02d%02d%02d%02d", ts / 1000);
std::vector<int> params;
params.push_back(cv::IMWRITE_JPEG_QUALITY);
params.push_back(50);
for (auto it = mOneFrame.cbegin(); it != mOneFrame.cend(); ++it)
{
std::string fileName = "/sdcard/com.xypower.mpapp/tmp/" + dt;
size_t idx = std::distance(mOneFrame.cbegin(), it);
std::shared_ptr<ACameraMetadata> result = mCaptureResults[idx];
CAPTURE_RESULT captureResult = { 0 };
EnumCameraResult(result.get(), captureResult);
fileName += "_" + mCameraId + "_" + std::to_string(captureResult.aeState) + "_" + std::to_string(idx) + ".jpg";
cv::imwrite(fileName, it->second, params);
}
}
#endif
#endif
onOneCapture(mCharacteristics, mCaptureResultMap[mOneFrame.back().first], mFinalLdr, ts - m_startTime, mOneFrame.back().second);
}
void NdkCamera::FireBurstCapture()
{
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
unsigned long long ts = GetMicroTimeStamp();
size_t expectedTimes = getBurstCaptures();
std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
uint32_t ldr;
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mFinalLdr;
if (ldr == 0 && mLdr != ~0)
{
ldr = mLdr;
}
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
m_locker.unlock();
media_status_t mstatus;
std::vector<std::vector<uint8_t> > frames;
for (int idx = 0; idx < expectedTimes; idx++)
{
std::shared_ptr<AImage> spImage = captureFrames[idx];
std::shared_ptr<ACameraMetadata> spResult = captureResults[idx];
auto it = frames.insert(frames.end(), std::vector<uint8_t>());
int32_t width = 0;
int32_t height = 0;
mstatus = AImage_getWidth(spImage.get(), &width);
mstatus = AImage_getHeight(spImage.get(), &height);
int32_t planeCount = 0;
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *planeData = NULL;
int planeDataLen = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
ALOGD("Start Converting Dng");
DngCreator dngCreator(mCharacteristics.get(), spResult.get());
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
ALOGD("End Converting Dng");
}
captureFrames.clear();
onBurstCapture(mCharacteristics, captureResults, ldr, ts - m_startTime, frames);
#ifdef _DEBUG
ALOGD("Frames Size: %u", (uint32_t)frames.size());
#endif
}
void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult)
{
camera_status_t status = ACAMERA_ERROR_BASE;
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
val = {0};
status = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_SENSITIVITY, &val);
int32_t sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
if (exTime != -1 && sensitivity != 0)
{
uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff);
ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exTime);
ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
}
/*
val = { 0 };
float focusDistance = NAN;
status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val);
if (status == ACAMERA_OK)
{
focusDistance = *val.data.f;
}
*/
// 添加AWB和色彩校正参数的复制
ACameraMetadata_const_entry entry;
if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_GAINS, &entry) == ACAMERA_OK) {
ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, entry.count, entry.data.f);
}
if (ACameraMetadata_getConstEntry(previewResult, ACAMERA_COLOR_CORRECTION_TRANSFORM, &entry) == ACAMERA_OK) {
ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_TRANSFORM, entry.count, entry.data.f);
}
}
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
bool isPreview = (request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request);
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d CameraId=%s PhotoTaken=%d Preview=%d", session, request, failure->reason, mCameraId.c_str(), m_photoTaken ? 1 : 0, isPreview ? 1 : 0);
if (isPreview)
{
return;
}
char msg[64] = { 0 };
snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
if (!m_photoTaken)
{
on_error(msg);
}
}
void NdkCamera::onError(ACameraDevice* device, int error)
{
if (ACAMERA_ERROR_CAMERA_DEVICE == error)
{
}
XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d PhotoTaken=%d", ACameraDevice_getId(device), error, m_photoTaken ? 1 : 0);
if (!m_photoTaken)
{
std::string msg = "NdkCamera error code=" + std::to_string(error);
on_error(msg);
}
}
void NdkCamera::onAvailabilityCallback(const char* cameraId)
{
std::string s(cameraId);
m_locker.lock();
m_availableCameras.insert(s);
m_locker.unlock();
}
void NdkCamera::onUnavailabilityCallback(const char* cameraId)
{
std::string s(cameraId);
m_locker.lock();
m_availableCameras.erase(s);
m_locker.unlock();
}
bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
{
bool existed = false;
m_locker.lock();
existed = (m_availableCameras.find(cameraId) != m_availableCameras.cend());
m_locker.unlock();
return existed;
}
int32_t NdkCamera::getOutputFormat() const
{
return mFinalOutputFormat;
// return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888;
}
int32_t NdkCamera::getBurstCaptures() const
{
return m_params.burstCaptures;
}
void NdkCamera::CreateSession(ANativeWindow* previewWindow,
ANativeWindow* jpgWindow, bool manualPreview,
int32_t imageRotation, int32_t width, int32_t height) {
media_status_t status;
/*
// Create output from this app's ANativeWindow, and add into output container
requests[PREVIEW_REQUEST_IDX].outputNativeWindow = previewWindow;
requests[PREVIEW_REQUEST_IDX].templateId = TEMPLATE_PREVIEW;
//requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
//requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;
ACaptureSessionOutputContainer_create(&capture_session_output_container);
for (auto& req : requests) {
if (!req.outputNativeWindow) continue;
ANativeWindow_acquire(req.outputNativeWindow);
ACaptureSessionOutput_create(req.outputNativeWindow, &req.sessionOutput);
ACaptureSessionOutputContainer_add(capture_session_output_container, req.sessionOutput);
ACameraOutputTarget_create(req.outputNativeWindow, &req.target);
ACameraDevice_createCaptureRequest(camera_device, req.templateId, &req.request);
ACaptureRequest_addTarget(req.request, req.target);
// To capture images
media_status_t mstatus = AImageReader_new(width, height, getOutputFormat(), 1, &req.imageReader);
if (mstatus == AMEDIA_OK) {
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(req.imageReader, &listener);
}
// req.imageReader = createJpegReader();
status = AImageReader_getWindow(req.imageReader, &req.imageWindow);
ANativeWindow_acquire(req.outputNativeWindow);
ACameraOutputTarget_create(req.imageWindow, &req.imageTarget);
ACaptureRequest_addTarget(req.request, req.imageTarget);
ACaptureSessionOutput_create(req.imageWindow, &req.imageOutput);
ACaptureSessionOutputContainer_add(capture_session_output_container, req.imageOutput);
//ACameraOutputTarget_create(imageWindow, &imageTarget);
//ACaptureRequest_addTarget(req.request_, imageTarget);
//ACaptureSessionOutput_create(imageWindow, &imageOutput);
//ACaptureSessionOutputContainer_add(outputContainer_, imageOutput);
}
// Create a capture session for the given preview request
ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
camera_capture_session_state_callbacks.context = this;
camera_capture_session_state_callbacks.onActive = onSessionActive;
camera_capture_session_state_callbacks.onReady = ::onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
if (jpgWindow) {
ACaptureRequest_setEntry_i32(requests[JPG_CAPTURE_REQUEST_IDX].request,
ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
}
if (!manualPreview) {
return;
}
//
// Only preview request is in manual mode, JPG is always in Auto mode
// JPG capture mode could also be switch into manual mode and control
// the capture parameters, this sample leaves JPG capture to be auto mode
// (auto control has better effect than author's manual control)
//uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
//ACaptureRequest_setEntry_u8(requests[PREVIEW_REQUEST_IDX].request,
// ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
//ACaptureRequest_setEntry_i32(requests[PREVIEW_REQUEST_IDX].request,
// ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity));
//ACaptureRequest_setEntry_i64(requests[PREVIEW_REQUEST_IDX].request,
// ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime));
*/
}
void NdkCamera::CreateSession(ANativeWindow* previewWindow) {
CreateSession(previewWindow, NULL, false, 0, 1920, 1080);
}
void NdkCamera::DestroySession()
{
/*
for (auto& req : requests)
{
if (!req.outputNativeWindow) continue;
ACaptureRequest_removeTarget(req.request, req.target);
ACaptureRequest_free(req.request);
ACameraOutputTarget_free(req.target);
ACaptureSessionOutputContainer_remove(capture_session_output_container, req.sessionOutput);
ACaptureSessionOutput_free(req.sessionOutput);
ANativeWindow_release(req.outputNativeWindow);
AImageReader_delete(req.imageReader);
req.imageReader = nullptr;
}
*/
}
void NdkCamera::writeJpegFile(AImage *image, const char* path)
{
int planeCount;
media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
// ASSERT(status == AMEDIA_OK && planeCount == 1,
// "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *data = nullptr;
int len = 0;
AImage_getPlaneData(image, 0, &data, &len);
FILE *file = fopen(path, "wb");
if (file) {
if (data && len)
{
fwrite(data, 1, len, file);
}
fclose(file);
}
}
void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path)
{
// dngCreator.
int32_t width;
int32_t height;
AImage_getWidth(image, &width);
AImage_getHeight(image, &height);
int planeCount;
media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
// ASSERT(status == AMEDIA_OK && planeCount == 1,
// "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *data = nullptr;
int len = 0;
AImage_getPlaneData(image, 0, &data, &len);
DngCreator dngCreator(characteristics, result);
std::vector<uint8_t> dngFile;
// std::vector<uint8_t>& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset);
dngCreator.writeInputBuffer(dngFile, data, len, width, height, 0);
if (dngFile.empty())
{
return;
}
FILE *file = fopen(path, "wb");
if (file) {
if (data && len)
{
fwrite(&dngFile[0], 1, dngFile.size(), file);
}
fclose(file);
}
}
bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height)
{
media_status_t status;
status = AImage_getWidth(image, &width);
status = AImage_getHeight(image, &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(image, 0, &y_rowStride);
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
uint8_t *y_data = 0;
uint8_t *u_data = 0;
uint8_t *v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 &&
u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width &&
v_rowStride == width) {
// already nv21 :)
// on_image((unsigned char*)y_data, (int)width, (int)height);
} else {
// construct nv21
unsigned char *nv21 = new unsigned char[width * height + width * height / 2];
{
// Y
unsigned char *yptr = nv21;
for (int y = 0; y < height; y++) {
const unsigned char *y_data_ptr = y_data + y_rowStride * y;
for (int x = 0; x < width; x++) {
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
unsigned char *uvptr = nv21 + width * height;
for (int y = 0; y < height / 2; y++) {
const unsigned char *v_data_ptr = v_data + v_rowStride * y;
const unsigned char *u_data_ptr = u_data + u_rowStride * y;
for (int x = 0; x < width / 2; x++) {
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
}
return true;
}
void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult)
{
camera_status_t status = ACAMERA_ERROR_BASE;
ACameraMetadata_const_entry val = { 0 };
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
captureResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
captureResult.awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
captureResult.afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
captureResult.exposureTime = exTime;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_MODE, &val);
captureResult.autoFocus = (status == ACAMERA_OK) ? *(val.data.u8) : 0;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val);
uint8_t aeMode = (status == ACAMERA_OK) ? val.data.u8[0] : 0;
captureResult.autoExposure = aeMode;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_FRAME_DURATION, &val);
int64_t frameDuration = (status == ACAMERA_OK) ? val.data.i64[0] : 0;
captureResult.frameDuration = frameDuration;
val = { 0 };
float focusDistance = NAN;
status = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &val);
if (status == ACAMERA_OK)
{
focusDistance = *val.data.f;
}
captureResult.FocusDistance = focusDistance;
val = { 0 };
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &val);
if (status == ACAMERA_OK)
{
captureResult.zoomRatio = *val.data.f;
}
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
captureResult.sensitivity = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val);
captureResult.sceneMode = (status == ACAMERA_OK) ? *(val.data.u8) : 0;
val = {0};
status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, &val);
captureResult.compensation = (status == ACAMERA_OK) ? *(val.data.i32) : 0;
val = {0};
status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_DETECTION_RESULT, &val);
ALOGI("HDR Detection Result: %d", val.data.i32[0]);
val = {0};
status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_MODE, &val);
if (status == ACAMERA_OK && val.count > 0) {
int32_t appliedHdrMode = val.data.i32[0];
ALOGI("Applied HDR Mode: %d", appliedHdrMode);
// 判断是否与请求的HDR模式一致
if (appliedHdrMode == MTK_HDR_FEATURE_HDR_MODE_AUTO ||
appliedHdrMode == MTK_HDR_FEATURE_HDR_MODE_ON) {
ALOGI("HDR mode successfully applied");
}
}
// 检查 HDR 是否激活(最重要的指标) dd
// 从结果中获取 MTK_HDR_FEATURE_HDR_HAL_MODE
val = {0};
status = ACameraMetadata_getConstEntry(result, MTK_HDR_FEATURE_HDR_HAL_MODE, &val);
if (status == ACAMERA_OK && val.count > 0) {
int32_t hdrHalMode = val.data.i32[0];
ALOGI("HDR HAL Mode: %d", hdrHalMode);
if (hdrHalMode != MTK_HDR_FEATURE_HDR_HAL_MODE_OFF) {
ALOGI("HDR is actively working on hardware level");
}
}
val = {0};
status = ACameraMetadata_getConstEntry(result, MTK_3A_ISP_FUS_NUM, &val);
if (status == ACAMERA_OK && val.count > 0) {
int32_t fusionFrames = val.data.i32[0];
ALOGI("多帧融合数量: %d", fusionFrames);
if (fusionFrames > 1) {
ALOGI("正在使用多帧融合,这通常表明 HDR 处理正在进行");
}
}
int aa = 0;
}
void NdkCamera::SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity)
{
// 1. 设置基础的相机参数
camera_status_t status;
// __system_property_set("vendor.mfll.force", "1");
#if 0
int32_t tagCount = 0;
const uint32_t* tags = nullptr;
ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
for (int32_t i = 0; i < tagCount; i++) {
if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i])
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]);
}
}
ACameraMetadata_const_entry entry;
status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
}
}
ACameraMetadata_const_entry entry = { 0 };
status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
}
}
#endif
// 2. 设置 MediaTek 特定的 MFNR 参数
// 使用 vendor tag 描述符
// int32_t mfbMode = MTK_MFNR_FEATURE_MFB_AUTO; // 1 Enable MFNR
int32_t mfbMode = ais ? 2 : 1; // 1 Enable MFNR
uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
// status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
// int32_t mfbMode = ais ? 2 : 1; // 1 Enable MFNR
status = ACaptureRequest_setEntry_i32(request, MTK_MFNR_FEATURE_MFB_MODE, 1, &mfbMode);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_MFNR_FEATURE_MFB_MODE, status: %d", status);
}
int32_t aeTargetMode = 1; //MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX;
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode);
if (status != ACAMERA_OK) {
ALOGE("Failed to set MTK_3A_FEATURE_AE_TARGET_MODE: %d", status);
}
// 设置为长曝光级别(3)
int32_t exposureLevel = MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG; // MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, 1,&exposureLevel);
if (status != ACAMERA_OK) {
ALOGE("Failed to set exposure level: %d", status);
}
int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_MFNR;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
uint8_t reqRemosaicEnable = 1;
status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
}
if (m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status);
}
}
}
void NdkCamera::Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity)
{
// 1. 设置基础的相机参数
camera_status_t status;
#if 0
int32_t tagCount = 0;
const uint32_t* tags = nullptr;
ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
for (int32_t i = 0; i < tagCount; i++) {
if (MTK_NR_FEATURE_AVAILABLE_3DNR_MODES == tags[i])
{
ALOGI("MTK_NR_FEATURE_AVAILABLE_3DNR_MODES Tag ID: 0x%x\n", tags[i]);
}
}
ACameraMetadata_const_entry entry;
status = ACameraMetadata_getConstEntry(characteristics, MTK_NR_FEATURE_AVAILABLE_3DNR_MODES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_NR_FEATURE_AVAILABLE_3DNR_MODES: 0x%x\n", entry.data.i32[i]);
}
}
#endif
int32_t nrMode = MTK_NR_FEATURE_3DNR_MODE_ON;
status = ACaptureRequest_setEntry_i32(request, MTK_NR_FEATURE_3DNR_MODE, 1, &nrMode);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_NR_FEATURE_3DNR_MODE, status: %d", status);
}
uint8_t reqRemosaicEnable = 1;
status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
}
if (m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_CONTROL_AE_EXPOSURE_COMPENSATION, status: %d", status);
}
}
}
void NdkCamera::SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity)
{
// 1. 设置基础的相机参数
camera_status_t status;
__system_property_set("vendor.forceset.hdrmode", "1");
#if 1
// 首先检查相机是否支持 HDR
ACameraMetadata_const_entry entry = { 0 };
status = ACameraMetadata_getConstEntry(characteristics, MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO, &entry);
if (status == ACAMERA_OK) {
bool hdrSupported = false;
for (int i = 0; i < entry.count; i++) {
ALOGI("支持的 HDR 模式: 0x%x", entry.data.i32[i]);
if (entry.data.i32[i] == MTK_HDR_FEATURE_HDR_MODE_AUTO ||
entry.data.i32[i] == MTK_HDR_FEATURE_HDR_MODE_ON) {
hdrSupported = true;
}
}
if (!hdrSupported) {
ALOGI("警告: 相机不支持 AUTO 或 ON 模式的 HDR");
return;
}
} else {
ALOGI("警告: 无法获取支持的 HDR 模式列表");
}
int32_t tagCount = 0;
const uint32_t* tags = nullptr;
ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
for (int32_t i = 0; i < tagCount; i++) {
if (MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES == tags[i])
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES Tag ID: 0x%x\n", tags[i]);
}
}
entry = { 0 };
status = ACameraMetadata_getConstEntry(characteristics, MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_MFNR_FEATURE_AVAILABLE_MFB_MODES: 0x%x\n", entry.data.i32[i]);
}
}
#endif
entry = { 0 };
status = ACameraMetadata_getConstEntry(characteristics, MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_HDR_FEATURE_AVAILABLE_HDR_MODES_PHOTO: 0x%x\n", entry.data.i32[i]);
}
}
// 2. 设置 MediaTek 特定的 HDR 参数
// 使用 vendor tag 描述符
uint8_t aeMode = MTK_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request, MTK_CONTROL_AE_MODE, 1, &aeMode);
uint8_t sceneMode = ACAMERA_CONTROL_SCENE_MODE_HDR;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
if (status == ACAMERA_OK) {
ALOGI("已设置场景模式为 HDR");
// 启用场景模式控制
uint8_t sceneModeControl = ACAMERA_CONTROL_MODE_USE_SCENE_MODE;
ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &sceneModeControl);
}
int32_t hdrMode = MTK_HDR_FEATURE_HDR_MODE_AUTO; // 1 Enable HDR
ALOGI("Try to set MTK_HDR_FEATURE_HDR_MODE = %d", hdrMode);
status = ACaptureRequest_setEntry_i32(request, MTK_HDR_FEATURE_HDR_MODE, 1, &hdrMode);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HDR_FEATURE_HDR_MODE, status: %d", status);
}
int32_t halHdrMode = MTK_HDR_FEATURE_HDR_HAL_MODE_MSTREAM_CAPTURE;
status = ACaptureRequest_setEntry_i32(request, MTK_HDR_FEATURE_HDR_HAL_MODE, 1, &halHdrMode);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HDR_FEATURE_HDR_HAL_MODE, status: %d", status);
}
// int32_t ispTuning = MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING_AIHDR;
// status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_CAPTURE_HINT_FOR_ISP_TUNING, 1, &ispTuning);
uint8_t reqRemosaicEnable = 1;
status = ACaptureRequest_setEntry_u8(request, MTK_HAL_REQUEST_REMOSAIC_ENABLE, 1, &reqRemosaicEnable);
if (status != ACAMERA_OK)
{
ALOGE("Failed to set MTK_HAL_REQUEST_REMOSAIC_ENABLE, status: %d", status);
}
// 设置HDR的AE目标模式
int32_t aeTargetMode = MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL;
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode);
// 设置更高的曝光等级
int32_t exposureLevel = MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG; // 选择长曝光模式
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, 1, &exposureLevel);
int32_t hdrModeParam = 1; // 偏向于亮部
status = ACaptureRequest_setEntry_i32(request, MTK_3A_HDR_MODE, 1, &hdrModeParam);
if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
if (compensation < aeCompensationRange.min_)
{
compensation = aeCompensationRange.min_;
}
if (compensation > aeCompensationRange.max_)
{
compensation = aeCompensationRange.max_;
}
ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (compensation > 0)
{
// 调整HDR混合ISO模式优化暗部
int32_t hdrMixedIso = 1; // 启用混合ISO
status = ACaptureRequest_setEntry_i32(request, MTK_3A_AE_HDR_MIXED_ISO, 1, &hdrMixedIso);
// 降低高光恢复强度,允许更多过曝 - 这个设置没有问题
float hlrRatio = 0.3f;
status = ACaptureRequest_setEntry_float(request, MTK_ISP_HLR_RATIO, 1, &hlrRatio);
int32_t aeTargetMode = MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR; // 多帧HDR模式
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode);
int32_t ispBrightness = MTK_CONTROL_ISP_BRIGHTNESS_HIGH; // 范围通常是0-10值越大越亮
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ISP_BRIGHTNESS, 1, &ispBrightness);
}
}
int aa = 0;
}
bool NdkCamera::SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request)
{
camera_status_t status;
#if 1
int32_t tagCount = 0;
const uint32_t* tags = nullptr;
ACameraMetadata_getAllTags(characteristics, &tagCount, &tags);
for (int32_t i = 0; i < tagCount; i++) {
if (MTK_3A_FEATURE_AE_TARGET_MODE == tags[i])
{
ALOGI("MTK_3A_FEATURE_AE_TARGET_MODE Tag ID: 0x%x\n", tags[i]);
}
}
ACameraMetadata_const_entry entry;
status = ACameraMetadata_getConstEntry(characteristics, MTK_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, &entry);
if (status == ACAMERA_OK)
{
for (int i = 0; i < entry.count; i++)
{
ALOGI("MTK_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES: %d\n", entry.data.i32[i]);
}
}
#endif
// MTK_CONTROL_AE_TARGET_FPS_RANGE
// 4. 构建范围值
int32_t fpsRange[2] = {60, 60};
// 5. 设置请求参数
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);
if (status != ACAMERA_OK) {
ALOGE("Failed to set MTK_CONTROL_AE_TARGET_FPS_RANGE: %d", status);
}
int32_t aeTargetMode = 1; // MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP;
status = ACaptureRequest_setEntry_i32(request, MTK_3A_FEATURE_AE_TARGET_MODE, 1, &aeTargetMode);
if (status != ACAMERA_OK) {
ALOGE("Failed to set MTK_3A_FEATURE_AE_TARGET_MODE: %d", status);
}
int32_t zsl = MTK_CONTROL_ENABLE_ZSL_TRUE;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ENABLE_ZSL, 1, &zsl);
if (status != ACAMERA_OK) {
ALOGE("Failed to set MTK_CONTROL_ENABLE_ZSL: %d", status);
}
int32_t brightness = MTK_CONTROL_ISP_BRIGHTNESS_LOW;
status = ACaptureRequest_setEntry_i32(request, MTK_CONTROL_ISP_BRIGHTNESS, 1, &brightness);
if (status != ACAMERA_OK) {
ALOGE("Failed to set MTK_CONTROL_ISP_BRIGHTNESS_LOW: %d", status);
}
#if 0
// 创建色调映射曲线点 - 每个点包含(x,y)坐标,表示输入和输出亮度值的映射
// 这个曲线用于压制高光,减少过曝
const int numPoints = 5;
float curve[numPoints * 2] = {
0.0f, 0.0f, // 阴影部分保持不变
0.25f, 0.22f, // 稍微压暗中间调
0.5f, 0.45f, // 中间调较明显压暗
0.75f, 0.65f, // 高光部分明显压暗
1.0f, 0.85f // 最亮部分显著压暗
};
// 红色通道曲线
ACameraMetadata_const_entry entry;
int result = ACaptureRequest_setEntry_float(request,
ACAMERA_TONEMAP_CURVE_RED,
numPoints * 2,
curve);
if (result != ACAMERA_OK) {
ALOGE("Failed to set red tonemap curve: %d", result);
return false;
}
// 绿色通道曲线 (使用相同的曲线)
result = ACaptureRequest_setEntry_float(request,
ACAMERA_TONEMAP_CURVE_GREEN,
numPoints * 2,
curve);
if (result != ACAMERA_OK) {
ALOGE("Failed to set green tonemap curve: %d", result);
return false;
}
// 蓝色通道曲线 (使用相同的曲线)
result = ACaptureRequest_setEntry_float(request,
ACAMERA_TONEMAP_CURVE_BLUE,
numPoints * 2,
curve);
if (result != ACAMERA_OK) {
ALOGE("Failed to set blue tonemap curve: %d", result);
return false;
}
// 设置色调映射模式为曲线模式
uint8_t tonemapMode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE;
result = ACaptureRequest_setEntry_u8(request,
ACAMERA_TONEMAP_MODE,
1,
&tonemapMode);
if (result != ACAMERA_OK) {
ALOGE("Failed to set tonemap mode: %d", result);
return false;
}
#endif
return true;
}