// TermApp/app/src/main/cpp/camera2/ndkcamera.cpp
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include "ndkcamera.h"
#include <string>
#include <thread>
#include <numeric>
#include <android/log.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui.hpp>
#include "mat.h"
#include "gpu.h"
#include "Camera2Helper.h"
#include <AndroidHelper.h>
#include <LogThread.h>
#include "DngCreator.h"
#ifdef _DEBUG
// Debug builds wrap AImage_delete in a real function so it can be
// breakpointed and appears in stack traces; release builds alias the
// NDK call directly via the macro below (zero overhead).
void Auto_AImage_delete(AImage* image)
{
AImage_delete(image);
}
#else
#define Auto_AImage_delete AImage_delete
#endif
static void onAvailabilityCallback(void* context, const char* cameraId)
{
((NdkCamera*)context)->onAvailabilityCallback(cameraId);
// ALOGI("CameraStatus::onAvailability CameraId: %s", cameraId);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onAvailability CameraId: %s", cameraId);
}
static void onUnavailabilityCallback(void* context, const char* cameraId)
{
((NdkCamera*)context)->onUnavailabilityCallback(cameraId);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onUnavailability CameraId: %s", cameraId);
}
static void onDisconnected(void* context, ACameraDevice* device)
{
((NdkCamera*)context)->onDisconnected(device);
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::onDisconnected CameraId: %s", ACameraDevice_getId(device));
}
static void onError(void* context, ACameraDevice* device, int error)
{
((NdkCamera*)context)->onError(device, error);
}
static void onImageAvailable(void* context, AImageReader* reader)
{
NdkCamera* pThis = reinterpret_cast<NdkCamera*>(context);
pThis->onImageAvailable(reader);
}
// Session state: active (a capture request is being processed). Trace only.
static void onSessionActive(void* ctx, ACameraCaptureSession* sess)
{
    (void)ctx;
    ALOGD("onSessionActive %p", sess);
}
static void onSessionReady(void* context, ACameraCaptureSession *session)
{
ALOGD("onSessionReady %p", session);
((NdkCamera*)context)->onSessionReady(session);
}
// Session state: closed. Log only; teardown is handled elsewhere.
static void onSessionClosed(void* ctx, ACameraCaptureSession* sess)
{
    (void)ctx;
    XYLOG(XYLOG_SEVERITY_INFO, "onSessionClosed %p", sess);
}
void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
// XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);
((NdkCamera*)context)->onCaptureFailed(session, request, failure);
}
// Capture-sequence completion: trace only.
// BUGFIX: frameNumber is int64_t, but "%ld" expects long, which is 32-bit on
// armeabi-v7a/x86 Android ABIs — undefined behavior / garbage output there.
// Cast to long long and print with "%lld" so the format matches on all ABIs.
void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber)
{
    ALOGD("onCaptureSequenceCompleted %p sequenceId=%d frameNumber=%lld", session, sequenceId, (long long)frameNumber);
}
// Capture-sequence abort: trace only.
void onCaptureSequenceAborted(void* ctx, ACameraCaptureSession* sess, int sequenceId)
{
    (void)ctx;
    ALOGD("onCaptureSequenceAborted %p sequenceId=%d", sess, sequenceId);
}
void onCaptureProgressed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
((NdkCamera*)context)->onCaptureProgressed(session, request, result);
}
void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
// Construct with the requested output size and capture parameters; all NDK
// objects stay null until open() creates them.
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
    // Requested output geometry and caller-supplied capture parameters.
    mWidth = width;
    mHeight = height;
    m_params = params;

    // Capture-state flags.
    m_firstFrame = true;
    m_photoTaken = false;
    mCaptureTriggered = false;
    lightDetected = false;
    numberOfPrecaptures = 0;
    m_precaptureStartTime = 0;

    // Device characteristics; populated later by open().
    camera_facing = 0;
    camera_orientation = 0;
    maxFrameDuration = 0;
    afSupported = false;
    awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
    aeLockAvailable = false;
    awbLockAvailable = false;
    sceneModeSupported = false;
    activeArraySize[0] = 0;
    activeArraySize[1] = 0;
    maxRegions[0] = 0;
    maxRegions[1] = 0;
    maxRegions[2] = 0;

    // Route camera-manager availability callbacks back into this instance.
    camera_manager_cb.context = this;
    camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback;
    camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback;

    // NDK readers/windows/targets/outputs are created in open().
    mPreviewImageReader = NULL;
    mPreviewImageWindow = NULL;
    mPreviewOutputTarget = NULL;
    mPreviewSessionOutput = NULL;
    mImageReader = NULL;
    mImageWindow = NULL;
    mOutputTarget = NULL;
    mSessionOutput = NULL;
    mImageReader2 = NULL;
    mImageWindow2 = NULL;
    mOutputTarget2 = NULL;
    mSessionOutput2 = NULL;
    camera_device = 0;
    capture_session_output_container = 0;
    capture_session = 0;

    // Measurement/result state.
    mResult = { 0 };
    mLdr = ~0;
    mFinalLdr = 0;

    // Always capture at least one frame per burst.
    mFinalBurstCaptures = (m_params.burstCaptures == 0) ? 1 : m_params.burstCaptures;
    // RAW16 only when burst-raw capture is requested; YUV otherwise.
    mFinalOutputFormat = (m_params.burstRawCapture == 0) ? AIMAGE_FORMAT_YUV_420_888 : AIMAGE_FORMAT_RAW16;
}
// Destructor: release all camera resources (session, readers, device) via close().
NdkCamera::~NdkCamera()
{
close();
}
int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY)
1 year ago
{
camera_manager.Create();
// ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);
// find camera
bool foundIt = false;
// DisplayDimension disp(mWidth, mHeight);
// DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
ACameraIdList* cameraIdList = NULL;
status = ACameraManager_getCameraIdList(camera_manager, &cameraIdList);
if (status != ACAMERA_OK)
{
return 1;
}
for (int i = 0; i < cameraIdList->numCameras; ++i)
{
const char *id = cameraIdList->cameraIds[i];
if (cameraId.compare(id) == 0) {
foundIt = true;
break;
}
}
ACameraManager_deleteCameraIdList(cameraIdList);
if (!foundIt)
{
return 2;
}
ACameraMetadata * camera_metadata = 0;
status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
if (status != ACAMERA_OK)
{
return 3;
}
{
ACameraMetadata_const_entry e = { 0 };
camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
// format of the data: format, width, height, input?, type int32
maxResolutionX = 0;
maxResolutionY = 0;
for (int i = 0; i < e.count; i += 4)
{
int32_t input = e.data.i32[i + 3];
int32_t format = e.data.i32[i + 0];
if (input) continue;
if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
if (e.data.i32[i + 1] * e.data.i32[i + 2] > (maxResolutionX * maxResolutionY))
{
maxResolutionX = e.data.i32[i + 1];
maxResolutionY = e.data.i32[i + 2];
}
}
}
}
ACameraMetadata_free(camera_metadata);
return 0;
1 year ago
}
int NdkCamera::open(const std::string& cameraId) {
XYLOG(XYLOG_SEVERITY_DEBUG, "DBG::try open %s", cameraId.c_str());
// camera_facing = _camera_facing;
camera_manager.Create();
// ACameraManager_registerAvailabilityCallback(camera_manager, &camera_manager_cb);
// find camera
bool foundIt = false;
DisplayDimension disp(mWidth, mHeight);
DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
ALOGD("Start ACameraManager_getCameraIdList");
{
ACameraIdList *camera_id_list = 0;
for (int retry = 0; retry < 100; retry++)
{
status = ACameraManager_getCameraIdList(camera_manager, &camera_id_list);
AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraIdList return error, %d", status);
for (int i = 0; i < camera_id_list->numCameras; ++i) {
const char *id = camera_id_list->cameraIds[i];
if (cameraId.compare(id) == 0) {
foundIt = true;
break;
}
}
ACameraManager_deleteCameraIdList(camera_id_list);
if (foundIt)
{
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(16));
}
ALOGD("End ACameraManager_getCameraIdList");
// ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
if (!foundIt)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Camera Not Found on ID: %s", cameraId.c_str());
return 1;
}
mCameraId = cameraId;
ACameraMetadata * camera_metadata = 0;
status = ACameraManager_getCameraCharacteristics(camera_manager, cameraId.c_str(), &camera_metadata);
AASSERT(status == ACAMERA_OK, "ACameraManager_getCameraCharacteristics return error, %d", status);
9 months ago
mCharacteristics = std::shared_ptr<ACameraMetadata>(camera_metadata, ACameraMetadata_free);
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
// format of the data: format, width, height, input?, type int32
// DisplayDimension foundRes(4000, 4000);
// DisplayDimension maxJPG(0, 0);
foundIt = false;
DisplayDimension temp;
for (int i = 0; i < e.count; i += 4)
{
int32_t input = e.data.i32[i + 3];
if (input) continue;
int32_t format = e.data.i32[i + 0];
if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
if (!foundIt)
{
foundRes = temp;
foundIt = true;
}
}
if (!foundIt || foundRes.width() == 0 || foundRes.height() == 0)
{
9 months ago
// ACameraMetadata_free(camera_metadata);
XYLOG(XYLOG_SEVERITY_ERROR, "Camera RES(%d, %d) Not Found on ID: %s", mWidth, mHeight, cameraId.c_str());
return 1;
}
// foundRes.Flip();
// query faceing
acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e);
AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_LENS_FACING return error, %d", status);
if (status == ACAMERA_OK)
{
facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
}
}
camera_facing = facing;
// query orientation
int orientation = 0;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e);
AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_ORIENTATION return error, %d", status);
if (status == ACAMERA_OK)
{
orientation = (int)e.data.i32[0];
}
}
camera_orientation = orientation;
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &e);
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AF_AVAILABLE_MODES, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
#ifdef _DEBUG
std::string afModes;
for (int idx = 0; idx < e.count; idx++)
{
afModes += std::to_string(e.data.u8[idx]) + " ";
11 months ago
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Available AF Mode: ", afModes.c_str());
#endif
afSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AF_MODE_OFF));
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_AVAILABLE_MODES, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
if (status == ACAMERA_OK)
{
for (int idx = 0; idx < e.count; idx++)
{
if (m_params.awbMode == e.data.u8[idx])
{
awbMode = m_params.awbMode;
break;
}
// unsigned int m = e.data.u8[idx];
// XYLOG(XYLOG_SEVERITY_DEBUG, "Available AWB Mode %u", m);
}
}
// awbSupported = (status == ACAMERA_OK) && !(e.count == 0 || (e.count == 1 && e.data.u8[0] == ACAMERA_CONTROL_AWB_MODE_OFF));
}
if (!afSupported)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AF not Supported");
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE return error, %d", status);
if (status == ACAMERA_OK)
{
exposureRange.min_ = val.data.i64[0];
if (exposureRange.min_ < kMinExposureTime)
{
exposureRange.min_ = kMinExposureTime;
}
exposureRange.max_ = val.data.i64[1];
if (exposureRange.max_ > kMaxExposureTime)
{
exposureRange.max_ = kMaxExposureTime;
}
// exposureTime = exposureRange.value(2);
}
else
{
ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
exposureRange.min_ = exposureRange.max_ = 0l;
// exposureTime_ = 0l;
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_LOCK_AVAILABLE, &e);
// AASSERT(status == ACAMERA_OK, "ACameraMetadata_getConstEntry::ACAMERA_CONTROL_AF_AVAILABLE_MODES return error, %d", status);
aeLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE) : false;
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AWB_LOCK_AVAILABLE, &e);
awbLockAvailable = (status == ACAMERA_OK) ? (*e.data.u8 == ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE) : false;
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_ZOOM_RATIO_RANGE, &val);
if (status == ACAMERA_OK)
{
float zoomRatioMin = val.data.f[0];
float zoomRatioMax = val.data.f[1];
ALOGI("Zoom Ratio Range: [%f,%f]", zoomRatioMin, zoomRatioMax);
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &val);
if (status == ACAMERA_OK)
{
aeCompensationRange.min_ = val.data.i32[0];
aeCompensationRange.max_ = val.data.i32[1];
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_RANGE [%d,%d]", aeCompensationRange.min_, aeCompensationRange.max_);
}
else
{
ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE");
aeCompensationRange.min_ = aeCompensationRange.max_ = 0l;
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AE_COMPENSATION_STEP, &val);
if (status == ACAMERA_OK)
{
aeCompensationStep = val.data.r[0];
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_COMPENSATION_STEP num=%d den=%d", aeCompensationStep.numerator, aeCompensationStep.denominator);
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION, &e);
maxFrameDuration = (status == ACAMERA_OK) ? *e.data.i64 : 0;
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
if (status == ACAMERA_OK)
{
sensitivityRange.min_ = val.data.i32[0];
sensitivityRange.max_ = val.data.i32[1];
}
else
{
ALOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
sensitivityRange.min_ = sensitivityRange.max_ = 0;
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val);
if (status == ACAMERA_OK)
{
activeArraySize[0] = val.data.i32[2];
activeArraySize[1] = val.data.i32[3];
}
}
{
ACameraMetadata_const_entry val = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val);
if (status == ACAMERA_OK)
{
maxRegions[0] = val.data.i32[0];
maxRegions[1] = val.data.i32[1];
maxRegions[2] = val.data.i32[2];
}
}
{
ACameraMetadata_const_entry e = { 0 };
status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e);
if (status == ACAMERA_OK)
{
for (int i = 0; i < e.count; i++)
{
if (m_params.sceneMode == e.data.u8[i])
{
sceneModeSupported = true;
break;
}
}
}
}
9 months ago
// ACameraMetadata_free(camera_metadata);
}
// open camera
{
ACameraDevice_StateCallbacks camera_device_state_callbacks;
camera_device_state_callbacks.context = this;
camera_device_state_callbacks.onDisconnected = ::onDisconnected;
camera_device_state_callbacks.onError = ::onError;
status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device);
if (status != ACAMERA_OK)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status);
return 1;
}
}
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::Open %s Orientation=%d width=%d height=%d", cameraId.c_str(), camera_orientation, foundRes.width(), foundRes.height());
status = ACaptureSessionOutputContainer_create(&capture_session_output_container);
uint32_t burstCaptures = getBurstCaptures();
if (burstCaptures == 0)
{
burstCaptures = 1;
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mPreviewImageReader, &listener);
mstatus = AImageReader_getWindow(mPreviewImageReader, &mPreviewImageWindow);
ANativeWindow_acquire(mPreviewImageWindow);
}
8 months ago
status = ACameraOutputTarget_create(mPreviewImageWindow, &mPreviewOutputTarget);
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader, &listener);
mstatus = AImageReader_getWindow(mImageReader, &mImageWindow);
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
8 months ago
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
if (m_params.burstRawCapture == 1) // Auto
{
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures, &mImageReader2);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader2, &listener);
mstatus = AImageReader_getWindow(mImageReader2, &mImageWindow2);
ANativeWindow_acquire(mImageWindow2);
}
status = ACameraOutputTarget_create(mImageWindow2, &mOutputTarget2);
status = ACaptureSessionOutput_create(mImageWindow2, &mSessionOutput2);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput2);
}
8 months ago
CaptureRequest *request = CreateRequest(true);
mCaptureRequests.push_back(request);
#if 0
for (int idx = 0; idx <= burstCaptures; idx++)
{
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
8 months ago
bool isPreviewRequest = (idx == PREVIEW_REQUEST_IDX);
request->pThis = this;
8 months ago
request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader;
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow;
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
ACaptureRequest_setUserContext(request->request, request);
// uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
8 months ago
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode);
uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode);
if (afSupported && m_params.autoFocus)
{
if (!m_params.zoom)
{
if (maxRegions[2] > 0)
{
int32_t centerX = activeArraySize[0] >> 1;
int32_t centerY = activeArraySize[1] >> 1;
int32_t sizeX = activeArraySize[0] >> 4;
int32_t sizeY = activeArraySize[1] >> 4;
int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 };
// status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
// uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
// trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
}
else
{
uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
if (m_params.sceneMode != 0)
{
uint8_t sceneMode = m_params.sceneMode;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
}
if (m_params.autoExposure)
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
// ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);
if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
if (compensation < aeCompensationRange.min_)
{
compensation = aeCompensationRange.min_;
}
if (compensation > aeCompensationRange.max_)
{
compensation = aeCompensationRange.max_;
}
// int32_t aeCompensation = aeCompensationRange.max_;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (status != ACAMERA_OK)
{
int aa = 0;
}
}
if (maxRegions[0] > 0)
{
int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 };
// status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
8 months ago
if (isPreviewRequest)
{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
mResult.aeLockSetted = 1;
}
else
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
}
uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
m_precaptureStartTime = m_startTime;
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
}
}
else
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
if (m_params.sensitivity > 0)
{
int32_t sensitivity = m_params.sensitivity;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
}
if (m_params.exposureTime > 0)
{
int64_t exposureTime = m_params.exposureTime;
status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
}
int64_t frameDuration = maxFrameDuration / 2;
// status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration);
}
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
{
uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
mResult.awbLockSetted = 1;
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState);
}
#if 0
uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);
uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
#endif
if (m_params.zoom)
{
float zoomRatio = m_params.zoomRatio;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio);
if (status != ACAMERA_OK)
{
}
}
status = ACaptureRequest_addTarget(request->request, request->imageTarget);
status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
}
8 months ago
#endif
// capture session
ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
camera_capture_session_state_callbacks.context = this;
camera_capture_session_state_callbacks.onActive = onSessionActive;
camera_capture_session_state_callbacks.onReady = ::onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks;
capture_session_capture_callbacks.context = this;
capture_session_capture_callbacks.onCaptureStarted = 0;
capture_session_capture_callbacks.onCaptureProgressed = ::onCaptureProgressed;
capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted;
capture_session_capture_callbacks.onCaptureFailed = ::onCaptureFailed;
capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
capture_session_capture_callbacks.onCaptureBufferLost = 0;
status = ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId));
ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId);
m_startTime = GetMicroTimeStamp();
m_precaptureStartTime = m_startTime;
return status == ACAMERA_OK ? 0 : 1;
}
NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
{
camera_status_t status = ACAMERA_OK;
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
bool autoSwitchToOneFrame = (m_params.burstRawCapture == 1) && (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888);
8 months ago
request->pThis = this;
request->imageReader = isPreviewRequest ? mPreviewImageReader : (autoSwitchToOneFrame ? mImageReader2 : mImageReader);
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : (autoSwitchToOneFrame ? mImageWindow2 : mImageWindow);
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : (autoSwitchToOneFrame ? mOutputTarget2 : mOutputTarget);
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : (autoSwitchToOneFrame ? mSessionOutput2 : mSessionOutput);
8 months ago
request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
// mCaptureRequests.push_back(request);
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
ACaptureRequest_setUserContext(request->request, request);
// uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_FLASH_MODE, 1, &flashMode);
uint8_t nrMode = ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_NOISE_REDUCTION_MODE, 1, &nrMode);
uint8_t edgeMode = ACAMERA_EDGE_MODE_FAST;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_EDGE_MODE, 1, &edgeMode);
if (afSupported && m_params.autoFocus)
{
if (!m_params.zoom)
{
if (maxRegions[2] > 0)
{
int32_t centerX = activeArraySize[0] >> 1;
int32_t centerY = activeArraySize[1] >> 1;
int32_t sizeX = activeArraySize[0] >> 4;
int32_t sizeY = activeArraySize[1] >> 4;
int32_t afRegions[] = { centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000 };
// status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
// uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
// trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
}
else
{
uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
// status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
}
if (m_params.sceneMode != 0)
{
uint8_t sceneMode = m_params.sceneMode;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
}
if (m_params.autoExposure)
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
// ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);
if ((aeCompensationRange.min_ != 0 || aeCompensationRange.max_ != 0) && m_params.compensation != 0)
{
int32_t compensation = m_params.compensation;
if (compensation < aeCompensationRange.min_)
{
compensation = aeCompensationRange.min_;
}
if (compensation > aeCompensationRange.max_)
{
compensation = aeCompensationRange.max_;
}
// int32_t aeCompensation = aeCompensationRange.max_;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &compensation);
if (status != ACAMERA_OK)
{
int aa = 0;
}
}
if (maxRegions[0] > 0)
{
int32_t aeRegions[] = { 0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000 };
// status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
if (status == ACAMERA_OK)
{
#ifdef _DEBUG
int aa = 0;
#endif
}
}
if (isPreviewRequest)
{
if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AE");
mResult.aeLockSetted = 1;
}
else
{
uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
XYLOG(XYLOG_SEVERITY_DEBUG, "AE_Lock Not Supported");
}
uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d", (int)status);
m_precaptureStartTime = m_startTime;
// ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
}
}
else
{
uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
if (m_params.sensitivity > 0)
{
int32_t sensitivity = m_params.sensitivity;
status = ACaptureRequest_setEntry_i32(request->request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
}
if (m_params.exposureTime > 0)
{
int64_t exposureTime = m_params.exposureTime;
status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
}
int64_t frameDuration = maxFrameDuration / 2;
// status = ACaptureRequest_setEntry_i64(request->request, ACAMERA_SENSOR_FRAME_DURATION, 1, &frameDuration);
}
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
if ((awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO) && awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED))
{
uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
mResult.awbLockSetted = 1;
XYLOG(XYLOG_SEVERITY_DEBUG, "Try to Lock AWB AWBS=%u", (unsigned int)mResult.awbState);
}
#if 0
uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);
uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
#endif
if (m_params.zoom)
{
float zoomRatio = m_params.zoomRatio;
// uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
status = ACaptureRequest_setEntry_float(request->request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoomRatio);
if (status != ACAMERA_OK)
{
}
}
status = ACaptureRequest_addTarget(request->request, request->imageTarget);
// status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
// status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
return request;
}
// Tear down the entire capture pipeline, roughly in reverse order of creation:
// pending burst frames -> capture session -> capture requests -> preview/still
// output targets, windows and readers -> session outputs -> output container
// -> camera device.  Every freed handle is reset to 0, so calling close()
// again is a harmless no-op.
// NOTE: stray VCS-viewer artifact lines ("8 months ago") that had been pasted
// into this function have been removed; no code was changed otherwise.
void NdkCamera::close()
{
    XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
    camera_status_t res = ACAMERA_OK;

    // Release any burst frames still held; the shared_ptr deleter frees each AImage.
    mCaptureFrames.clear();

    if ((ACameraManager *)camera_manager != NULL)
    {
        // res = ACameraManager_unregisterAvailabilityCallback(camera_manager, &camera_manager_cb);
    }

    if (capture_session)
    {
        // res = ACameraCaptureSession_stopRepeating(capture_session);
        ACameraCaptureSession_close(capture_session);
        capture_session = 0;
    }

    // Free every outstanding request after detaching its image target.
    for (auto it = mCaptureRequests.begin(); it != mCaptureRequests.end(); ++it)
    {
        CaptureRequest* request = *it;

        if (request->request)
        {
            res = ACaptureRequest_removeTarget(request->request, request->imageTarget);
            ACaptureRequest_free(request->request);
            request->request = 0;
        }
        /*
        if (request->imageTarget)
        {
            ACameraOutputTarget_free(request->imageTarget);
            request->imageTarget = 0;
        }
        */
        delete request;
    }
    mCaptureRequests.clear();

    // Preview pipeline: target -> window -> reader.
    if (mPreviewOutputTarget != NULL)
    {
        ACameraOutputTarget_free(mPreviewOutputTarget);
        mPreviewOutputTarget = 0;
    }

    if (mPreviewImageWindow != NULL)
    {
        ANativeWindow_release(mPreviewImageWindow);
        mPreviewImageWindow = 0;
    }

    if (mPreviewImageReader != NULL)
    {
        // AImageReader_setImageListener(image_reader, NULL);
        AImageReader_delete(mPreviewImageReader);
        mPreviewImageReader = 0;
    }

    // Primary still-capture pipeline.
    if (mOutputTarget != NULL)
    {
        ACameraOutputTarget_free(mOutputTarget);
        mOutputTarget = 0;
    }

    if (mImageWindow != NULL)
    {
        ANativeWindow_release(mImageWindow);
        mImageWindow = 0;
    }

    if (mImageReader != NULL)
    {
        // AImageReader_setImageListener(image_reader, NULL);
        AImageReader_delete(mImageReader);
        mImageReader = 0;
    }

    // Secondary still-capture pipeline.
    if (mOutputTarget2 != NULL)
    {
        ACameraOutputTarget_free(mOutputTarget2);
        mOutputTarget2 = 0;
    }

    if (mImageWindow2 != NULL)
    {
        ANativeWindow_release(mImageWindow2);
        mImageWindow2 = 0;
    }

    if (mImageReader2 != NULL)
    {
        // AImageReader_setImageListener(image_reader, NULL);
        AImageReader_delete(mImageReader2);
        mImageReader2 = 0;
    }

    // Session outputs must be removed from the container before being freed.
    if (mPreviewSessionOutput != NULL)
    {
        if (capture_session_output_container)
        {
            ACaptureSessionOutputContainer_remove(capture_session_output_container, mPreviewSessionOutput);
        }
        ACaptureSessionOutput_free(mPreviewSessionOutput);
        mPreviewSessionOutput = 0;
    }
    if (mSessionOutput != NULL)
    {
        if (capture_session_output_container)
        {
            ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput);
        }
        ACaptureSessionOutput_free(mSessionOutput);
        mSessionOutput = 0;
    }
    if (mSessionOutput2 != NULL)
    {
        if (capture_session_output_container)
        {
            ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput2);
        }
        ACaptureSessionOutput_free(mSessionOutput2);
        mSessionOutput2 = 0;
    }

    if (capture_session_output_container)
    {
        ACaptureSessionOutputContainer_free(capture_session_output_container);
        capture_session_output_container = 0;
    }

    if (camera_device)
    {
        ACameraDevice_close(camera_device);
        camera_device = 0;
    }
}
// AImageReader callback, invoked for both the preview reader and the
// still-capture reader(s).
//  * Preview frames: only the Y plane's mean brightness is computed and cached
//    in mLdr (used later as a light-level estimate); the frame is dropped.
//  * Still frames, single capture: the YUV_420_888 image is converted to an
//    RGB cv::Mat (mOneFrame) and onOneCapture() fires once the matching
//    capture result has also arrived.
//  * Still frames, burst: frames are queued in mCaptureFrames and
//    FireBurstCapture() fires once all expected frames + results are in.
// Fixes vs. previous revision: the brightness accumulation was seeded with an
// int 0, which can overflow for large planes (255 * pixel-count can exceed
// INT_MAX) — now seeded with uint64_t; a zero-length Y plane no longer divides
// by zero; stray VCS-viewer artifact lines were removed.
void NdkCamera::onImageAvailable(AImageReader* reader)
{
    AImage* image = 0;
    media_status_t mstatus = AMEDIA_OK;

    if (reader == mPreviewImageReader)
    {
        mstatus = AImageReader_acquireLatestImage(reader, &image);
        if (mstatus != AMEDIA_OK)
        {
            // https://stackoverflow.com/questions/67063562
            if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
            {
                XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus);
            }
            return;
        }

        // if (mLdr == ~0)
        {
            uint8_t* y_data = 0;
            int y_len = 0;
            AImage_getPlaneData(image, 0, &y_data, &y_len);

            if (y_data != 0 && y_len > 0)
            {
                // Mean luminance of the Y plane; seed must be 64-bit to avoid
                // int overflow on large planes.
#if __cplusplus >= 201703L
                uint64_t avgY = std::reduce(y_data, y_data + y_len, (uint64_t)0);
#else
                uint64_t avgY = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
#endif
                avgY = avgY / (uint64_t)y_len;
                m_locker.lock();
                mLdr = (uint8_t)avgY;
                m_locker.unlock();
            }
        }

        AImage_delete(image);
        return;
    }
    else
    {
        uint32_t burstCaptures = getBurstCaptures();
        if (burstCaptures == 0)
        {
            burstCaptures = 1;
        }
        if (burstCaptures == 1)
        {
            // Single-shot path: convert this one frame to RGB immediately.
            mstatus = AImageReader_acquireNextImage(reader, &image);
            if (mstatus != AMEDIA_OK)
            {
                // https://stackoverflow.com/questions/67063562
                if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
                {
                    if (mCaptureFrames.size() < burstCaptures)
                    {
                        XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus);
                    }
                }
                return;
            }

            unsigned long long ts = GetMicroTimeStamp();

            int32_t format;
            mstatus = AImage_getFormat(image, &format);

            if (format == AIMAGE_FORMAT_YUV_420_888)
            {
                int32_t width;
                int32_t height;
                mstatus = AImage_getWidth(image, &width);
                mstatus = AImage_getHeight(image, &height);

                int32_t y_pixelStride = 0;
                int32_t u_pixelStride = 0;
                int32_t v_pixelStride = 0;
                AImage_getPlanePixelStride(image, 0, &y_pixelStride);
                AImage_getPlanePixelStride(image, 1, &u_pixelStride);
                AImage_getPlanePixelStride(image, 2, &v_pixelStride);

                int32_t y_rowStride = 0;
                int32_t u_rowStride = 0;
                int32_t v_rowStride = 0;
                AImage_getPlaneRowStride(image, 0, &y_rowStride);
                AImage_getPlaneRowStride(image, 1, &u_rowStride);
                AImage_getPlaneRowStride(image, 2, &v_rowStride);

                uint8_t* y_data = 0;
                uint8_t* u_data = 0;
                uint8_t* v_data = 0;
                int y_len = 0;
                int u_len = 0;
                int v_len = 0;
                AImage_getPlaneData(image, 0, &y_data, &y_len);
                AImage_getPlaneData(image, 1, &u_data, &u_len);
                AImage_getPlaneData(image, 2, &v_data, &v_len);

                // If the three planes are already laid out as NV21 (tightly
                // packed Y followed by interleaved VU), feed them directly.
                if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
                {
                    // already nv21
                    ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
                                      camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
                }
                else
                {
                    // construct nv21 by copying plane-by-plane with strides
                    uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
                    {
                        // Y
                        uint8_t* yptr = nv21;
                        for (int y = 0; y < height; y++)
                        {
                            const uint8_t* y_data_ptr = y_data + y_rowStride * y;
                            for (int x = 0; x < width; x++)
                            {
                                yptr[0] = y_data_ptr[0];
                                yptr++;
                                y_data_ptr += y_pixelStride;
                            }
                        }

                        // UV (NV21 = V first, then U)
                        uint8_t* uvptr = nv21 + width * height;
                        for (int y = 0; y < height / 2; y++)
                        {
                            const uint8_t* v_data_ptr = v_data + v_rowStride * y;
                            const uint8_t* u_data_ptr = u_data + u_rowStride * y;
                            for (int x = 0; x < width / 2; x++)
                            {
                                uvptr[0] = v_data_ptr[0];
                                uvptr[1] = u_data_ptr[0];
                                uvptr += 2;
                                v_data_ptr += v_pixelStride;
                                u_data_ptr += u_pixelStride;
                            }
                        }
                    }

                    ConvertYUV21ToMat(nv21, width, height, mWidth, mHeight, camera_orientation,
                                      camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);

                    delete[] nv21;
                }
            }

            m_photoTaken = true;

            AImage_delete(image);

            // Fire the completion callback only if the capture result metadata
            // has already been delivered by onCaptureCompleted().
            std::shared_ptr<ACameraMetadata> result;
            bool captureCompleted = false;
            m_locker.lock();
            if (!mCaptureResults.empty())
            {
                captureCompleted = true;
                result = mCaptureResults[0];
            }
            m_locker.unlock();

            if (captureCompleted)
            {
                onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
            }
        }
        else
        {
            // Burst path: drain every available frame into mCaptureFrames.
            while (1)
            {
                mstatus = AImageReader_acquireNextImage(reader, &image);
                if (mstatus != AMEDIA_OK)
                {
                    // https://stackoverflow.com/questions/67063562
                    if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
                    {
                        if (mCaptureFrames.size() < burstCaptures)
                        {
                            XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
                        }
                    }
                    break;
                }

                m_photoTaken = true;

                m_locker.lock();
                mCaptureFrames.push_back(std::shared_ptr<AImage>(image, Auto_AImage_delete));
                m_locker.unlock();

                ALOGD("Capture Image Received");
            }

            // All frames AND all results must be in before firing the burst.
            bool captureCompleted = false;
            size_t expectedTimes = mCaptureRequests.size() - 1;
            m_locker.lock();
            captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
            m_locker.unlock();

            if (captureCompleted)
            {
                FireBurstCapture();
            }
        }
    }
}
// Error hook invoked on capture/device failures (see onCaptureFailed/onError).
// Base implementation ignores the message; subclasses may override.
void NdkCamera::on_error(const std::string& msg)
{
}
// Device-disconnect hook; base implementation does nothing.
void NdkCamera::onDisconnected(ACameraDevice* device)
{
}
// Frame hook receiving the decoded RGB frame; base implementation ignores it
// and returns false (presumably "not handled" — TODO confirm at call sites).
bool NdkCamera::on_image(cv::Mat& rgb)
{
    return false;
}
// Single-frame capture hook: called with the camera characteristics, the
// capture result metadata, the last light-level estimate (ldr), the elapsed
// time in microseconds and the converted RGB frame. Base returns false.
bool NdkCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb)
{
    return false;
}
// Burst-capture hook (DNG byte-buffer variant): one result + one encoded frame
// per burst shot. Base implementation does nothing and returns false.
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
{
    return false;
}
// Burst-capture hook (raw AImage variant). Base implementation returns false.
// Fix: removed a stray VCS-viewer artifact line that sat between the signature
// and the body and broke compilation.
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
{
    return false;
}
// Convert a raw NV21 buffer into an RGB cv::Mat, applying the rotation implied
// by the sensor orientation and the requested display orientation, then scale
// or center-crop it to the configured output size (mWidth x mHeight) and hand
// it to on_image(cv::Mat&).
// NOTE(review): the converter used below is yuv420sp2rgb_nv12 although the
// input is NV21 (VU order) — presumably intentional to get the channel order
// the consumer expects; verify colors against downstream usage.
void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height)
{
    // ALOGW("nv21 size: %d x %d", nv21_width, nv21_height);

    // rotate nv21
    int w = 0;
    int h = 0;
    int rotate_type = 0;
    cv::Mat nv21_rotated;
    const unsigned char* yuv420data = nv21;

    // TODO !!!???
    /*
    if (camera_->GetSensorOrientation(&facing, &angle)) {
        if (facing == ACAMERA_LENS_FACING_FRONT) {
            imageRotation = (angle + rotation_) % 360;
            imageRotation = (360 - imageRotation) % 360;
        } else {
            imageRotation = (angle - rotation_ + 360) % 360;
        }
    }
    */

    // Target output dimensions; swapped below for 90/270-degree rotations.
    int orgWidth = mWidth;
    int orgHeight = mHeight;
    // int co = camera_orientation > 0 ? camera_orientation + 90 : camera_orientation;
    if (m_params.orientation != 0)
    {
        // Combine sensor mounting angle with the requested orientation
        // (m_params.orientation is 1-based: 1 => 0 deg, 2 => 90 deg, ...).
        // Front cameras mirror, hence the extra (360 - co) flip.
        int co = 0;
        if (camera_facing == ACAMERA_LENS_FACING_FRONT)
        {
            co = (camera_orientation + (m_params.orientation - 1) * 90) % 360;
            co = (360 - co) % 360;
        }
        else
        {
            co = (camera_orientation - (m_params.orientation - 1) * 90 + 360) % 360;
        }

        XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing);

        // Map the resulting angle to ncnn's kanna_rotate type (1..8 covers the
        // four rotations with and without mirroring). Assumes co is a multiple
        // of 90 — true as long as camera_orientation is (per Camera2 spec).
        // int co = 0;
        if (co == 0)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 2 : 1;
        }
        else if (co == 90)
        {
            w = nv21_height;
            h = nv21_width;

            orgWidth = mHeight;
            orgHeight = mWidth;

            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 5 : 6;
        }
        else if (co == 180)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 4 : 3;
        }
        else if (co == 270)
        {
            w = nv21_height;
            h = nv21_width;

            orgWidth = mHeight;
            orgHeight = mWidth;

            rotate_type = camera_facing == ACAMERA_LENS_FACING_FRONT ? 7 : 8;
        }

        nv21_rotated.create(h + h / 2, w, CV_8UC1);
        ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
        yuv420data = nv21_rotated.data;
    }
    else
    {
        w = nv21_width;
        h = nv21_height;
        XYLOG(XYLOG_SEVERITY_DEBUG, "NO Orientation Facing=%d", camera_facing);
    }

    // nv21_rotated to rgb
    cv::Mat rgb;
    if (w == orgWidth && h == orgHeight)
    {
        rgb.create(h, w, CV_8UC3);
        // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data);
    }
    else
    {
        cv::Mat org(h, w, CV_8UC3);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data);
        if (w * orgHeight == h * orgWidth) // Same Ratio
        {
            cv::resize(org, rgb, cv::Size(orgWidth, orgHeight));
        }
        else
        {
            // Crop image: take the centered orgWidth x orgHeight window when
            // the frame is large enough; otherwise pass the frame through.
            if (w > orgWidth && h >= orgHeight)
            {
                int left = (w - orgWidth) / 2;
                int top = (h - orgHeight) / 2;
                rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth));
            }
            else
            {
                rgb = org;
            }
        }
    }

    on_image(rgb);
}
// Session-ready state callback; base implementation does nothing.
void NdkCamera::onSessionReady(ACameraCaptureSession *session)
{
}
// Partial-result callback; base implementation ignores partial metadata.
void NdkCamera::onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
}
// Per-frame completed-result callback.
//  * Preview request: drives 3A convergence (AF/AE/AWB). Once every enabled
//    stage is converged/locked — or m_params.focusTimeout has elapsed — the
//    repeating preview is stopped and the still-capture burst is submitted.
//  * Still-capture request: stores a copy of the result metadata and fires
//    onOneCapture()/FireBurstCapture() when all expected pieces have arrived.
// Fixes vs. previous revision: removed embedded VCS-viewer artifact lines that
// broke compilation, and made loop index types match the compared unsigned
// values; all logic is otherwise unchanged.
void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    void* context = NULL;
    ACaptureRequest_getUserContext(request, &context);
    CaptureRequest* pCaptureRequest = reinterpret_cast<CaptureRequest *>(context);

    if (pCaptureRequest->request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request)
    {
        if (mCaptureTriggered)
        {
            return;
        }

        bool readyForCapture = true;
        camera_status_t status = ACAMERA_ERROR_BASE;
        unsigned long long ts = GetMicroTimeStamp();

        // Read current 3A states from the preview result (INACTIVE if absent).
        uint8_t aeState = ACAMERA_CONTROL_AE_STATE_INACTIVE;
        uint8_t awbState = ACAMERA_CONTROL_AWB_STATE_INACTIVE;
        uint8_t afState = ACAMERA_CONTROL_AF_STATE_INACTIVE;

        ACameraMetadata_const_entry val = { 0 };

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
        aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AWB_STATE, &val);
        awbState = (status == ACAMERA_OK) ? val.data.u8[0] : ACAMERA_CONTROL_AWB_STATE_INACTIVE;

        val = { 0 };
        status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AF_STATE, &val);
        afState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AF_STATE_INACTIVE;

        // ALOGW("Preview State AFS=%u AES=%u AWBS=%u Time=%u",
        //       (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));

        // Check if timeout: past focusTimeout we capture regardless of 3A state.
        if (ts - m_startTime < m_params.focusTimeout)
        {
            if (afSupported && (m_params.autoFocus != 0))
            {
                /*
                if (afState == ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED)
                {
                    // Will lock it
                    if (mResult.afLockSetted == 0)
                    {
                        uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_START;
                        status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
                        mResult.afLockSetted = 1;
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger AF AFS=%u", (uint32_t)mResult.afState);
                        readyForCapture = false;
                    }
                }
                */

                // AF must report focused (passively or locked) before capture.
                if (afState != ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                    afState != ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                    afState != ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
                // if (afState != ACAMERA_CONTROL_AF_STATE_INACTIVE)
                {
                    //XYLOG(XYLOG_SEVERITY_DEBUG, "AF Enabled And Focused");
                    readyForCapture = false;
                }
            }

            if (m_params.autoExposure != 0)
            {
                // Restart precapture metering if the pipeline is stuck in it.
                if (aeState == ACAMERA_CONTROL_AE_STATE_PRECAPTURE)
                {
                    uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
                    status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
                    status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
                    //XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
                    readyForCapture = false;
                    numberOfPrecaptures = 0;
                    m_precaptureStartTime = ts;
                }

                if (aeLockAvailable && (m_params.wait3ALocked & WAIT_AE_LOCKED))
                {
                    // Caller asked to wait for an explicit AE lock.
                    if (aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "AE Locked");
#endif
                    }
                }
                else
                {
                    // Otherwise converged / flash-required / locked all count.
                    if (aeState != ACAMERA_CONTROL_AE_STATE_CONVERGED &&
                        aeState != ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED &&
                        aeState != ACAMERA_CONTROL_AE_STATE_LOCKED) {
                        readyForCapture = false;
                    }
                    else {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AWB CONVERGED Or Locked");
#endif
                    }
                }
            }

            if (awbMode == ACAMERA_CONTROL_AWB_MODE_AUTO)
            {
                if (awbLockAvailable && (m_params.wait3ALocked & WAIT_AWB_LOCKED)) {
                    if (awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        //XYLOG(XYLOG_SEVERITY_DEBUG, "AWB Locked");
#endif
                    }
                }
                else
                {
                    if (awbState != ACAMERA_CONTROL_AWB_STATE_CONVERGED &&
                        awbState != ACAMERA_CONTROL_AWB_STATE_LOCKED)
                    {
                        readyForCapture = false;
                    }
                    else
                    {
#if 0
                        XYLOG(XYLOG_SEVERITY_DEBUG, "AE CONVERGED Or Locked");
#endif
                    }
                }
            }
        }
        else
        {
#if 0
            XYLOG(XYLOG_SEVERITY_WARNING, "Prepare Capture Timeout for 3A And will Capture AFS=%u AES=%u AWBS=%u Time=%u",
                  (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
#endif
        }

        if (readyForCapture/* && mCaptureRequests.size() > 1*/)
        {
            // Must update mFinalLdr As getBurstCaptures getOutputFormat depends mFinalLdr
            if (mLdr != ~0)
            {
                mFinalLdr = mLdr;
            }

            XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u",
                  (unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime));

            // Bright scenes fall back from RAW burst to a single YUV frame.
            if (m_params.burstRawCapture == 1)
            {
                if (mFinalLdr > 50)
                {
                    XYLOG(XYLOG_SEVERITY_WARNING, "Switch to OneFrame Capture(YUV) As LDR=%u", mFinalLdr);
                    mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
                    mFinalBurstCaptures = 1;
                }
            }

            uint32_t burstCaptures = getBurstCaptures();
            if (burstCaptures == 0)
            {
                burstCaptures = 1;
            }

            // Build one still-capture request per burst frame.
            std::vector<ACaptureRequest*> requests;
            int sequenceId = 0;
            requests.reserve(burstCaptures);

            for (uint32_t idx = 0; idx < burstCaptures; idx++)
            {
                CaptureRequest* request = CreateRequest(false);
                mCaptureRequests.push_back(request);
                // CopyPreviewRequest(mCaptureRequests[idx]->request, result);
                requests.push_back(request->request);
            }

            // ALOGW("Will Stop Repeating Request");
            status = ACameraCaptureSession_stopRepeating(capture_session);
            // ALOGW("Finished Repeating Request");

            ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
            capture_session_capture_cb.context = this;
            capture_session_capture_cb.onCaptureStarted = 0;
            capture_session_capture_cb.onCaptureProgressed = ::onCaptureProgressed;
            capture_session_capture_cb.onCaptureCompleted = ::onCaptureCompleted;
            capture_session_capture_cb.onCaptureFailed = ::onCaptureFailed;
            capture_session_capture_cb.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
            capture_session_capture_cb.onCaptureSequenceAborted = onCaptureSequenceAborted;
            capture_session_capture_cb.onCaptureBufferLost = 0;

            int numberOfRequests = (int)requests.size();
            status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
                                                   numberOfRequests, &requests[0], &sequenceId);

            ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
            for (size_t idx = 1; idx < mCaptureRequests.size(); idx++)
            {
                mCaptureRequests[idx]->sessionSequenceId = sequenceId;
            }
            mCaptureTriggered = true;
        }
    }
    else
    {
#ifdef _DEBUG
        uint64_t tid = getThreadIdOfULL();
        ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
        unsigned long long ts = GetMicroTimeStamp();

        // Own a copy of the result; ACameraMetadata_free releases it.
        ACameraMetadata* pCopy = ACameraMetadata_copy(result);
        bool captureCompleted = false;
        size_t expectedTimes = mCaptureRequests.size() - 1;
        if (expectedTimes == 1)
        {
            // Single shot: complete as soon as the frame has also arrived.
            std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
            m_locker.lock();
            mCaptureResults.push_back(captureResult);
            captureCompleted = !mOneFrame.empty();
            m_locker.unlock();

            if (captureCompleted)
            {
                onOneCapture(mCharacteristics, captureResult, mFinalLdr, ts - m_startTime, mOneFrame);
            }
        }
        else
        {
            // Burst: complete when all frames and all results are in.
            m_locker.lock();
            mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
            captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
            m_locker.unlock();

            if (captureCompleted)
            {
                FireBurstCapture();
            }
        }
    }
}
void NdkCamera::FireBurstCapture()
{
unsigned long long ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
uint32_t ldr;
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mFinalLdr;
if (ldr == 0 && mLdr != ~0)
{
ldr = mLdr;
}
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
m_locker.unlock();
media_status_t mstatus;
std::vector<std::vector<uint8_t> > frames;
for (int idx = 0; idx < expectedTimes; idx++)
{
std::shared_ptr<AImage> spImage = captureFrames[idx];
std::shared_ptr<ACameraMetadata> spResult = captureResults[idx];
auto it = frames.insert(frames.end(), std::vector<uint8_t>());
int32_t width = 0;
int32_t height = 0;
mstatus = AImage_getWidth(spImage.get(), &width);
mstatus = AImage_getHeight(spImage.get(), &height);
int32_t planeCount = 0;
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *planeData = NULL;
int planeDataLen = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
ALOGD("Start Converting Dng");
DngCreator dngCreator(mCharacteristics.get(), spResult.get());
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
ALOGD("End Converting Dng");
}
captureFrames.clear();
onBurstCapture(mCharacteristics, captureResults, ldr, ts - m_startTime, frames);
#ifdef _DEBUG
ALOGD("Frames Size: %u", (uint32_t)frames.size());
#endif
}
// Copy the exposure settings that the preview's AE converged on into a
// still-capture request: if both exposure time and sensitivity are present in
// the preview result, switch the request to manual AE and apply them.
void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult)
{
    ACameraMetadata_const_entry entry = { 0 };

    camera_status_t res = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_EXPOSURE_TIME, &entry);
    const int64_t exposureTime = (res == ACAMERA_OK) ? entry.data.i64[0] : -1;

    entry = { 0 };
    res = ACameraMetadata_getConstEntry(previewResult, ACAMERA_SENSOR_SENSITIVITY, &entry);
    const int32_t sensitivity = (res == ACAMERA_OK) ? entry.data.i32[0] : 0;

    // Only take over manually when both values were actually reported.
    if (exposureTime != -1 && sensitivity != 0)
    {
        uint8_t manualAe = ACAMERA_CONTROL_AE_MODE_OFF;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &manualAe);
        ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
        ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
    }

    /*
    entry = { 0 };
    float focusDistance = NAN;
    res = ACameraMetadata_getConstEntry(result, ACAMERA_LENS_FOCUS_DISTANCE, &entry);
    if (res == ACAMERA_OK)
    {
        focusDistance = *entry.data.f;
    }
    */
}
// Capture-failure callback: log the failure and, if no photo has been taken
// yet, surface it through on_error().
// Fix: the message buffer was 32 bytes while the fixed text
// "CaptureFailed reason= PhotoTaken=" alone is 33 characters, so the message
// was always truncated; the buffer is now 64 bytes.
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
    XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);

    char msg[64] = { 0 };
    snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
    if (!m_photoTaken)
    {
        on_error(msg);
    }
}
// Device-error callback: log the error and, when no photo has been captured
// yet, report it to the subclass via on_error().
void NdkCamera::onError(ACameraDevice* device, int error)
{
    if (error == ACAMERA_ERROR_CAMERA_DEVICE)
    {
        // Device-level failure; no special handling currently.
    }

    XYLOG(XYLOG_SEVERITY_ERROR, "CameraStatus::onError CameraId: %s err=%d PhotoTaken=%d", ACameraDevice_getId(device), error, m_photoTaken ? 1 : 0);

    if (m_photoTaken)
    {
        return;
    }
    std::string msg("NdkCamera error code=");
    msg += std::to_string(error);
    on_error(msg);
}
// Camera-service callback: record that this camera id is now available.
void NdkCamera::onAvailabilityCallback(const char* cameraId)
{
    m_locker.lock();
    m_availableCameras.insert(std::string(cameraId));
    m_locker.unlock();
}
// Camera-service callback: record that this camera id is no longer available.
void NdkCamera::onUnavailabilityCallback(const char* cameraId)
{
    m_locker.lock();
    m_availableCameras.erase(std::string(cameraId));
    m_locker.unlock();
}
// Thread-safe lookup in the availability set maintained by the callbacks above.
bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
{
    m_locker.lock();
    const bool found = (m_availableCameras.count(cameraId) != 0);
    m_locker.unlock();
    return found;
}
// Image format for the still-capture reader.  mFinalOutputFormat may be
// downgraded from RAW to YUV at capture time (see onCaptureCompleted).
int32_t NdkCamera::getOutputFormat() const
{
    return mFinalOutputFormat;
    // return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888;
}
// Number of frames to take per still capture; may be forced to 1 at capture
// time for bright scenes (see onCaptureCompleted).
int32_t NdkCamera::getBurstCaptures() const
{
    return mFinalBurstCaptures;
}
// Reference/legacy implementation kept only as documentation: the entire body
// is commented out, so this function currently does nothing.  The commented
// code shows how a preview (+ optional JPEG) session would be assembled with
// the NDK camera2 API.
// Fix: removed the unused local `media_status_t status;` (nothing outside the
// comment referenced it).
void NdkCamera::CreateSession(ANativeWindow* previewWindow,
                              ANativeWindow* jpgWindow, bool manualPreview,
                              int32_t imageRotation, int32_t width, int32_t height) {
    /*
    // Create output from this app's ANativeWindow, and add into output container
    requests[PREVIEW_REQUEST_IDX].outputNativeWindow = previewWindow;
    requests[PREVIEW_REQUEST_IDX].templateId = TEMPLATE_PREVIEW;
    //requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
    //requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;
    ACaptureSessionOutputContainer_create(&capture_session_output_container);
    for (auto& req : requests) {
        if (!req.outputNativeWindow) continue;
        ANativeWindow_acquire(req.outputNativeWindow);
        ACaptureSessionOutput_create(req.outputNativeWindow, &req.sessionOutput);
        ACaptureSessionOutputContainer_add(capture_session_output_container, req.sessionOutput);
        ACameraOutputTarget_create(req.outputNativeWindow, &req.target);
        ACameraDevice_createCaptureRequest(camera_device, req.templateId, &req.request);
        ACaptureRequest_addTarget(req.request, req.target);
        // To capture images
        media_status_t mstatus = AImageReader_new(width, height, getOutputFormat(), 1, &req.imageReader);
        if (mstatus == AMEDIA_OK) {
            AImageReader_ImageListener listener;
            listener.context = this;
            listener.onImageAvailable = ::onImageAvailable;
            mstatus = AImageReader_setImageListener(req.imageReader, &listener);
        }
        // req.imageReader = createJpegReader();
        status = AImageReader_getWindow(req.imageReader, &req.imageWindow);
        ANativeWindow_acquire(req.outputNativeWindow);
        ACameraOutputTarget_create(req.imageWindow, &req.imageTarget);
        ACaptureRequest_addTarget(req.request, req.imageTarget);
        ACaptureSessionOutput_create(req.imageWindow, &req.imageOutput);
        ACaptureSessionOutputContainer_add(capture_session_output_container, req.imageOutput);
        //ACameraOutputTarget_create(imageWindow, &imageTarget);
        //ACaptureRequest_addTarget(req.request_, imageTarget);
        //ACaptureSessionOutput_create(imageWindow, &imageOutput);
        //ACaptureSessionOutputContainer_add(outputContainer_, imageOutput);
    }
    // Create a capture session for the given preview request
    ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
    camera_capture_session_state_callbacks.context = this;
    camera_capture_session_state_callbacks.onActive = onSessionActive;
    camera_capture_session_state_callbacks.onReady = ::onSessionReady;
    camera_capture_session_state_callbacks.onClosed = onSessionClosed;
    ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
    if (jpgWindow) {
        ACaptureRequest_setEntry_i32(requests[JPG_CAPTURE_REQUEST_IDX].request,
                                     ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
    }
    if (!manualPreview) {
        return;
    }
    //
    // Only preview request is in manual mode, JPG is always in Auto mode
    // JPG capture mode could also be switch into manual mode and control
    // the capture parameters, this sample leaves JPG capture to be auto mode
    // (auto control has better effect than author's manual control)
    //uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
    //ACaptureRequest_setEntry_u8(requests[PREVIEW_REQUEST_IDX].request,
    //                            ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
    //ACaptureRequest_setEntry_i32(requests[PREVIEW_REQUEST_IDX].request,
    //                             ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity));
    //ACaptureRequest_setEntry_i64(requests[PREVIEW_REQUEST_IDX].request,
    //                             ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime));
    */
}
// Convenience overload: preview-only session with default parameters
// (no JPEG sink, auto preview, no rotation, 1920x1080).
void NdkCamera::CreateSession(ANativeWindow* previewWindow) {
    CreateSession(previewWindow, NULL, false, 0, 1920, 1080);
}
// Counterpart of the (disabled) CreateSession above; the whole body is
// commented out, so this is currently a no-op.  Actual teardown lives in
// NdkCamera::close().
void NdkCamera::DestroySession()
{
    /*
    for (auto& req : requests)
    {
        if (!req.outputNativeWindow) continue;
        ACaptureRequest_removeTarget(req.request, req.target);
        ACaptureRequest_free(req.request);
        ACameraOutputTarget_free(req.target);
        ACaptureSessionOutputContainer_remove(capture_session_output_container, req.sessionOutput);
        ACaptureSessionOutput_free(req.sessionOutput);
        ANativeWindow_release(req.outputNativeWindow);
        AImageReader_delete(req.imageReader);
        req.imageReader = nullptr;
    }
    */
}
// Dump a JPEG AImage to disk: JPEG images expose a single plane that already
// contains the complete bitstream, so it is written out verbatim.
void NdkCamera::writeJpegFile(AImage *image, const char* path)
{
    int numPlanes = 0;
    media_status_t mstatus = AImage_getNumberOfPlanes(image, &numPlanes);
    // ASSERT(mstatus == AMEDIA_OK && numPlanes == 1,
    //        "Error: getNumberOfPlanes() planeCount = %d", numPlanes);

    uint8_t* jpegData = nullptr;
    int jpegLen = 0;
    AImage_getPlaneData(image, 0, &jpegData, &jpegLen);

    FILE* fp = fopen(path, "wb");
    if (fp == nullptr)
    {
        return;
    }
    if (jpegData && jpegLen)
    {
        fwrite(jpegData, 1, jpegLen, fp);
    }
    fclose(fp);
}
// Convert a single-plane RAW AImage into an in-memory DNG (using the supplied
// camera characteristics and capture-result metadata) and write it to `path`.
// Nothing is written when the DNG conversion produces no data.
// Fix: removed stray VCS-viewer artifact lines that broke compilation; logic
// is otherwise unchanged.
void NdkCamera::writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path)
{
    int32_t width;
    int32_t height;
    AImage_getWidth(image, &width);
    AImage_getHeight(image, &height);

    int planeCount;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
    // ASSERT(status == AMEDIA_OK && planeCount == 1,
    //        "Error: getNumberOfPlanes() planeCount = %d", planeCount);

    uint8_t *data = nullptr;
    int len = 0;
    AImage_getPlaneData(image, 0, &data, &len);

    DngCreator dngCreator(characteristics, result);
    std::vector<uint8_t> dngFile;
    dngCreator.writeInputBuffer(dngFile, data, len, width, height, 0);
    if (dngFile.empty())
    {
        return;
    }

    FILE *file = fopen(path, "wb");
    if (file) {
        if (data && len)
        {
            fwrite(&dngFile[0], 1, dngFile.size(), file);
        }
        fclose(file);
    }
}
// Converts a YUV_420_888 AImage into a freshly allocated NV21 buffer
// (full-size Y plane followed by interleaved V/U at quarter resolution).
//
// On success, *nv21 points to a buffer of width*height*3/2 bytes that the
// CALLER must release with delete[]; width/height receive the image size.
// Returns false (leaving *nv21 untouched) when the image or its plane data
// is unavailable.
//
// Fixes over the previous version: the function had no return statement
// (undefined behavior for a bool function), the output parameter was
// shadowed by a local and never assigned, and the allocated buffer leaked.
bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height)
{
    if (image == nullptr || nv21 == nullptr)
    {
        return false;
    }

    media_status_t status = AImage_getWidth(image, &width);
    if (status != AMEDIA_OK)
    {
        return false;
    }
    status = AImage_getHeight(image, &height);
    if (status != AMEDIA_OK)
    {
        return false;
    }

    int32_t y_pixelStride = 0;
    int32_t u_pixelStride = 0;
    int32_t v_pixelStride = 0;
    AImage_getPlanePixelStride(image, 0, &y_pixelStride);
    AImage_getPlanePixelStride(image, 1, &u_pixelStride);
    AImage_getPlanePixelStride(image, 2, &v_pixelStride);

    int32_t y_rowStride = 0;
    int32_t u_rowStride = 0;
    int32_t v_rowStride = 0;
    AImage_getPlaneRowStride(image, 0, &y_rowStride);
    AImage_getPlaneRowStride(image, 1, &u_rowStride);
    AImage_getPlaneRowStride(image, 2, &v_rowStride);

    uint8_t *y_data = nullptr;
    uint8_t *u_data = nullptr;
    uint8_t *v_data = nullptr;
    int y_len = 0;
    int u_len = 0;
    int v_len = 0;
    AImage_getPlaneData(image, 0, &y_data, &y_len);
    AImage_getPlaneData(image, 1, &u_data, &u_len);
    AImage_getPlaneData(image, 2, &v_data, &v_len);
    if (y_data == nullptr || u_data == nullptr || v_data == nullptr)
    {
        return false;
    }

    const size_t totalSize = (size_t)width * (size_t)height * 3 / 2;
    uint8_t* out = new uint8_t[totalSize];

    if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 &&
        u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width &&
        v_rowStride == width)
    {
        // The three planes are already laid out as one contiguous NV21 block
        // starting at y_data; a straight byte copy is enough.
        for (size_t i = 0; i < totalSize; i++)
        {
            out[i] = y_data[i];
        }
    }
    else
    {
        // Repack plane by plane, honoring the reported row/pixel strides.
        // Y plane: one byte per pixel.
        uint8_t* yptr = out;
        for (int y = 0; y < height; y++)
        {
            const uint8_t* y_row = y_data + (size_t)y_rowStride * y;
            for (int x = 0; x < width; x++)
            {
                *yptr++ = *y_row;
                y_row += y_pixelStride;
            }
        }
        // Chroma: NV21 interleaves V then U at quarter resolution.
        uint8_t* uvptr = out + (size_t)width * (size_t)height;
        for (int y = 0; y < height / 2; y++)
        {
            const uint8_t* v_row = v_data + (size_t)v_rowStride * y;
            const uint8_t* u_row = u_data + (size_t)u_rowStride * y;
            for (int x = 0; x < width / 2; x++)
            {
                uvptr[0] = *v_row;
                uvptr[1] = *u_row;
                uvptr += 2;
                v_row += v_pixelStride;
                u_row += u_pixelStride;
            }
        }
    }

    *nv21 = out;
    return true;
}
// Snapshots the interesting fields of a completed capture's metadata into
// a CAPTURE_RESULT structure. Each tag is read independently; when a tag is
// absent, the corresponding field falls back to the documented default
// (AE/AWB/AF "inactive" states, -1 exposure, 0 for the remaining values,
// NAN for focus distance). zoomRatio is deliberately left untouched when
// its tag is missing, matching the original behavior.
void NdkCamera::EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult)
{
    ACameraMetadata_const_entry entry = { 0 };

    // Typed readers: fetch a tag, return its first element or a default.
    auto readU8 = [&](uint32_t tag, uint8_t defVal) -> uint8_t {
        entry = { 0 };
        return (ACameraMetadata_getConstEntry(result, tag, &entry) == ACAMERA_OK) ? entry.data.u8[0] : defVal;
    };
    auto readI32 = [&](uint32_t tag, int32_t defVal) -> int32_t {
        entry = { 0 };
        return (ACameraMetadata_getConstEntry(result, tag, &entry) == ACAMERA_OK) ? entry.data.i32[0] : defVal;
    };
    auto readI64 = [&](uint32_t tag, int64_t defVal) -> int64_t {
        entry = { 0 };
        return (ACameraMetadata_getConstEntry(result, tag, &entry) == ACAMERA_OK) ? entry.data.i64[0] : defVal;
    };
    auto readF = [&](uint32_t tag, float defVal) -> float {
        entry = { 0 };
        return (ACameraMetadata_getConstEntry(result, tag, &entry) == ACAMERA_OK) ? entry.data.f[0] : defVal;
    };

    captureResult.aeState = readU8(ACAMERA_CONTROL_AE_STATE, ACAMERA_CONTROL_AE_STATE_INACTIVE);
    captureResult.awbState = readU8(ACAMERA_CONTROL_AWB_STATE, ACAMERA_CONTROL_AWB_STATE_INACTIVE);
    captureResult.afState = readU8(ACAMERA_CONTROL_AF_STATE, ACAMERA_CONTROL_AF_STATE_INACTIVE);
    captureResult.exposureTime = readI64(ACAMERA_SENSOR_EXPOSURE_TIME, -1);
    captureResult.autoFocus = readU8(ACAMERA_CONTROL_AF_MODE, 0);
    captureResult.autoExposure = readU8(ACAMERA_CONTROL_AE_MODE, 0);
    captureResult.frameDuration = readI64(ACAMERA_SENSOR_FRAME_DURATION, 0);
    captureResult.FocusDistance = readF(ACAMERA_LENS_FOCUS_DISTANCE, NAN);

    // Only overwrite zoomRatio when the tag is actually present.
    entry = { 0 };
    if (ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &entry) == ACAMERA_OK)
    {
        captureResult.zoomRatio = entry.data.f[0];
    }

    captureResult.sensitivity = readI32(ACAMERA_SENSOR_SENSITIVITY, 0);
    captureResult.sceneMode = readU8(ACAMERA_CONTROL_SCENE_MODE, 0);
    captureResult.compensation = readI32(ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 0);
}