// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "ndkcamera.h"

#include <string>
#include <thread>

#include <android/log.h>

#include <opencv2/core/core.hpp>

#include "mat.h"
#include "gpu.h"

#include "Camera2Helper.h"
#include <AndroidHelper.h>
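// Static trampolines for the NDK camera, capture session and image reader callbacks.
// Each one either just logs the event or forwards it to the NdkCamera instance that
// was registered as the callback context.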
static void onDisconnected(void* context, ACameraDevice* device)
{
    ALOGW("onDisconnected %p", device);
}

static void onError(void* context, ACameraDevice* device, int error)
{
    std::string msg = "NdkCamera error code=" + std::to_string(error);
    ((NdkCamera*)context)->on_error(msg);
    // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onError %p %d", device, error);
}

static void onImageAvailable(void* context, AImageReader* reader)
{
    ((NdkCamera*)context)->onImageAvailable(reader);
}

static void onSessionActive(void* context, ACameraCaptureSession* session)
{
    ALOGW("onSessionActive %p", session);
}

static void onSessionReady(void* context, ACameraCaptureSession* session)
{
    ALOGW("onSessionReady %p", session);
}

static void onSessionClosed(void* context, ACameraCaptureSession* session)
{
    ALOGW("onSessionClosed %p", session);
}

void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
    ALOGW("onCaptureFailed %p %p %p", session, request, failure);
}
void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t frameNumber)
{
    ALOGW("onCaptureSequenceCompleted %p %d %lld", session, sequenceId, (long long)frameNumber);
}

void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId)
{
    ALOGW("onCaptureSequenceAborted %p %d", session, sequenceId);
}

void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    ((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
    camera_facing = 0;
    camera_orientation = 0;

    m_params = params;
    m_firstFrame = true;
    mWidth = width;
    mHeight = height;

    hdrSupported = false;
    nightModeSupported = false;
    nightPortraitModeSupported = false;

    camera_manager = 0;
    camera_device = 0;
    image_reader = 0;
    image_reader_surface = 0;
    image_reader_target = 0;
    capture_request = 0;
    capture_session_output_container = 0;
    capture_session_output = 0;
    capture_session = 0;
}

NdkCamera::~NdkCamera()
{
    close();

    if (image_reader)
    {
        AImageReader_delete(image_reader);
        image_reader = 0;
    }

    if (image_reader_surface)
    {
        ANativeWindow_release(image_reader_surface);
        image_reader_surface = 0;
    }
}
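// open() drives the whole setup sequence: enumerate the available cameras and pick the
// one matching cameraId, read its characteristics (facing, orientation, stream sizes,
// exposure/AE/sensitivity ranges, scene modes), create an AImageReader for YUV_420_888
// frames, open the device, build a capture request and finally create the capture
// session. Returns 0 on success, 1 on failure.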
int NdkCamera::open(const char* cameraId)
{
    ALOGW("DBG::open %s", cameraId);

    // camera_facing = _camera_facing;

    camera_manager = ACameraManager_create();

    // find the camera matching the requested id
    std::string camera_id;
    bool foundIt = false;
    DisplayDimension disp(mWidth, mHeight);
    DisplayDimension foundRes = disp;
    {
        ACameraIdList* camera_id_list = 0;
        ACameraManager_getCameraIdList(camera_manager, &camera_id_list);

        for (int i = 0; i < camera_id_list->numCameras; ++i)
        {
            const char* id = camera_id_list->cameraIds[i];
            if (strcmp(id, cameraId) != 0)
            {
                continue;
            }

            ACameraMetadata* camera_metadata = 0;
            ACameraManager_getCameraCharacteristics(camera_manager, id, &camera_metadata);

            // query facing
            acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT;
            {
                ACameraMetadata_const_entry e = {0};
                ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e);
                facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0];
            }

            camera_facing = facing;

            // if (camera_facing == 0 && facing != ACAMERA_LENS_FACING_FRONT)
            // {
            //     ACameraMetadata_free(camera_metadata);
            //     continue;
            // }

            // if (camera_facing == 1 && facing != ACAMERA_LENS_FACING_BACK)
            // {
            //     ACameraMetadata_free(camera_metadata);
            //     continue;
            // }

            camera_id = id;

            // query orientation
            int orientation = 0;
            {
                ACameraMetadata_const_entry e = {0};
                ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e);

                orientation = (int)e.data.i32[0];
            }

            camera_orientation = orientation;

            {
                ACameraMetadata_const_entry e = {0};
                ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &e);
                // format of the data: format, width, height, input?, type int32

                // DisplayDimension foundRes(4000, 4000);
                // DisplayDimension maxJPG(0, 0);

                for (int i = 0; i < (int)e.count; i += 4)
                {
                    int32_t input = e.data.i32[i + 3];
                    int32_t format = e.data.i32[i + 0];
                    if (input) continue;

                    if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG)
                    {
                        DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
                        if (!disp.IsSameRatio(res)) continue;

                        if (format == AIMAGE_FORMAT_YUV_420_888 && res > disp)
                        {
                            foundIt = true;
                            foundRes = res;
                        }/* else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
                            maxJPG = res;
                        }*/
                    }
                }
            }
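            // query the supported exposure time range (in nanoseconds), clamped to
            // [kMinExposureTime, kMaxExposureTime]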
            {
                ACameraMetadata_const_entry val = {0};
                camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata,
                                                                       ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE,
                                                                       &val);
                if (status == ACAMERA_OK)
                {
                    exposureRange.min_ = val.data.i64[0];
                    if (exposureRange.min_ < kMinExposureTime)
                    {
                        exposureRange.min_ = kMinExposureTime;
                    }
                    exposureRange.max_ = val.data.i64[1];
                    if (exposureRange.max_ > kMaxExposureTime)
                    {
                        exposureRange.max_ = kMaxExposureTime;
                    }
                    // exposureTime = exposureRange.value(2);
                }
                else
                {
                    ALOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
                    exposureRange.min_ = exposureRange.max_ = 0l;
                    // exposureTime_ = 0l;
                }
            }
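            // query the auto-exposure compensation range (in AE compensation steps)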
            {
                ACameraMetadata_const_entry val = {0};
                camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata,
                                                                       ACAMERA_CONTROL_AE_COMPENSATION_RANGE,
                                                                       &val);
                if (status == ACAMERA_OK)
                {
                    aeCompensationRange.min_ = val.data.i32[0];
                    aeCompensationRange.max_ = val.data.i32[1];
                }
                else
                {
                    ALOGW("Unsupported ACAMERA_CONTROL_AE_COMPENSATION_RANGE");
                    aeCompensationRange.min_ = aeCompensationRange.max_ = 0l;
                }
            }
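            // query the supported ISO sensitivity range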
            {
                ACameraMetadata_const_entry val = {0};
                camera_status_t status = ACameraMetadata_getConstEntry(camera_metadata,
                                                                       ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE,
                                                                       &val);
                if (status == ACAMERA_OK)
                {
                    sensitivityRange.min_ = val.data.i32[0];
                    sensitivityRange.max_ = val.data.i32[1];

                    // sensitivity = sensitivityRange.value(2);
                }
                else
                {
                    ALOGW("Unsupported ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
                    sensitivityRange.min_ = sensitivityRange.max_ = 0;
                    // sensitivity_ = 0;
                }
            }
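            // scan the available scene modes for HDR, night and night-portrait support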
            {
                ACameraMetadata_const_entry e = {0};
                ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e);

                for (int i = 0; i < (int)e.count; i++)
                {
                    if (ACAMERA_CONTROL_SCENE_MODE_HDR == e.data.u8[i])
                    {
                        hdrSupported = true;
                        break;
                    }
                    else if (ACAMERA_CONTROL_SCENE_MODE_NIGHT == e.data.u8[i])
                    {
                        nightModeSupported = true;
                    }
                    else if (ACAMERA_CONTROL_SCENE_MODE_NIGHT_PORTRAIT == e.data.u8[i])
                    {
                        nightPortraitModeSupported = true;
                    }
                }
            }

            ACameraMetadata_free(camera_metadata);

            break;
        }

        ACameraManager_deleteCameraIdList(camera_id_list);
    }

    if (camera_id.empty() || !foundIt)
    {
        return 1;
    }

    // TODO: device-specific patch: hard-coded facing/orientation for camera ids "1" and "2"
    if (camera_id == "1")
    {
        camera_facing = 1;
    }
    if (camera_id == "2")
    {
        camera_orientation += 180;
    }

    mCameraId = camera_id;

    camera_status_t res = ACAMERA_OK;

    // setup imagereader and its surface
    {
        AImageReader_new(foundRes.width(), foundRes.height(), AIMAGE_FORMAT_YUV_420_888, /*maxImages*/ 2, &image_reader);

        AImageReader_ImageListener listener;
        listener.context = this;
        listener.onImageAvailable = ::onImageAvailable;

        AImageReader_setImageListener(image_reader, &listener);

        AImageReader_getWindow(image_reader, &image_reader_surface);

        // ANativeWindow_setBuffersGeometry(image_reader_surface, width, height, WINDOW_FORMAT_RGBX_8888);

        ANativeWindow_acquire(image_reader_surface);
    }

    ALOGW("open %s %d", camera_id.c_str(), camera_orientation);

    // open camera
    {
        ACameraDevice_StateCallbacks camera_device_state_callbacks;
        camera_device_state_callbacks.context = this;
        camera_device_state_callbacks.onDisconnected = onDisconnected;
        camera_device_state_callbacks.onError = onError;

        res = ACameraManager_openCamera(camera_manager, camera_id.c_str(), &camera_device_state_callbacks, &camera_device);
        if (res != ACAMERA_OK)
        {
            return 1;
        }
    }

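    // short pause before building the capture request, presumably to give the freshly
    // opened device time to settle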
    std::this_thread::sleep_for(std::chrono::milliseconds(128));

    // capture request
    {
        res = ACameraDevice_createCaptureRequest(camera_device, TEMPLATE_STILL_CAPTURE, &capture_request);

        int32_t fpsRange[2] = {10, 30};
        res = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);

        if (m_params.autoExposure)
        {
            uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
            res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
            // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);

            uint8_t aeLockOff = ACAMERA_CONTROL_AE_LOCK_OFF;
            // ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLockOff);
        }
        else
        {
            uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
            res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);

            if (hdrSupported && m_params.hdrMode)
            {
                uint8_t hdrMode = ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10; // ACAMERA_CONTROL_SCENE_MODE_HDR
                // res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE_HDR, 1, &hdrMode);
            }
            if (m_params.sensibility > 0)
            {
                int32_t sensitivity = m_params.sensibility;
                res = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
            }
            if (m_params.exposureTime > 0)
            {
                // ACAMERA_SENSOR_EXPOSURE_TIME expects nanoseconds
                int64_t exposureTime = m_params.exposureTime * 1000000;
                res = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
            }
        }

        if (afSupported && m_params.autoFocus)
        {
            // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
            uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
            res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
        }

        uint8_t awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
        // res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);

        if (hdrSupported && m_params.hdrMode)
        {
            uint8_t hdrMode = ACAMERA_CONTROL_SCENE_MODE_HDR;
            // the scene mode value is written into the ACAMERA_CONTROL_SCENE_MODE tag
            res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE, 1, &hdrMode);
        }

        if (m_params.nightMode)
        {
            if (nightModeSupported)
            {
                uint8_t modeEnabled = 1;
                // res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE_NIGHT, 1, &modeEnabled);
            }
            if (nightPortraitModeSupported)
            {
                uint8_t modeEnabled = 1;
                // res = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, 1, &modeEnabled);
            }
        }

        res = ACameraOutputTarget_create(image_reader_surface, &image_reader_target);
        res = ACaptureRequest_addTarget(capture_request, image_reader_target);
    }

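    // create the capture session: wrap the image reader surface in a session output
    // container and submit a single one-shot capture (the repeating-request variant is
    // left commented out below)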
    // capture session
    {
        ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks;
        camera_capture_session_state_callbacks.context = this;
        camera_capture_session_state_callbacks.onActive = onSessionActive;
        camera_capture_session_state_callbacks.onReady = onSessionReady;
        camera_capture_session_state_callbacks.onClosed = onSessionClosed;

        res = ACaptureSessionOutputContainer_create(&capture_session_output_container);

        ACaptureSessionOutput_create(image_reader_surface, &capture_session_output);

        ACaptureSessionOutputContainer_add(capture_session_output_container, capture_session_output);

        ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);

        ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks;
        camera_capture_session_capture_callbacks.context = this;
        camera_capture_session_capture_callbacks.onCaptureStarted = 0;
        camera_capture_session_capture_callbacks.onCaptureProgressed = 0;
        camera_capture_session_capture_callbacks.onCaptureCompleted = ::onCaptureCompleted;
        camera_capture_session_capture_callbacks.onCaptureFailed = onCaptureFailed;
        camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted;
        camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted;
        camera_capture_session_capture_callbacks.onCaptureBufferLost = 0;

        // ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, nullptr);
        ACameraCaptureSession_capture(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, nullptr);
    }

    return 0;
}

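// close() tears everything down in roughly the reverse order of open(): stop and close
// the session, close the device, then free the session outputs, request, output target
// and finally the camera manager.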
void NdkCamera::close()
{
    if (capture_session)
    {
        ACameraCaptureSession_stopRepeating(capture_session);
        ACameraCaptureSession_close(capture_session);
        capture_session = 0;
    }

    if (camera_device)
    {
        ACameraDevice_close(camera_device);
        camera_device = 0;
    }

    if (capture_session_output_container)
    {
        ACaptureSessionOutputContainer_free(capture_session_output_container);
        capture_session_output_container = 0;
    }

    if (capture_session_output)
    {
        ACaptureSessionOutput_free(capture_session_output);
        capture_session_output = 0;
    }

    if (capture_request)
    {
        ACaptureRequest_free(capture_request);
        capture_request = 0;
    }

    if (image_reader_target)
    {
        ACameraOutputTarget_free(image_reader_target);
        image_reader_target = 0;
    }

    if (camera_manager)
    {
        ACameraManager_delete(camera_manager);
        camera_manager = 0;

        ALOGW("DBG::close %s", mCameraId.c_str());
    }
}

void NdkCamera::onImageAvailable(AImageReader* reader)
{
    ALOGW("onImageAvailable %p", reader);

    AImage* image = 0;
    media_status_t status = AImageReader_acquireLatestImage(reader, &image);

    if (status != AMEDIA_OK)
    {
        // error
        return;
    }

    if (m_firstFrame)
    {
        // AImage_delete(image);
        // m_firstFrame = false;
        // return;
    }

    int32_t format;
    AImage_getFormat(image, &format);

    // ASSERT(format == AIMAGE_FORMAT_YUV_420_888);

    int32_t width = 0;
    int32_t height = 0;
    AImage_getWidth(image, &width);
    AImage_getHeight(image, &height);

    int32_t y_pixelStride = 0;
    int32_t u_pixelStride = 0;
    int32_t v_pixelStride = 0;
    AImage_getPlanePixelStride(image, 0, &y_pixelStride);
    AImage_getPlanePixelStride(image, 1, &u_pixelStride);
    AImage_getPlanePixelStride(image, 2, &v_pixelStride);

    int32_t y_rowStride = 0;
    int32_t u_rowStride = 0;
    int32_t v_rowStride = 0;
    AImage_getPlaneRowStride(image, 0, &y_rowStride);
    AImage_getPlaneRowStride(image, 1, &u_rowStride);
    AImage_getPlaneRowStride(image, 2, &v_rowStride);

    uint8_t* y_data = 0;
    uint8_t* u_data = 0;
    uint8_t* v_data = 0;
    int y_len = 0;
    int u_len = 0;
    int v_len = 0;
    AImage_getPlaneData(image, 0, &y_data, &y_len);
    AImage_getPlaneData(image, 1, &u_data, &u_len);
    AImage_getPlaneData(image, 2, &v_data, &v_len);

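    // fast path: if the planes are already laid out as one contiguous NV21 buffer
    // (tightly packed Y plane followed by interleaved VU), hand the Y pointer straight
    // to on_image(); otherwise repack the planes into a temporary NV21 buffer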
    if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
    {
        // already nv21 :)
        on_image((unsigned char*)y_data, (int)width, (int)height);
    }
    else
    {
        // construct nv21
        unsigned char* nv21 = new unsigned char[width * height + width * height / 2];
        {
            // Y
            unsigned char* yptr = nv21;
            for (int y = 0; y < height; y++)
            {
                const unsigned char* y_data_ptr = y_data + y_rowStride * y;
                for (int x = 0; x < width; x++)
                {
                    yptr[0] = y_data_ptr[0];
                    yptr++;
                    y_data_ptr += y_pixelStride;
                }
            }

            // UV
            unsigned char* uvptr = nv21 + width * height;
            for (int y = 0; y < height / 2; y++)
            {
                const unsigned char* v_data_ptr = v_data + v_rowStride * y;
                const unsigned char* u_data_ptr = u_data + u_rowStride * y;
                for (int x = 0; x < width / 2; x++)
                {
                    uvptr[0] = v_data_ptr[0];
                    uvptr[1] = u_data_ptr[0];
                    uvptr += 2;
                    v_data_ptr += v_pixelStride;
                    u_data_ptr += u_pixelStride;
                }
            }
        }

        on_image((unsigned char*)nv21, (int)width, (int)height);

        delete[] nv21;
    }

    AImage_delete(image);
}

void NdkCamera::on_error(const std::string& msg)
{
}

bool NdkCamera::on_image(cv::Mat& rgb)
{
    return false;
}

void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height)
{
    ALOGW("nv21 size: %d x %d", nv21_width, nv21_height);

    // rotate nv21
    int w = 0;
    int h = 0;
    int rotate_type = 0;
    // TODO !!!???
    // int co = camera_orientation > 0 ? camera_orientation + 90 : camera_orientation;
    int co = (camera_orientation + m_params.orientation * 90) % 360;
    // int co = 0;
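    // map the combined orientation to an ncnn::kanna_rotate type (EXIF-style codes 1-8;
    // the mirrored variants are used for the front-facing camera, camera_facing == 0)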
    if (co == 0)
    {
        w = nv21_width;
        h = nv21_height;
        rotate_type = camera_facing == 0 ? 2 : 1;
    }
    if (co == 90)
    {
        w = nv21_height;
        h = nv21_width;
        rotate_type = camera_facing == 0 ? 5 : 6;
    }
    if (co == 180)
    {
        w = nv21_width;
        h = nv21_height;
        rotate_type = camera_facing == 0 ? 4 : 3;
    }
    if (co == 270)
    {
        w = nv21_height;
        h = nv21_width;
        rotate_type = camera_facing == 0 ? 7 : 8;
    }

    cv::Mat nv21_rotated(h + h / 2, w, CV_8UC1);
    ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);

#ifdef _DEBUG
    if (nv21_rotated.empty())
    {
        int aa = 0;
    }
#endif

    // nv21_rotated to rgb
    cv::Mat rgb(h, w, CV_8UC3);
    // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
    ncnn::yuv420sp2rgb_nv12(nv21_rotated.data, w, h, rgb.data);

    // cv::Mat rgb(h, w, CV_8UC3);
    // ncnn::yuv420sp2rgb_nv12(nv21, w, h, rgb.data);

    on_image(rgb);
}

void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result)
{
    // CALL_REQUEST(setEntry_i64(requests_[PREVIEW_REQUEST_IDX].request_,
    //                           ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime_));

    // ACameraMetadata_getConstEntry(result, )
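    // read back the exposure time and AE mode that were actually used for this frame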
    ACameraMetadata_const_entry val = {0};
    camera_status_t status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
    int64_t exTime = val.data.i64[0];

    val = {0};
    status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_MODE, &val);
    uint8_t aeMode = val.data.u8[0];
    // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_);

    ALOGD("onCaptureCompleted EXPOSURE_TIME=%lld, camera id=%s, AE=%s", (long long)exTime, mCameraId.c_str(), ((aeMode == 1) ? "ON" : "OFF"));

    // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureCompleted %p %p %p", session, request, result);
}