#include "TerminalDevice.h"
|
|
|
|
|
/*
|
|
|
|
|
* Copyright 2018 The Android Open Source Project
|
|
|
|
|
*
|
|
|
|
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
* you may not use this file except in compliance with the License.
|
|
|
|
|
* You may obtain a copy of the License at
|
|
|
|
|
*
|
|
|
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
*
|
|
|
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
* See the License for the specific language governing permissions and
|
|
|
|
|
* limitations under the License.
|
|
|
|
|
*
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
#define LOG_TAG "CameraTestHelpers"
|
|
|
|
|
|
|
|
|
|
#include "PhoneDevice2.h"
|
|
|
|
|
|
|
|
|
|
#include <opencv2/opencv.hpp>
|
|
|
|
|
#include <opencv2/core.hpp>
|
|
|
|
|
#include <opencv2/imgproc.hpp>
|
|
|
|
|
// #include <opencv2/objdetect.hpp>
|
|
|
|
|
// #include <opencv2/features2d.hpp>
|
|
|
|
|
|
|
|
|
|
// #include <opencv2/core/types.hpp>
|
|
|
|
|
#include <opencv2/core/core.hpp>
|
|
|
|
|
#include <opencv2/imgproc/imgproc.hpp>
|
|
|
|
|
|
|
|
|
|
#include <android/log.h>
|
|
|
|
|
|
|
|
|
|
#include <AndroidHelper.h>
|
|
|
|
|
|
|
|
|
|
extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);

// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges are normalized to eight bits.
static const int kMaxChannelValue = 262143;

static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
    nY -= 16;
    nU -= 128;
    nV -= 128;
    if (nY < 0) nY = 0;

    // This is the floating point equivalent. We do the conversion in integer
    // because some Android devices do not have floating point in hardware.
    // nR = (int)(1.164 * nY + 1.596 * nV);
    // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
    // nB = (int)(1.164 * nY + 2.018 * nU);

    int nR = (int)(1192 * nY + 1634 * nV);
    int nG = (int)(1192 * nY - 833 * nV - 400 * nU);
    int nB = (int)(1192 * nY + 2066 * nU);

    nR = std::min(kMaxChannelValue, std::max(0, nR));
    nG = std::min(kMaxChannelValue, std::max(0, nG));
    nB = std::min(kMaxChannelValue, std::max(0, nB));

    nR = (nR >> 10) & 0xff;
    nG = (nG >> 10) & 0xff;
    nB = (nB >> 10) & 0xff;

    return 0xff000000 | (nR << 16) | (nG << 8) | nB;
}
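
// Worked example of the fixed-point math above: a neutral gray sample with
// Y=128, U=128, V=128 gives nY=112 and nU=nV=0, so each channel becomes
// (1192 * 112) >> 10 = 130 (0x82) and the function returns 0xFF828282,
// an opaque gray pixel in 0xAARRGGBB layout.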

CPhoneDevice2::CPhoneDevice2(JavaVM* vm, jobject service)
{
    m_vm = vm;
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        // env is unusable here; bail out instead of dereferencing a null pointer.
        return;
    }
    m_javaService = env->NewGlobalRef(service);

    jclass classService = env->GetObjectClass(m_javaService);
    // Java-side signatures: "(JI)Z" = boolean(long, int), "(I)V" = void(int), "(J)Z" = boolean(long).
    mRegisterTimerMid = env->GetMethodID(classService, "registerTimer", "(JI)Z");
    mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(I)V");
    mUnregisterTimerMid = env->GetMethodID(classService, "unregisterTimer", "(J)Z");
    mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");

    env->DeleteLocalRef(classService);

    if (attached)
    {
        vm->DetachCurrentThread();
    }

    m_timerUidFeed = time(NULL);
    presentRotation_ = 0;
}

CPhoneDevice2::~CPhoneDevice2()
{
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        // env is unusable; skip the JNI cleanup rather than dereference NULL.
        m_javaService = NULL;
        return;
    }
    env->DeleteGlobalRef(m_javaService);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
    m_javaService = NULL;
}

void CPhoneDevice2::SetListener(IListener* listener)
{
    m_listener = listener;
}

bool CPhoneDevice2::UpdateTime(time_t ts)
{
    JNIEnv* env = NULL;
    jboolean ret = JNI_FALSE;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return false;
    }
    jlong timeInMillis = ((jlong)ts) * 1000;
    ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }

    return (ret == JNI_TRUE);
}

bool CPhoneDevice2::Reboot()
{
    return false;
}

IDevice::timer_uid_t CPhoneDevice2::RegisterTimer(unsigned int timerType, unsigned int timeout)
{
    IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);

    ALOGI("NDK RegTimer: uid=%lld Type=%u timeout=%u", uid, timerType, timeout);

    JNIEnv* env = NULL;
    jboolean ret = JNI_FALSE;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    ret = env->CallBooleanMethod(m_javaService, mRegisterTimerMid, (jlong)uid, (jint)timeout);

    if (attached)
    {
        m_vm->DetachCurrentThread();
    }

    if (ret == JNI_TRUE)
    {
        unsigned long val = timerType;
        mTimers.insert(mTimers.end(), std::pair<IDevice::timer_uid_t, unsigned long>(uid, val));
        return uid;
    }
    return 0;
}
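
// Note on mTimers: each entry packs the timer type into the low 32 bits of the
// stored value; FireTimer() below keeps a fire counter in the high 32 bits and
// re-packs it on every expiry.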

bool CPhoneDevice2::UnregisterTimer(IDevice::timer_uid_t uid)
{
    JNIEnv* env = NULL;
    jboolean ret = JNI_FALSE;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return false;
    }
    ret = env->CallBooleanMethod(m_javaService, mUnregisterTimerMid, (jlong)uid);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }

    if (ret == JNI_TRUE)
    {
        mTimers.erase(uid);
        return true;
    }
    return false;
}

bool CPhoneDevice2::FireTimer(timer_uid_t uid)
{
    std::map<IDevice::timer_uid_t, unsigned long>::iterator it = mTimers.find(uid);
    if (it == mTimers.end())
    {
        return false;
    }

    unsigned long timerType = it->second & 0xFFFFFFFF;
    unsigned long times = (it->second & 0xFFFFFFFF00000000) >> 32;
    times++;

    if (timerType != 100)
    {
        int aa = 0;   // no-op; handy spot for a debugger breakpoint
    }
    it->second = timerType | (times << 32);

    if (m_listener == NULL)
    {
        return false;
    }

    m_listener->OnTimeout(uid, timerType, NULL, times);

    return true;
}

IDevice::timer_uid_t CPhoneDevice2::RegisterHeartbeat(unsigned int timerType, unsigned int timeout)
{
    IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);

    JNIEnv* env = NULL;
    jboolean ret = JNI_FALSE;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }

    return uid;
}
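
// TakePhoto() below drives a one-shot still capture through the NDK Camera2 API:
// enumerate cameras, read the characteristics of the requested channel, pick a
// compatible YUV_420_888 size, open the device, create an AImageReader whose
// surface serves as the capture target, build a still-capture request and session,
// and fire a single capture; the result arrives in OnImageCallback()/ImageCallback().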

bool CPhoneDevice2::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const string& path)
{
    ALOGI("TAKE_PHOTO: CH=%u PR=%u\n", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset);
    mPhotoInfo = photoInfo;
    mPath = path;

    mDisplayDimension = DisplayDimension(photoInfo.width, photoInfo.height);

    ALOGE("Image Buffer Size: %d", photoInfo.width * photoInfo.height * 4);
    imageBuffer_ = (uint8_t*)malloc(photoInfo.width * photoInfo.height * 4);
    AASSERT(imageBuffer_ != nullptr, "Failed to allocate imageBuffer_");

    int cameraId = (int)photoInfo.channel - 1;

    ACameraIdList *cameraIdList = NULL;
    ACameraMetadata *cameraMetadata = NULL;

    const char *selectedCameraId = NULL;
    camera_status_t camera_status = ACAMERA_OK;
    ACameraManager *cameraManager = ACameraManager_create();

    camera_status = ACameraManager_getCameraIdList(cameraManager, &cameraIdList);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera id list (reason: %d)\n", camera_status);
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }

    if (cameraIdList->numCameras < 1 ) {
        ALOGI("No camera device detected.\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }

    if (cameraIdList->numCameras <= cameraId ) {
        ALOGI("No required camera device %d detected.\n", cameraId);
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }

    selectedCameraId = cameraIdList->cameraIds[cameraId];

    ALOGI("Trying to open Camera2 (id: %s, num of camera : %d)\n", selectedCameraId,
          cameraIdList->numCameras);

    camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId,
                                                            &cameraMetadata);

    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
    }

    ACameraMetadata_const_entry face, orientation;
    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_LENS_FACING, &face);
    uint32_t cameraFacing_ = static_cast<uint32_t>(face.data.u8[0]);

    if (cameraFacing_ == ACAMERA_LENS_FACING_FRONT)
    {
        int aa = 0;   // no-op; handy spot for a debugger breakpoint
    }

    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_SENSOR_ORIENTATION, &orientation);

    ALOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
    uint32_t cameraOrientation_ = orientation.data.i32[0];
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270)
    {
        mDisplayDimension.Flip();
    }

    ImageFormat resCap = {(int32_t)photoInfo.width, (int32_t)photoInfo.height, AIMAGE_FORMAT_YUV_420_888};
    MatchCaptureSizeRequest(cameraManager, selectedCameraId, photoInfo.width, photoInfo.height, cameraOrientation_, &resCap);

    deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
    deviceStateCallbacks.onError = camera_device_on_error;

    camera_status = ACameraManager_openCamera(cameraManager, selectedCameraId,
                                              &deviceStateCallbacks, &cameraDevice);

    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to open camera device (id: %s)\n", selectedCameraId);
    }

    camera_status = ACameraDevice_createCaptureRequest(cameraDevice, TEMPLATE_STILL_CAPTURE/*TEMPLATE_PREVIEW*/,
                                                       &captureRequest);

    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to create preview capture request (id: %s)\n", selectedCameraId);
    }

    ACaptureSessionOutputContainer_create(&captureSessionOutputContainer);

    captureSessionStateCallbacks.onReady = capture_session_on_ready;
    captureSessionStateCallbacks.onActive = capture_session_on_active;
    captureSessionStateCallbacks.onClosed = capture_session_on_closed;

    ACameraMetadata_free(cameraMetadata);
    ACameraManager_deleteCameraIdList(cameraIdList);
    ACameraManager_delete(cameraManager);

    media_status_t status;
    // status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_YUV_420_888, 5, &mAImageReader);
    status = AImageReader_new(resCap.width, resCap.height, resCap.format, 5, &mAImageReader);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_new error\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }

    AImageReader_ImageListener listener{
            .context = this,
            .onImageAvailable = OnImageCallback,
    };
    AImageReader_setImageListener(mAImageReader, &listener);

    //ANativeWindow *mNativeWindow;
    status = AImageReader_getWindow(mAImageReader, &theNativeWindow);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_getWindow error\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }

    ALOGI("Surface is prepared in %p.\n", theNativeWindow);
    // theNativeWindow

    ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
    ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);

    ACaptureSessionOutput_create(theNativeWindow, &sessionOutput);
    ACaptureSessionOutputContainer_add(captureSessionOutputContainer, sessionOutput);

    ACameraDevice_createCaptureSession(cameraDevice, captureSessionOutputContainer,
                                       &captureSessionStateCallbacks, &captureSession);

    // ACameraCaptureSession_setRepeatingRequest(captureSession, NULL, 1, &captureRequest, NULL);
    ACameraCaptureSession_capture(captureSession, NULL, 1, &captureRequest, NULL);
    ALOGI("Surface is prepared in here.\n");

    return true;
}
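
// GetSessionListener() bundles the same capture_session_on_* callbacks into a
// reusable static struct; TakePhoto() above fills captureSessionStateCallbacks
// directly instead of going through this helper.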

ACameraCaptureSession_stateCallbacks* CPhoneDevice2::GetSessionListener()
{
    static ACameraCaptureSession_stateCallbacks sessionListener = {
            .context = this,
            .onClosed = CPhoneDevice2::capture_session_on_closed,
            .onReady = CPhoneDevice2::capture_session_on_ready,
            .onActive = CPhoneDevice2::capture_session_on_active,
    };
    return &sessionListener;
}

void CPhoneDevice2::ImageCallback(AImageReader *reader)
{
    bool res = false;
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    if (status == AMEDIA_OK && image)
    {
        int32_t srcFormat = -1;
        AImage_getFormat(image, &srcFormat);
        AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
        int32_t srcPlanes = 0;
        AImage_getNumberOfPlanes(image, &srcPlanes);
        AASSERT(srcPlanes == 3, "Is not 3 planes");

        AImageCropRect srcRect;
        AImage_getCropRect(image, &srcRect);
        int32_t width = srcRect.right - srcRect.left;
        int32_t height = srcRect.bottom - srcRect.top;

        // int32_t height = srcRect.right - srcRect.left;
        // int32_t width = srcRect.bottom - srcRect.top;

        uint8_t *yPixel = nullptr;
        uint8_t *uPixel = nullptr;
        uint8_t *vPixel = nullptr;

        int32_t yLen = 0;
        int32_t uLen = 0;
        int32_t vLen = 0;

        AImage_getPlaneData(image, 0, &yPixel, &yLen);
        AImage_getPlaneData(image, 1, &uPixel, &uLen);
        AImage_getPlaneData(image, 2, &vPixel, &vLen);

        // Pack Y followed by plane 2 and plane 1. On devices where the chroma
        // planes are interleaved with a pixel stride of 2 (the common case),
        // this yields an NV21-style VU-interleaved block, matching the
        // COLOR_YUV2RGB_NV21 conversion below.
        uint8_t * data = new uint8_t[yLen + vLen + uLen];
        memcpy(data, yPixel, yLen);
        memcpy(data + yLen, vPixel, vLen);
        memcpy(data + yLen + vLen, uPixel, uLen);

        cv::Mat mYUV = cv::Mat(((height * 3) >> 1), width, CV_8UC1, data);

        // cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);

        // cv::Mat mYUV = cv::Mat(height, yStride, CV_8UC4, data);

        cv::Mat _yuv_rgb_img(height, width, CV_8UC3), _yuv_gray_img;
        cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);

        cv::rotate(_yuv_rgb_img, _yuv_rgb_img, cv::ROTATE_180);

        // cv::Mat rgbMat(height, width, CV_8UC3);
        // Convert YUV420 to RGB with cv::cvtColor
        // cvtColor(_yuv_rgb_img, rgbMat, cv::COLOR_YUV2RGB_I420);

        // cv::Mat mat = cv::Mat(buffer.height, buffer.stride, CV_8UC4, buffer.bits);

        const char *str = "OSD";
        putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 0, 0), 4, cv::LINE_AA);
        putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(255, 255, 255), 2, cv::LINE_AA);

        vector<int> compression_params;
        compression_params.push_back(cv::IMWRITE_JPEG_QUALITY);
        compression_params.push_back(80);

        res = cv::imwrite(mPath.c_str(), _yuv_rgb_img, compression_params);

        // ANativeWindow_unlockAndPost(theNativeWindow);

        delete[] data;   // the packed YUV copy is no longer needed after conversion

        if (res)
        {
            int aa = 0;   // no-op; handy spot for a debugger breakpoint
        }

        // res = WriteFile(image, GetFileName() + ".org.jpg");
        AImage_delete(image);
        // delete pThis;

        TakePhotoCb(res, mPhotoInfo, mPath, time(NULL));
    }
}

void CPhoneDevice2::OnImageCallback(void *ctx, AImageReader *reader)
{
    CPhoneDevice2* pThis = reinterpret_cast<CPhoneDevice2*>(ctx);
    if (pThis != NULL)
    {
        pThis->ImageCallback(reader);
    }
}
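
// WriteFile() below expects a single-plane image (for example an
// AIMAGE_FORMAT_JPEG capture), whose plane 0 already holds a complete encoded
// file that can be written out verbatim.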

bool CPhoneDevice2::WriteFile(AImage *image, const string& path)
{
    int planeCount = 0;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);

    ALOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
    if (!(status == AMEDIA_OK && planeCount == 1))
    {
        ALOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
        return false;
    }

    uint8_t *data = nullptr;
    int len = 0;
    AImage_getPlaneData(image, 0, &data, &len);

    bool res = false;
    FILE *file = fopen(path.c_str(), "wb");
    if (file && data && len)
    {
        fwrite(data, 1, len, file);
        fclose(file);

        ALOGI("Capture: %s", path.c_str());

        res = true;
    }
    else
    {
        if (file)
            fclose(file);
    }

    return res;
}

bool CPhoneDevice2::WriteFile(CPhoneDevice2* pThis, AImage *image)
{
    return pThis->WriteFile(image, pThis->GetFileName());
}

std::string CPhoneDevice2::GetFileName() const
{
    return mPath;
}

/*
const char *selectedCameraId = NULL;

ACameraManager *cameraManager = ACameraManager_create();
*/
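
// For reference, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entries come in
// groups of four int32 values (format, width, height, isInput); for example the
// quadruple {AIMAGE_FORMAT_YUV_420_888, 1920, 1080, 0} describes a 1080p YUV
// output stream. MatchCaptureSizeRequest() below walks these entries.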

bool CPhoneDevice2::MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
                                            ImageFormat* resCap) {
    DisplayDimension disp(resCap->width, resCap->height);
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
        disp.Flip();
    }

    ACameraMetadata* metadata;
    camera_status_t camera_status = ACAMERA_OK;
    camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId, &metadata);
    ACameraMetadata_const_entry entry;
    camera_status = ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
    // format of the data: format, width, height, input?, type int32
    bool foundIt = false;
    DisplayDimension foundRes(16384, 16384);
    DisplayDimension maxJPG(0, 0);

    for (int i = 0; i < entry.count; i += 4) {
        int32_t input = entry.data.i32[i + 3];
        int32_t format = entry.data.i32[i + 0];
        if (input) continue;

        if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
            DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
            ALOGI("Camera Resolution: %d x %d fmt=%d", res.width(), res.height(), format);
            if (!disp.IsSameRatio(res)) continue;
            if (format == AIMAGE_FORMAT_YUV_420_888 && res > disp) {
                foundIt = true;
                foundRes = res;
            } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
                maxJPG = res;
            }
        }
    }

    if (foundIt) {
        // resView->width = foundRes.org_width();
        // resView->height = foundRes.org_height();
        resCap->width = foundRes.org_width();
        resCap->height = foundRes.org_height();
    } else {
        ALOGI("Did not find any compatible camera resolution, falling back to the requested size");
        resCap->width = disp.org_width();
        resCap->height = disp.org_height();
        // *resCap = *resView;
    }
    // resView->format = AIMAGE_FORMAT_YUV_420_888;
    // resCap->format = AIMAGE_FORMAT_JPEG;
    ACameraMetadata_free(metadata);   // release the characteristics snapshot
    return foundIt;
}

/**
 * Convert yuv image inside AImage into ANativeWindow_Buffer
 * ANativeWindow_Buffer format is guaranteed to be
 * WINDOW_FORMAT_RGBX_8888 or WINDOW_FORMAT_RGBA_8888
 * @param buf a {@link ANativeWindow_Buffer } instance, destination of
 *            image conversion
 * @param image a {@link AImage} instance, source of image conversion.
 *            it will be deleted via {@link AImage_delete}
 */
bool CPhoneDevice2::DisplayImage(ANativeWindow_Buffer *buf, AImage *image) {
    AASSERT(buf->format == WINDOW_FORMAT_RGBX_8888 ||
            buf->format == WINDOW_FORMAT_RGBA_8888,
            "Not supported buffer format");

    int32_t srcFormat = -1;
    AImage_getFormat(image, &srcFormat);
    AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
    int32_t srcPlanes = 0;
    AImage_getNumberOfPlanes(image, &srcPlanes);
    AASSERT(srcPlanes == 3, "Is not 3 planes");

    switch (presentRotation_) {
        case 0:
            PresentImage(buf, image);
            break;
        case 90:
            PresentImage90(buf, image);
            break;
        case 180:
            PresentImage180(buf, image);
            break;
        case 270:
            PresentImage270(buf, image);
            break;
        default:
            AASSERT(0, "NOT recognized display rotation: %d", presentRotation_);
    }

    AImage_delete(image);
    image = nullptr;

    return true;
}

/*
 * PresentImage()
 * Converting yuv to RGB
 * No rotation: (x,y) --> (x, y)
 * Refer to:
 * https://mathbits.com/MathBits/TISection/Geometry/Transformations2.htm
 */
void CPhoneDevice2::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);

    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    // AImage_getPlaneData() returns pointers into the AImage's own plane
    // buffers, so the imageBuffer_ assignments below are immediately overwritten.
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);

    int32_t rowStride;
    AImage_getPlaneRowStride(image, 0, &rowStride);

    int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));

    uint32_t *out = static_cast<uint32_t *>(buf->bits);

    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;

        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);

        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
        }
        out += buf->stride;
    }
}
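
// In all four Present* variants the chroma indexing follows the 2x2
// subsampling of YUV_420_888: the U/V row is (y >> 1) scaled by uvStride and
// the column is (x >> 1) scaled by the plane's pixel stride, so four Y samples
// share one (U, V) pair.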

/*
 * PresentImage90()
 * Converting YUV to RGB
 * Rotate image anti-clockwise 90 degrees -- (x, y) --> (-y, x)
 */
void CPhoneDevice2::PresentImage90(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);

    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);

    int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));

    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    out += height - 1;
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;

        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);

        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            // [x, y]--> [-y, x]
            int testb = pU[uv_offset];
            int testc = pV[uv_offset];
            int testA = pY[x];
            out[x * buf->stride] = YUV2RGB(testA, testb, testc);
        }
        out -= 1;  // move to the next column
    }
}

/*
 * PresentImage180()
 * Converting yuv to RGB
 * Rotate image 180 degrees: (x, y) --> (-x, -y)
 */
void CPhoneDevice2::PresentImage180(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);

    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);

    int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));

    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    out += (height - 1) * buf->stride;
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;

        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);

        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            // mirror image since we are using front camera
            out[width - 1 - x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
            // out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
        }
        out -= buf->stride;
    }
}

/*
 * PresentImage270()
 * Converting image from YUV to RGB
 * Rotate image counter-clockwise 270 degrees: (x, y) --> (y, x)
 */
void CPhoneDevice2::PresentImage270(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);

    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);

    int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));

    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;

        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);

        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            int testb = pU[uv_offset];
            int testc = pV[uv_offset];
            int testA = pY[x];
            out[(width - 1 - x) * buf->stride] =
                    YUV2RGB(testA, testb, testc);
        }
        out += 1;  // move to the next column
    }
}

/*
bool CPhoneDevice2::SendBroadcastMessage(String16 action, int value)
{
    TM_INFO_LOG("sendBroadcastMessage(): Action: %s, Value: %d ", action.string(), value);
    sp <IServiceManager> sm = defaultServiceManager();
    sp <IBinder> am = sm->getService(String16("activity"));
    if (am != NULL) {
        Parcel data, reply;
        data.writeInterfaceToken(String16("android.app.IActivityManager"));
        data.writeStrongBinder(NULL);
        // intent begin
        data.writeString16(action); // action
        data.writeInt32(0); // URI data type
        data.writeString16(NULL, 0); // type
        data.writeInt32(0); // flags
        data.writeString16(NULL, 0); // package name
        data.writeString16(NULL, 0); // component name
        data.writeInt32(0); // source bound - size
        data.writeInt32(0); // categories - size
        data.writeInt32(0); // selector - size
        data.writeInt32(0); // clipData - size
        data.writeInt32(-2); // contentUserHint: -2 -> UserHandle.USER_CURRENT
        data.writeInt32(-1); // bundle extras length
        data.writeInt32(0x4C444E42); // 'B' 'N' 'D' 'L'
        int oldPos = data.dataPosition();
        data.writeInt32(1); // size
        // data.writeInt32(0); // VAL_STRING, need to remove because of analyze common intent
        data.writeString16(String16("type"));
        data.writeInt32(1); // VAL_INTEGER
        data.writeInt32(value);
        int newPos = data.dataPosition();
        data.setDataPosition(oldPos - 8);
        data.writeInt32(newPos - oldPos); // refill bundle extras length
        data.setDataPosition(newPos);
        // intent end
        data.writeString16(NULL, 0); // resolvedType
        data.writeStrongBinder(NULL); // resultTo
        data.writeInt32(0); // resultCode
        data.writeString16(NULL, 0); // resultData
        data.writeInt32(-1); // resultExtras
        data.writeString16(NULL, 0); // permission
        data.writeInt32(0); // appOp
        data.writeInt32(-1); // option
        data.writeInt32(1); // serialized: != 0 -> ordered
        data.writeInt32(0); // sticky
        data.writeInt32(-2); // userId: -2 -> UserHandle.USER_CURRENT

        status_t ret = am->transact(IBinder::FIRST_CALL_TRANSACTION + 13, data,
                                    &reply); // BROADCAST_INTENT_TRANSACTION
        if (ret == NO_ERROR) {
            int exceptionCode = reply.readExceptionCode();
            if (exceptionCode) {
                TM_INFO_LOG("sendBroadcastMessage(%s) caught exception %d\n",
                            action.string(), exceptionCode);
                return false;
            }
        } else {
            return false;
        }
    } else {
        TM_INFO_LOG("getService() couldn't find activity service!\n");
        return false;
    }
    return true;
}
*/

void CPhoneDevice2::camera_device_on_disconnected(void *context, ACameraDevice *device)
{
    ALOGI("Camera(id: %s) is disconnected.\n", ACameraDevice_getId(device));
    CPhoneDevice2* pThis = (CPhoneDevice2*)context;
    // delete pThis;
}

void CPhoneDevice2::camera_device_on_error(void *context, ACameraDevice *device, int error)
{
    ALOGI("Error(code: %d) on Camera(id: %s).\n", error, ACameraDevice_getId(device));
}

void CPhoneDevice2::capture_session_on_ready(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is ready. %p\n", session);
}

void CPhoneDevice2::capture_session_on_active(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is activated. %p\n", session);
}

void CPhoneDevice2::capture_session_on_closed(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is closed. %p\n", session);
}