实现Service和定时器

serial
Matthew 2 years ago
parent 00433db12f
commit 9952fcbd8c

@ -59,4 +59,8 @@ dependencies {
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
implementation 'com.google.code.gson:gson:2.10.1'
implementation 'com.googlecode.mp4parser:isoparser:1.1.21'
// implementation 'com.tencent.mars:mars-core:1.2.5'
// implementation 'com.tencent:mmkv-static:1.3.0'
}

@ -1,20 +0,0 @@
{
"version": 3,
"artifactType": {
"type": "APK",
"kind": "Directory"
},
"applicationId": "com.xinyingpower.microphoto",
"variantName": "release",
"elements": [
{
"type": "SINGLE",
"filters": [],
"attributes": [],
"versionCode": 1,
"versionName": "1.0",
"outputFile": "app-release.apk"
}
],
"elementType": "File"
}

@ -53,6 +53,7 @@
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission
@ -105,7 +106,38 @@
android:name=".MicroPhotoService"
android:enabled="true"
android:exported="true"
android:process="com.xytech.xymp"></service>
>
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED"/>
<action android:name="android.intent.action.SCREEN_ON"/>
<action android:name="android.intent.action.USER_PRESENT"/>
<action android:name="android.intent.action.USER_UNLOCKED"/>
</intent-filter>
</service>
<service android:name=".FloatingWindow">
</service>
<receiver android:name=".MicroPhotoService$AlarmReceiver"
android:exported="true">
<intent-filter>
<action android:name="com.xinyingpower.mp.ScheduleDetailActivity.AlarmReceiver" />
</intent-filter>
</receiver>
<receiver android:name=".ScreenActionReceiver"
android:exported="true">
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED"/>
<action android:name="android.intent.action.SCREEN_ON"/>
<action android:name="android.intent.action.USER_PRESENT"/>
<action android:name="android.intent.action.USER_UNLOCKED"/>
</intent-filter>
</receiver>
<activity
android:name=".MainActivity"

@ -14,9 +14,15 @@ add_definitions(-DBOOST_ALL_NO_LIB)
project("microphoto")
set(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
SET(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
set(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
file(GLOB BREAKPAD_SOURCES_COMMON
@ -61,7 +67,7 @@ add_library( # Sets the name of the library.
# Provides a relative path to your source file(s).
${BREAKPAD_SOURCES_COMMON} ${BREAKPAD_ASM_SOURCE} )
SET(JSONCPP_INCLUDE_DIR jsoncpp/include)
INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR})
SET(PUBLIC_HEADERS
@ -75,13 +81,23 @@ SET(PUBLIC_HEADERS
${JSONCPP_INCLUDE_DIR}/json/version.h
)
SET(jsoncpp_sources
jsoncpp/src/lib_json/json_tool.h
jsoncpp/src/lib_json/json_reader.cpp
jsoncpp/src/lib_json/json_valueiterator.inl
jsoncpp/src/lib_json/json_value.cpp
jsoncpp/src/lib_json/json_writer.cpp
jsoncpp/version.in)
SET(JSONCPP_SOURCES
${JSONCPP_SRC_DIR}/src/lib_json/json_tool.h
${JSONCPP_SRC_DIR}/src/lib_json/json_reader.cpp
${JSONCPP_SRC_DIR}/src/lib_json/json_valueiterator.inl
${JSONCPP_SRC_DIR}/src/lib_json/json_value.cpp
${JSONCPP_SRC_DIR}/src/lib_json/json_writer.cpp
${JSONCPP_SRC_DIR}/version.in)
SET(CAMERA2_SOURCES
${CAMERA2_ROOT_DIR}/android_main.cpp
${CAMERA2_ROOT_DIR}/camera_engine.cpp
${CAMERA2_ROOT_DIR}/camera_manager.cpp
${CAMERA2_ROOT_DIR}/camera_listeners.cpp
${CAMERA2_ROOT_DIR}/image_reader.cpp
${CAMERA2_ROOT_DIR}/camera_ui.cpp
${CAMERA2_ROOT_DIR}/utils/camera_utils.cpp)
include_directories(${TERM_CORE_ROOT})
# include_directories(${PROJECT_SOURCE_DIR}/../../../../../libs/inc/)
@ -113,7 +129,7 @@ add_library( # Sets the name of the library.
STATIC
# Provides a relative path to your source file(s).
${jsoncpp_sources}
${JSONCPP_SOURCES}
)
add_library( # Sets the name of the library.
@ -125,6 +141,12 @@ add_library( # Sets the name of the library.
# Provides a relative path to your source file(s).
MicroPhoto.cpp
TerminalDevice.cpp
PhoneDevice.cpp
Camera.cpp
Camera2Reader.cpp
# ${CAMERA2_SOURCES}
${TERM_CORE_ROOT}/Factory.cpp
${TERM_CORE_ROOT}/FilePoster.cpp
${TERM_CORE_ROOT}/LogThread.cpp
@ -132,6 +154,8 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/SpecData_I1.cpp
${TERM_CORE_ROOT}/SpecData_I1_AH.cpp
${TERM_CORE_ROOT}/SpecData_I1_HN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp
${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp
${TERM_CORE_ROOT}/SpecData_XY.cpp
${TERM_CORE_ROOT}/SpecData_ZJ.cpp
${TERM_CORE_ROOT}/TermClient.cpp
@ -140,6 +164,8 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Utils.cpp
${TERM_CORE_ROOT}/Client/Terminal.cpp
${TERM_CORE_ROOT}/Client/Terminal_HN.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp
${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
)
@ -172,5 +198,11 @@ target_link_libraries( # Specifies the target library.
# Links the target library to the log library
# included in the NDK.
${log-lib}
android
camera2ndk
mediandk
)

@ -0,0 +1,196 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#define LOG_TAG "CameraTestHelpers"
#include "Camera.h"
#include <android/log.h>
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Default-construct an idle wrapper; every NDK handle starts as the
// in-class {nullptr} default declared in Camera.h, so nothing is opened yet.
CCamera::CCamera()
{
}
// Release whatever NDK objects are still alive (see closeCamera(), which is
// safe to call on a partially initialized or already-closed instance).
CCamera::~CCamera()
{
closeCamera();
}
int CCamera::initCamera(ANativeWindow *imgReaderAnw) {
    // Open the first camera reported by ACameraManager and wire it to the
    // supplied AImageReader surface: manager -> device -> capture session ->
    // capture request.  Returns 0 on success (also for the benign "no NDK
    // compatible camera" / "no BACKWARD_COMPATIBLE" cases), -1 or an NDK
    // status code on failure.  Objects created before a failure are released
    // later by closeCamera(), which the destructor always invokes.
    if (imgReaderAnw == nullptr) {
        ALOGE("Cannot initialize camera before image reader get initialized.");
        return -1;
    }
    mImgReaderAnw = imgReaderAnw;

    mCameraManager = ACameraManager_create();
    if (mCameraManager == nullptr) {
        ALOGE("Failed to create ACameraManager.");
        return -1;
    }

    // Camera NDK calls return camera_status_t, so compare against ACAMERA_OK.
    // (The original compared against AMEDIA_OK, which only worked because
    // both constants happen to be 0.)
    int ret = ACameraManager_getCameraIdList(mCameraManager, &mCameraIdList);
    if (ret != ACAMERA_OK) {
        ALOGE("Failed to get cameraIdList: ret=%d", ret);
        return ret;
    }
    if (mCameraIdList->numCameras < 1) {
        ALOGW("Device has no NDK compatible camera.");
        return 0;
    }
    ALOGI("Found %d camera(s).", mCameraIdList->numCameras);

    // We always use the first camera.
    mCameraId = mCameraIdList->cameraIds[0];
    if (mCameraId == nullptr) {
        ALOGE("Failed to get cameraId.");
        return -1;
    }

    ret = ACameraManager_openCamera(mCameraManager, mCameraId, &mDeviceCb, &mDevice);
    if (ret != ACAMERA_OK || mDevice == nullptr) {
        ALOGE("Failed to open camera, ret=%d, mDevice=%p.", ret, mDevice);
        // (Removed a dead "ret = ACAMERA_ERROR_INVALID_PARAMETER" store that
        // was immediately followed by "return -1".)
        return -1;
    }

    ret = ACameraManager_getCameraCharacteristics(mCameraManager, mCameraId,
                                                  &mCameraMetadata);
    if (ret != ACAMERA_OK || mCameraMetadata == nullptr) {
        ALOGE("Get camera %s characteristics failure. ret %d, metadata %p",
              mCameraId, ret, mCameraMetadata);
        return -1;
    }

    if (!isCapabilitySupported(
            ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE)) {
        ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
        return 0;
    }

    // Create the capture session: one output backed by the reader surface.
    ret = ACaptureSessionOutputContainer_create(&mOutputs);
    if (ret != ACAMERA_OK) {
        ALOGE("ACaptureSessionOutputContainer_create failed, ret=%d", ret);
        return ret;
    }
    ret = ACaptureSessionOutput_create(mImgReaderAnw, &mImgReaderOutput);
    if (ret != ACAMERA_OK) {
        ALOGE("ACaptureSessionOutput_create failed, ret=%d", ret);
        return ret;
    }
    ret = ACaptureSessionOutputContainer_add(mOutputs, mImgReaderOutput);
    if (ret != ACAMERA_OK) {
        ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
        return ret;
    }
    ret = ACameraDevice_createCaptureSession(mDevice, mOutputs, &mSessionCb,
                                             &mSession);
    if (ret != ACAMERA_OK) {
        ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
        return ret;
    }

    // Create the capture request targeting the same reader surface.
    ret = ACameraDevice_createCaptureRequest(mDevice, TEMPLATE_RECORD,
                                             &mCaptureRequest);
    if (ret != ACAMERA_OK) {
        ALOGE("ACameraDevice_createCaptureRequest failed, ret=%d", ret);
        return ret;
    }
    ret = ACameraOutputTarget_create(mImgReaderAnw, &mReqImgReaderOutput);
    if (ret != ACAMERA_OK) {
        ALOGE("ACameraOutputTarget_create failed, ret=%d", ret);
        return ret;
    }
    ret = ACaptureRequest_addTarget(mCaptureRequest, mReqImgReaderOutput);
    if (ret != ACAMERA_OK) {
        ALOGE("ACaptureRequest_addTarget failed, ret=%d", ret);
        return ret;
    }

    mIsCameraReady = true;
    return 0;
}
bool CCamera::isCapabilitySupported(
    acamera_metadata_enum_android_request_available_capabilities_t cap) {
  // Check whether `cap` appears in the camera's advertised
  // ACAMERA_REQUEST_AVAILABLE_CAPABILITIES list.
  ACameraMetadata_const_entry entry;
  ACameraMetadata_getConstEntry(mCameraMetadata,
                                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &entry);
  const uint8_t *cur = entry.data.u8;
  const uint8_t *end = cur + entry.count;
  while (cur != end) {
    if (*cur == cap) {
      return true;
    }
    ++cur;
  }
  return false;
}
// Release every NDK object created by initCamera(), roughly in reverse order
// of creation (request targets, request, session, outputs, device, metadata,
// id list, manager).  Each handle is nulled after release, so the method is
// idempotent and safe to call on a partially initialized instance.
void CCamera::closeCamera() {
// Destroy capture request
if (mReqImgReaderOutput) {
ACameraOutputTarget_free(mReqImgReaderOutput);
mReqImgReaderOutput = nullptr;
}
if (mCaptureRequest) {
ACaptureRequest_free(mCaptureRequest);
mCaptureRequest = nullptr;
}
// Destroy capture session
if (mSession != nullptr) {
ACameraCaptureSession_close(mSession);
mSession = nullptr;
}
if (mImgReaderOutput) {
ACaptureSessionOutput_free(mImgReaderOutput);
mImgReaderOutput = nullptr;
}
if (mOutputs) {
ACaptureSessionOutputContainer_free(mOutputs);
mOutputs = nullptr;
}
// Destroy camera device
if (mDevice) {
ACameraDevice_close(mDevice);
mDevice = nullptr;
}
if (mCameraMetadata) {
ACameraMetadata_free(mCameraMetadata);
mCameraMetadata = nullptr;
}
// Destroy camera manager
if (mCameraIdList) {
ACameraManager_deleteCameraIdList(mCameraIdList);
mCameraIdList = nullptr;
}
if (mCameraManager) {
ACameraManager_delete(mCameraManager);
mCameraManager = nullptr;
}
mIsCameraReady = false;
}
// Submit a single capture using the request prepared by initCamera().
// Returns the ACameraCaptureSession_capture status (0 on success); the frame
// is delivered asynchronously to the AImageReader surface.
// NOTE(review): assumes initCamera() succeeded — mSession must not be null;
// callers should check isCameraReady() first.
int CCamera::takePicture()
{
return ACameraCaptureSession_capture(mSession, nullptr, 1, &mCaptureRequest,
nullptr);
}

@ -0,0 +1,73 @@
#ifndef __CAMERA_H__
#define __CAMERA_H__
// Must come before NdkCameraCaptureSession.h
#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraCaptureSession.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
#include <cstdlib>
#include <cstring>
#include <string>
#include <jni.h>
#include <media/NdkImage.h>
#include <media/NdkImageReader.h>
// Thin RAII-style wrapper over the Android Camera2 NDK: opens the first
// available camera, binds it to a caller-supplied AImageReader surface, and
// issues single still captures.  Lifecycle: initCamera() -> takePicture() ->
// closeCamera() (the destructor calls closeCamera() as a safety net).
class CCamera
{
public:
CCamera();
~CCamera();
// Wire the camera pipeline to `imgReaderAnw` (not owned).  0 on success.
int initCamera(ANativeWindow *imgReaderAnw);
// True if `cap` appears in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
bool isCapabilitySupported(acamera_metadata_enum_android_request_available_capabilities_t cap);
// True once initCamera() has fully succeeded.
bool isCameraReady() { return mIsCameraReady; }
// Release all NDK objects; idempotent.
void closeCamera();
// Submit one capture; returns the NDK status code.
int takePicture();
// Device/session state callbacks — deliberately no-ops; errors and
// disconnects are not handled here.
static void onDeviceDisconnected(void * /*obj*/, ACameraDevice * /*device*/) {}
static void onDeviceError(void * /*obj*/, ACameraDevice * /*device*/,
int /*errorCode*/) {}
static void onSessionClosed(void * /*obj*/,
ACameraCaptureSession * /*session*/) {}
static void onSessionReady(void * /*obj*/,
ACameraCaptureSession * /*session*/) {}
static void onSessionActive(void * /*obj*/,
ACameraCaptureSession * /*session*/) {}
private:
// Callback tables handed to the NDK; context is this instance.
ACameraDevice_StateCallbacks mDeviceCb{ this, onDeviceDisconnected,
onDeviceError };
ACameraCaptureSession_stateCallbacks mSessionCb{
this, onSessionClosed, onSessionReady, onSessionActive };
ANativeWindow *mImgReaderAnw{ nullptr }; // not owned by us.
// Camera manager
ACameraManager *mCameraManager{ nullptr };
ACameraIdList *mCameraIdList{ nullptr };
// Camera device
ACameraMetadata *mCameraMetadata{ nullptr };
ACameraDevice *mDevice{ nullptr };
// Capture session
ACaptureSessionOutputContainer *mOutputs{ nullptr };
ACaptureSessionOutput *mImgReaderOutput{ nullptr };
ACameraCaptureSession *mSession{ nullptr };
// Capture request
ACaptureRequest *mCaptureRequest{ nullptr };
ACameraOutputTarget *mReqImgReaderOutput{ nullptr };
bool mIsCameraReady{ false };
// Points into mCameraIdList; invalidated when the list is deleted.
const char *mCameraId{ nullptr };
private:
};

@ -0,0 +1,278 @@
/*
* Copyright 2015 Rockchip Electronics Co. LTD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define MODULE_TAG "CameraReader"
// #include "mpp_log.h"
// #include "mpp_mem.h"
#include "Camera2Reader.h"
#include <dirent.h>
#include <iomanip>
#include <string>
#include <sstream>
// Construct a reader for an explicit device path and frame geometry.
// Fix: the original strcpy() could overflow the fixed 20-byte mDevice
// buffer; use a bounded copy (snprintf truncates long paths safely).
// Also zero every NDK handle — the header declares them without
// initializers, so callbacks/teardown would otherwise read garbage.
Camera2Reader::Camera2Reader(const char *device, int bufcnt, int width, int height, int fmt)
    : mBufcnt(bufcnt),
      mWidth(width),
      mHeight(height),
      mFmt(fmt)
{
    mAImageReader = nullptr;
    theNativeWindow = nullptr;
    cameraDevice = nullptr;
    captureRequest = nullptr;
    cameraOutputTarget = nullptr;
    sessionOutput = nullptr;
    captureSessionOutputContainer = nullptr;
    captureSession = nullptr;
    snprintf(mDevice, sizeof(mDevice), "%s", device ? device : "");
}
// Construct a reader addressed by Camera2 index (0-based).  The device-path
// members are zeroed so Init()/readyToRun() never see indeterminate values.
Camera2Reader::Camera2Reader(int cameraId) : mCameraId(cameraId)
{
    mAImageReader = nullptr;
    theNativeWindow = nullptr;
    cameraDevice = nullptr;
    captureRequest = nullptr;
    cameraOutputTarget = nullptr;
    sessionOutput = nullptr;
    captureSessionOutputContainer = nullptr;
    captureSession = nullptr;
    mDevice[0] = '\0';
    mBufcnt = 0;
    mWidth = 0;
    mHeight = 0;
    mFmt = 0;
}
// Placeholder: capture setup actually happens in Open().  Returns 0 (success).
int Camera2Reader::Init(const char *device, int bufcnt, int width, int height, int format)
{
    return 0;
}
// Placeholder teardown counterpart of Init().  Returns 0 (success).
int Camera2Reader::Deinit()
{
    return 0;
}
// Open camera index mCameraId and trigger one JPEG capture into an
// AImageReader; the frame is written to <path>/<fileName> by OnImageCallback.
// Returns false on early failures, true once the capture has been submitted.
// NOTE(review): several NDK statuses below are logged but not acted on, and
// error returns after ACameraManager_openCamera() leak the opened device —
// flagged for follow-up rather than restructured here.
bool Camera2Reader::Open(const char* path, const char* fileName)
{
mPath.assign(path);
mFileName.assign(fileName);
ACameraIdList *cameraIdList = NULL;
ACameraMetadata *cameraMetadata = NULL;
const char *selectedCameraId = NULL;
camera_status_t camera_status = ACAMERA_OK;
ACameraManager *cameraManager = ACameraManager_create();
camera_status = ACameraManager_getCameraIdList(cameraManager, &cameraIdList);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to get camera id list (reason: %d)\n", camera_status);
return false;
}
if (cameraIdList->numCameras < 1 ) {
LOGI("No camera device detected.\n");
return false;
}
if (cameraIdList->numCameras <= mCameraId ) {
LOGI("No required camera device %d detected.\n", mCameraId);
return false;
}
selectedCameraId = cameraIdList->cameraIds[mCameraId];
LOGI("Trying to open Camera2 (id: %s, num of camera : %d)\n", selectedCameraId,
cameraIdList->numCameras);
camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId,
&cameraMetadata);
if (camera_status != ACAMERA_OK) {
// NOTE(review): failure is logged but execution continues with a null
// cameraMetadata — later ACameraMetadata_free(NULL) is tolerated by NDK,
// but this path deserves a hard return.
LOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
}
deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
deviceStateCallbacks.onError = camera_device_on_error;
camera_status = ACameraManager_openCamera(cameraManager, selectedCameraId,
&deviceStateCallbacks, &cameraDevice);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to open camera device (id: %s)\n", selectedCameraId);
}
camera_status = ACameraDevice_createCaptureRequest(cameraDevice, TEMPLATE_PREVIEW,
&captureRequest);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to create preview capture request (id: %s)\n", selectedCameraId);
}
ACaptureSessionOutputContainer_create(&captureSessionOutputContainer);
captureSessionStateCallbacks.onReady = capture_session_on_ready;
captureSessionStateCallbacks.onActive = capture_session_on_active;
captureSessionStateCallbacks.onClosed = capture_session_on_closed;
// The id list / metadata / manager are only needed for discovery; release
// them before the capture pipeline is assembled.
ACameraMetadata_free(cameraMetadata);
ACameraManager_deleteCameraIdList(cameraIdList);
ACameraManager_delete(cameraManager);
media_status_t status;
// Hard-coded 1920x1080 JPEG, 5 in-flight buffers.  NOTE(review): mWidth and
// mHeight are ignored here — presumably intentional for this device; confirm.
// status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_YUV_420_888, 5, &mAImageReader);
status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_JPEG, 5, &mAImageReader);
if (status != AMEDIA_OK)
{
LOGI("AImageReader_new error\n");
return false;
}
// OnImageCallback receives the captured frame and owns this object's end of
// life (it deletes the reader after writing the file).
AImageReader_ImageListener listener{
.context = this,
.onImageAvailable = OnImageCallback,
};
AImageReader_setImageListener(mAImageReader, &listener);
//ANativeWindow *mNativeWindow;
status = AImageReader_getWindow(mAImageReader, &theNativeWindow);
if (status != AMEDIA_OK)
{
LOGI("AImageReader_getWindow error\n");
return false;
}
LOGI("Surface is prepared in %p.\n", theNativeWindow);
ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);
ACaptureSessionOutput_create(theNativeWindow, &sessionOutput);
ACaptureSessionOutputContainer_add(captureSessionOutputContainer, sessionOutput);
ACameraDevice_createCaptureSession(cameraDevice, captureSessionOutputContainer,
&captureSessionStateCallbacks, &captureSession);
// Single-shot capture (repeating request is intentionally disabled).
// ACameraCaptureSession_setRepeatingRequest(captureSession, NULL, 1, &captureRequest, NULL);
ACameraCaptureSession_capture(captureSession, NULL, 1, &captureRequest, NULL);
LOGI("Surface is prepared in here.\n");
return true;
}
// Return a session-state callback table bound to this reader.
// Fix: the table is function-static, so its designated initializer runs only
// once and permanently captured the FIRST instance's `this`.  Refresh the
// context on every call so the callbacks always target the current reader.
// (The table stays static because callers keep the returned pointer alive
// beyond this call.)
ACameraCaptureSession_stateCallbacks *Camera2Reader::GetSessionListener()
{
    static ACameraCaptureSession_stateCallbacks sessionListener = {
        .context = this,
        .onClosed = Camera2Reader::capture_session_on_closed,
        .onReady = Camera2Reader::capture_session_on_ready,
        .onActive = Camera2Reader::capture_session_on_active,
    };
    sessionListener.context = this;
    return &sessionListener;
}
// Drain one frame from the reader and discard it (keeps the buffer queue
// from stalling).  Frames are actually persisted by the static
// OnImageCallback listener, not here.
// Fix: removed the unused local `int32_t format`.
void Camera2Reader::ImageCallback(AImageReader *reader)
{
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    LOGI("ImageCallback\n");
    if (status == AMEDIA_OK && image)
    {
        LOGI("ImageCallback\n");
        AImage_delete(image);
    }
}
// AImageReader listener: persist the captured frame, then dispose of the
// reader object itself.  `ctx` is the Camera2Reader registered in Open().
// NOTE(review): `delete pThis` means the Camera2Reader owns itself and dies
// after the first frame — callers (e.g. the JNI takePhoto glue) must not
// touch the pointer afterwards.  Deinit() is called but mAImageReader and
// the camera session are never released here; verify against NDK lifetime
// rules before changing.
void Camera2Reader::OnImageCallback(void *ctx, AImageReader *reader)
{
Camera2Reader* pThis = reinterpret_cast<Camera2Reader*>(ctx);
AImage *image = nullptr;
media_status_t status = AImageReader_acquireNextImage(reader, &image);
if (status == AMEDIA_OK && image)
{
WriteFile(pThis, image, pThis->mPath);
// image.
AImage_delete(image);
pThis->Deinit();
delete pThis;
}
}
// Prepare the reader and open the capture pipeline.
// Fix: Init() follows the 0-on-success convention, so success must be tested
// with "!= 0".  The previous "if (!Init(...))" treated 0 (success) as failure,
// making readyToRun() always bail out before Open().
// NOTE(review): when constructed via Camera2Reader(int), mDevice/mBufcnt/
// mWidth/mHeight/mFmt are placeholder values — Init() currently ignores them.
bool Camera2Reader::readyToRun()
{
    if (Init(mDevice, mBufcnt, mWidth, mHeight, mFmt) != 0)
    {
        LOGI("Init false\n");
        return false;
    }
    return Open("", "");
}
// Start capturing.  Currently a stub: the single-shot capture is already
// triggered inside Open(), so there is no worker thread to run yet.
void Camera2Reader::start()
{
//run();
}
// Stop capturing.  Stub counterpart of start().
void Camera2Reader::stop()
{
//threadStop();
}
// Worker-thread body placeholder; returning true would keep the loop alive.
bool Camera2Reader::threadLoop()
{
usleep(1000);
return true;
}
// NOTE(review): destructor only logs — mAImageReader, captureSession and the
// other NDK handles are not released here.  OnImageCallback deletes this
// object after a capture, so adding teardown here must avoid double-free.
Camera2Reader::~Camera2Reader()
{
LOGI("~CameraReader\n");
}
// Synchronous frame fetch by channel — not implemented; always NULL.
void *Camera2Reader::readImage(int chn)
{
return NULL;
}
// Write the single JPEG plane of `image` to <path>/<pThis->mFileName>,
// creating `path` if it does not exist.  Only single-plane images are
// accepted (AIMAGE_FORMAT_JPEG as configured in Open() has exactly 1 plane).
// SECURITY(review): `path` is interpolated into a shell command via
// system("mkdir -p ..."); if the path can come from untrusted input this is
// command injection — prefer mkdir(2)/std::filesystem::create_directories.
void Camera2Reader::WriteFile(Camera2Reader* pThis, AImage *image, const std::string& path)
{
// static const char *kFileName = "capture";
int planeCount;
media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
if (!(status == AMEDIA_OK && planeCount == 1))
{
LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
return;
}
uint8_t *data = nullptr;
int len = 0;
AImage_getPlaneData(image, 0, &data, &len);
// Ensure the destination directory exists.
DIR *dir = opendir(path.c_str());
if (dir)
{
closedir(dir);
}
else
{
std::string cmd = "mkdir -p ";
cmd += path;
system(cmd.c_str());
}
// NOTE(review): assumes `path` already ends with '/'; otherwise the
// directory and file name are concatenated without a separator — confirm
// against callers.
std::string fileName = path + pThis->mFileName;
FILE *file = fopen(fileName.c_str(), "wb");
if (file && data && len)
{
fwrite(data, 1, len, file);
fclose(file);
LOGE("Capture: %s", fileName.c_str());
}
else
{
// Still close the handle if fopen succeeded but the plane data was bad.
if (file)
fclose(file);
}
}

@ -0,0 +1,128 @@
/*
* Copyright 2015 Rockchip Electronics Co. LTD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CAMERA2_READER_H__
#define __CAMERA2_READER_H__
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImageReader.h>
#include <android/log.h>
#define LOG_TAG "native-camera2-jni"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define FMT_NUM_PLANES 1
// struct CameraParam
// {
// int id;
// const char *device;
// RK_U32 bufcnt;
// RK_U32 width;
// RK_U32 height;
// MppFrameFormat fmt;
// };
#include <string>
// One-shot Camera2 NDK still-capture helper: Open() builds the full pipeline
// (device -> session -> JPEG AImageReader) for camera index mCameraId and
// submits a single capture; the frame lands in OnImageCallback, which writes
// it to disk via WriteFile() and then deletes the reader (self-owning).
class Camera2Reader
{
private:
// NDK pipeline handles, valid between Open() and OnImageCallback teardown.
AImageReader *mAImageReader;
ANativeWindow *theNativeWindow;
ACameraDevice *cameraDevice;
ACaptureRequest *captureRequest;
ACameraOutputTarget *cameraOutputTarget;
ACaptureSessionOutput *sessionOutput;
ACaptureSessionOutputContainer *captureSessionOutputContainer;
ACameraCaptureSession *captureSession;
ACameraDevice_StateCallbacks deviceStateCallbacks;
ACameraCaptureSession_stateCallbacks captureSessionStateCallbacks;
int sessionSequenceId;
// 0-based Camera2 index selected in Open().
int mCameraId = 0;
// Fixed-size device-path buffer; see the bounded copy in the ctor.
char mDevice[20];
int mBufcnt;
int mWidth;
int mHeight;
int mFmt;
// Output directory and file name captured by Open().
std::string mPath;
std::string mFileName;
// Scratch conversion buffers (currently unused by the visible code paths).
unsigned char *mYuv720p = NULL;
unsigned char *mYuv420i = NULL;
unsigned char *mArgb1080p = NULL;
// Create a new context to capture frames from <fname>. Returns NULL on error.
int Init(const char *device, int bufcnt, int width, int height, int fmt);
// Stop capturing and free a context.
int Deinit();
bool readyToRun();
bool threadLoop();
ACameraCaptureSession_stateCallbacks *GetSessionListener();
void ImageCallback(AImageReader *reader);
// AImageReader listener; writes the frame then deletes the reader.
static void OnImageCallback(void *ctx, AImageReader *reader);
static void camera_device_on_disconnected(void *context, ACameraDevice *device)
{
// Fix: log message previously read "diconnected".
LOGI("Camera(id: %s) is disconnected.\n", ACameraDevice_getId(device));
}
static void camera_device_on_error(void *context, ACameraDevice *device, int error)
{
LOGI("Error(code: %d) on Camera(id: %s).\n", error, ACameraDevice_getId(device));
}
static void capture_session_on_ready(void *context, ACameraCaptureSession *session)
{
LOGI("Session is ready. %p\n", session);
}
static void capture_session_on_active(void *context, ACameraCaptureSession *session)
{
LOGI("Session is activated. %p\n", session);
}
static void capture_session_on_closed(void *context, ACameraCaptureSession *session)
{
LOGI("Session is closed. %p\n", session);
}
// Dump the single JPEG plane of `image` under `path`.
static void WriteFile(Camera2Reader* pThis, AImage *image, const std::string& path);
public:
// Build the pipeline and fire one capture; false on early failure.
bool Open(const char* path, const char* fileName);
Camera2Reader(const char *device, int bufcnt, int width, int height, int fmt);
Camera2Reader(int cameraId);
// Not implemented; always NULL.
void *readImage(int chn);
// Camera2Reader &operator=(CameraParam *cameraParam);
~Camera2Reader();
void start();
void stop();
};
#endif /* __CAMERA_READER_H__ */

@ -2,6 +2,17 @@
#include <string>
#include <TermClient.h>
#include "TerminalDevice.h"
#include "PhoneDevice.h"
#include <camera/NdkCameraCaptureSession.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "Camera.h"
#include "Camera2Reader.h"
// #include "client/linux/handler/exception_handler.h"
// #include "client/linux/handler/minidump_descriptor.h"
@ -14,6 +25,45 @@ Java_com_xinyingpower_microphoto_MainActivity_stringFromJNI(
return env->NewStringUTF(hello.c_str());
}
// JNI: capture one still from 1-based `channel` into <path>/<fileName>.
// Returns JNI_TRUE once the capture has been kicked off (asynchronous —
// success of the actual write is not reported back to Java).
// NOTE(review): the Camera2Reader is heap-allocated and never deleted here;
// it deletes itself in Camera2Reader::OnImageCallback after the frame is
// written.  If Open() fails before a frame arrives, the object leaks.
// `preset` is currently unused.
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xinyingpower_microphoto_MainActivity_takePhoto(
JNIEnv* env,
jobject pThis, jint channel, jint preset, jstring path, jstring fileName) {
// Earlier CCamera-based implementation kept for reference:
// ANativeWindow *imgReaderAnw = ANativeWindow_fromSurface(env, surface);
/*
CCamera camera;
camera.initCamera(imgReaderAnw);
if (camera.isCameraReady())
{
camera.takePicture();
}
camera.closeCamera();
*/
// Channels are 1-based on the Java side; camera indices are 0-based.
if (channel < 1 || channel > 0xFF)
{
return JNI_FALSE;
}
unsigned char id = (unsigned char)channel - 1;
Camera2Reader *camera = new Camera2Reader(id);
const char *pathStr = env->GetStringUTFChars(path, 0);
const char *fileNameStr = env->GetStringUTFChars(fileName, 0);
camera->Open(pathStr, fileNameStr);
env->ReleaseStringUTFChars(fileName, fileNameStr);
env->ReleaseStringUTFChars(path, pathStr);
camera->start();
return JNI_TRUE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xinyingpower_microphoto_MicroPhotoService_init(
JNIEnv* env,
@ -34,7 +84,8 @@ Java_com_xinyingpower_microphoto_MicroPhotoService_init(
jint ret = env->GetJavaVM(&vm);
// const string& appPath, const string& termId, const string& server, unsigned short port, const string& bindIp
CTermClient& service = CTermClient::GetService();
CTerminalDevice* device = new CTerminalDevice(vm, pThis);
// CTerminalDevice* device = new CTerminalDevice(vm, pThis);
CPhoneDevice* device = new CPhoneDevice();
bool res = service.InitService(appPathStr, cmdidStr, ipStr, (unsigned short)port, "", device);
env->ReleaseStringUTFChars(appPath, appPathStr);
@ -44,6 +95,32 @@ Java_com_xinyingpower_microphoto_MicroPhotoService_init(
return res ? JNI_TRUE : JNI_FALSE;
}
// JNI: capture one still from 1-based `channel` into <path>/<fileName>.
// The reader deletes itself after the frame is written (see
// Camera2Reader::OnImageCallback), so it is deliberately not deleted here.
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xinyingpower_microphoto_MicroPhotoService_takePhoto(
        JNIEnv* env,
        jobject pThis, jint channel, jint preset, jstring path, jstring fileName) {
    // Java channels are 1-based; reject anything outside 1..255.
    if (channel < 1 || channel > 0xFF)
    {
        return JNI_FALSE;
    }
    const unsigned char cameraId = (unsigned char)channel - 1;
    Camera2Reader *reader = new Camera2Reader(cameraId);
    const char *nativePath = env->GetStringUTFChars(path, 0);
    const char *nativeFileName = env->GetStringUTFChars(fileName, 0);
    reader->Open(nativePath, nativeFileName);
    env->ReleaseStringUTFChars(fileName, nativeFileName);
    env->ReleaseStringUTFChars(path, nativePath);
    reader->start();
    return JNI_TRUE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xinyingpower_microphoto_MicroPhotoService_uninit(
JNIEnv* env,
@ -53,3 +130,15 @@ Java_com_xinyingpower_microphoto_MicroPhotoService_uninit(
return JNI_TRUE;
}
// JNI: report the service heartbeat interval to Java.
// Hard-coded to 60000 — presumably milliseconds (1 minute); confirm against
// the Java caller's unit expectations.
extern "C" JNIEXPORT jlong JNICALL
Java_com_xinyingpower_microphoto_MicroPhotoService_getHeartbeatDuration(
JNIEnv* env,
jobject pThis) {
// CTermClient::GetService().ExitService();
return 60000;
}

@ -0,0 +1,269 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#define LOG_TAG "CameraTestHelpers"
#include "PhoneDevice.h"
#include <android/log.h>
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Trivial constructor; all state set up lazily in TakePhoto().
CPhoneDevice::CPhoneDevice()
{
}
// NOTE(review): no teardown — any NDK objects created by TakePhoto() are
// not released here.
CPhoneDevice::~CPhoneDevice()
{
}
// IDevice timer hooks — not implemented yet.  registerTimer() returns uid 0
// (presumably "no timer"); unregister/onTimeout unconditionally report
// success so the core service does not treat the device as failed.
IDevice::timer_uid_t CPhoneDevice::registerTimer(unsigned int timerType, unsigned int timeout)
{
return 0;
}
bool CPhoneDevice::unregisterTimer(IDevice::timer_uid_t uid)
{
return true;
}
bool CPhoneDevice::onTimeout(IDevice::timer_uid_t uid, unsigned int timerType, unsigned int times)
{
return true;
}
// Capture one still from 1-based `channel` into the location reported by
// GetFileName().  Mirrors Camera2Reader::Open() almost line-for-line (same
// pipeline, same caveats).  `preset` and `photo` are currently unused.
// NOTE(review): `path` is also unused here — the output location comes from
// the mPath member via GetFileName(); confirm that callers set it.
// NOTE(review): error returns after ACameraManager_openCamera() leak the
// opened device, and several NDK statuses are logged but not acted on.
bool CPhoneDevice::TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo)
{
// Channels are 1-based; Camera2 ids are 0-based.
int cameraId = (int)channel - 1;
ACameraIdList *cameraIdList = NULL;
ACameraMetadata *cameraMetadata = NULL;
const char *selectedCameraId = NULL;
camera_status_t camera_status = ACAMERA_OK;
ACameraManager *cameraManager = ACameraManager_create();
camera_status = ACameraManager_getCameraIdList(cameraManager, &cameraIdList);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to get camera id list (reason: %d)\n", camera_status);
return false;
}
if (cameraIdList->numCameras < 1 ) {
LOGI("No camera device detected.\n");
return false;
}
if (cameraIdList->numCameras <= cameraId ) {
LOGI("No required camera device %d detected.\n", cameraId);
return false;
}
selectedCameraId = cameraIdList->cameraIds[cameraId];
LOGI("Trying to open Camera2 (id: %s, num of camera : %d)\n", selectedCameraId,
cameraIdList->numCameras);
camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId,
&cameraMetadata);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
}
deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
deviceStateCallbacks.onError = camera_device_on_error;
camera_status = ACameraManager_openCamera(cameraManager, selectedCameraId,
&deviceStateCallbacks, &cameraDevice);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to open camera device (id: %s)\n", selectedCameraId);
}
camera_status = ACameraDevice_createCaptureRequest(cameraDevice, TEMPLATE_PREVIEW,
&captureRequest);
if (camera_status != ACAMERA_OK) {
LOGI("Failed to create preview capture request (id: %s)\n", selectedCameraId);
}
ACaptureSessionOutputContainer_create(&captureSessionOutputContainer);
captureSessionStateCallbacks.onReady = capture_session_on_ready;
captureSessionStateCallbacks.onActive = capture_session_on_active;
captureSessionStateCallbacks.onClosed = capture_session_on_closed;
// Discovery objects are no longer needed once the device is open.
ACameraMetadata_free(cameraMetadata);
ACameraManager_deleteCameraIdList(cameraIdList);
ACameraManager_delete(cameraManager);
media_status_t status;
// Hard-coded 1920x1080 JPEG reader with 5 buffers; frames arrive in
// OnImageCallback, which persists them via WriteFile().
// status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_YUV_420_888, 5, &mAImageReader);
status = AImageReader_new(1920, 1080, AIMAGE_FORMAT_JPEG, 5, &mAImageReader);
if (status != AMEDIA_OK)
{
LOGI("AImageReader_new error\n");
return false;
}
AImageReader_ImageListener listener{
.context = this,
.onImageAvailable = OnImageCallback,
};
AImageReader_setImageListener(mAImageReader, &listener);
//ANativeWindow *mNativeWindow;
status = AImageReader_getWindow(mAImageReader, &theNativeWindow);
if (status != AMEDIA_OK)
{
LOGI("AImageReader_getWindow error\n");
return false;
}
LOGI("Surface is prepared in %p.\n", theNativeWindow);
ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);
ACaptureSessionOutput_create(theNativeWindow, &sessionOutput);
ACaptureSessionOutputContainer_add(captureSessionOutputContainer, sessionOutput);
ACameraDevice_createCaptureSession(cameraDevice, captureSessionOutputContainer,
&captureSessionStateCallbacks, &captureSession);
// Single-shot capture; the repeating request remains intentionally disabled.
// ACameraCaptureSession_setRepeatingRequest(captureSession, NULL, 1, &captureRequest, NULL);
ACameraCaptureSession_capture(captureSession, NULL, 1, &captureRequest, NULL);
LOGI("Surface is prepared in here.\n");
return true;
}
// Return a session-state callback table bound to this device.
// Fix: the table is function-static, so its designated initializer runs only
// once and permanently captured the FIRST instance's `this`.  Refresh the
// context on every call so the callbacks always target the current device.
// (The table stays static because callers keep the returned pointer alive
// beyond this call.)
ACameraCaptureSession_stateCallbacks* CPhoneDevice::GetSessionListener()
{
    static ACameraCaptureSession_stateCallbacks sessionListener = {
        .context = this,
        .onClosed = CPhoneDevice::capture_session_on_closed,
        .onReady = CPhoneDevice::capture_session_on_ready,
        .onActive = CPhoneDevice::capture_session_on_active,
    };
    sessionListener.context = this;
    return &sessionListener;
}
// Drain one frame from the reader and discard it (keeps the buffer queue
// from stalling).  Frames are actually persisted by the static
// OnImageCallback listener, not here.
// Fix: removed the unused local `int32_t format`.
void CPhoneDevice::ImageCallback(AImageReader *reader)
{
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    LOGI("ImageCallback\n");
    if (status == AMEDIA_OK && image)
    {
        LOGI("ImageCallback\n");
        AImage_delete(image);
    }
}
void CPhoneDevice::OnImageCallback(void *ctx, AImageReader *reader)
{
CPhoneDevice* pThis = reinterpret_cast<CPhoneDevice*>(ctx);
AImage *image = nullptr;
media_status_t status = AImageReader_acquireNextImage(reader, &image);
if (status == AMEDIA_OK && image)
{
WriteFile(pThis, image);
AImage_delete(image);
// delete pThis;
}
}
// Persist a single-plane (JPEG) AImage to the path configured on pThis.
// Returns true only when every byte of the plane reached the file.
// Fixes over the original:
//  - the AImage_getPlaneData() status was ignored;
//  - the file was opened BEFORE validating the buffer, so a null/empty plane
//    left a zero-byte file on disk;
//  - the fwrite() return value was unchecked, so a short write reported success.
bool CPhoneDevice::WriteFile(CPhoneDevice* pThis, AImage *image)
{
    int planeCount = 0;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
    LOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
    if (!(status == AMEDIA_OK && planeCount == 1))
    {
        // JPEG images expose exactly one plane; anything else is unexpected here.
        LOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
        return false;
    }
    uint8_t *data = nullptr;
    int len = 0;
    status = AImage_getPlaneData(image, 0, &data, &len);
    if (status != AMEDIA_OK || data == nullptr || len <= 0)
    {
        LOGE("Error: getPlaneData() status=%d len=%d", (int)status, len);
        return false;
    }
    std::string path = pThis->GetFileName();
    FILE *file = fopen(path.c_str(), "wb");
    if (file == nullptr)
    {
        LOGE("Error: fopen(%s) failed", path.c_str());
        return false;
    }
    size_t written = fwrite(data, 1, (size_t)len, file);
    fclose(file);
    if (written != (size_t)len)
    {
        // Short write (disk full / I/O error): report failure, not success.
        LOGE("Error: short write %zu/%d to %s", written, len, path.c_str());
        return false;
    }
    LOGE("Capture: %s", path.c_str());  // kept at the original log level
    return true;
}
// Accessor for the capture output path currently configured on this device.
std::string CPhoneDevice::GetFileName() const
{
    return std::string(mPath);
}
// Camera-device disconnect callback: logs and destroys the owning instance.
// NOTE(review): this deletes the CPhoneDevice from the camera callback
// thread — verify no other thread still holds or uses this instance when
// the camera disconnects.
void CPhoneDevice::camera_device_on_disconnected(void *context, ACameraDevice *device)
{
    LOGI("Camera(id: %s) is diconnected.\n", ACameraDevice_getId(device));
    CPhoneDevice* pThis = (CPhoneDevice*)context;
    delete pThis;
}
// Camera-device error callback: log-only; no recovery is attempted here.
void CPhoneDevice::camera_device_on_error(void *context, ACameraDevice *device, int error)
{
    LOGI("Error(code: %d) on Camera(id: %s).\n", error, ACameraDevice_getId(device));
}
// Capture-session state handlers: diagnostics only, no state is mutated.
void CPhoneDevice::capture_session_on_ready(void *context, ACameraCaptureSession *session)
{
    LOGI("Session is ready. %p\n", session);
}
void CPhoneDevice::capture_session_on_active(void *context, ACameraCaptureSession *session)
{
    LOGI("Session is activated. %p\n", session);
}
void CPhoneDevice::capture_session_on_closed(void *context, ACameraCaptureSession *session)
{
    LOGI("Session is closed. %p\n", session);
}

@ -0,0 +1,70 @@
#ifndef __PHONE_DEVICE_H__
#define __PHONE_DEVICE_H__
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImageReader.h>
#include <android/log.h>
// #define LOG_TAG "native-camera2-jni"
#define PD_LOG_TAG "PhoneDev"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,PD_LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,PD_LOG_TAG,__VA_ARGS__)
#include <Client/Device.h>
#include <string>
/**
 * IDevice implementation backed by the Android camera2 NDK API and an
 * AImageReader. Captures a JPEG frame and writes it to the path in mPath.
 */
class CPhoneDevice : public IDevice
{
public:
    CPhoneDevice();
    ~CPhoneDevice();
    // IDevice interface: take one photo for channel/preset, storing it at
    // `path` (semantics of `photo` defined by the IDevice contract).
    virtual bool TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo);
    // Timer plumbing used by the service layer; uid identifies a registration.
    virtual timer_uid_t registerTimer(unsigned int timerType, unsigned int timeout);
    virtual bool unregisterTimer(timer_uid_t uid);
    virtual bool onTimeout(timer_uid_t uid, unsigned int timerType, unsigned int times);
protected:
    // Capture-session callback table (see caveat at the definition).
    ACameraCaptureSession_stateCallbacks *GetSessionListener();
    // Output path for the next capture.
    std::string GetFileName() const;
    // Static NDK camera callbacks; `context` carries the CPhoneDevice*.
    static void camera_device_on_disconnected(void *context, ACameraDevice *device);
    static void camera_device_on_error(void *context, ACameraDevice *device, int error);
    static void capture_session_on_ready(void *context, ACameraCaptureSession *session);
    static void capture_session_on_active(void *context, ACameraCaptureSession *session);
    static void capture_session_on_closed(void *context, ACameraCaptureSession *session);
    // Image-reader listeners; WriteFile persists an acquired JPEG plane.
    void ImageCallback(AImageReader *reader);
    static void OnImageCallback(void *ctx, AImageReader *reader);
    static bool WriteFile(CPhoneDevice* pThis, AImage *image);
protected:
    std::string mPath;                       // capture output path
    AImageReader *mAImageReader;             // JPEG reader feeding OnImageCallback
    ANativeWindow *theNativeWindow;          // surface obtained from the reader
    ACameraDevice *cameraDevice;
    ACaptureRequest *captureRequest;
    ACameraOutputTarget *cameraOutputTarget;
    ACaptureSessionOutput *sessionOutput;
    ACaptureSessionOutputContainer *captureSessionOutputContainer;
    ACameraCaptureSession *captureSession;
    ACameraDevice_StateCallbacks deviceStateCallbacks;
    ACameraCaptureSession_stateCallbacks captureSessionStateCallbacks;
};
#endif // __PHONE_DEVICE_H__

@ -1,5 +1,6 @@
#include "TerminalDevice.h"
#include <dlfcn.h>
#include "Camera.h"
typedef jbyteArray (*TakePhotoFunc)(int, int, int, int);
@ -50,12 +51,24 @@ CTerminalDevice::~CTerminalDevice()
bool CTerminalDevice::TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo)
{
jboolean res = JNI_FALSE;
CCamera camera;
camera.initCamera(NULL);
if (camera.isCameraReady())
{
camera.takePicture();
}
camera.closeCamera();
#if 0
JNIEnv* env = NULL;
bool attached = GetJniEnv(m_vm, &env);
jclass serviceClass = env->GetObjectClass(m_javaService);
jmethodID mid = env->GetMethodID(serviceClass, "takePhoto", "(SSLjava/lang/String;)Z");
jstring str = env->NewStringUTF(path.c_str());
jboolean res = env->CallBooleanMethod (m_javaService, mid, (jint)channel, (jint)preset, str);
res = env->CallBooleanMethod (m_javaService, mid, (jint)channel, (jint)preset, str);
env->ReleaseStringUTFChars(str, path.c_str());
env->DeleteLocalRef(serviceClass);
@ -67,6 +80,8 @@ bool CTerminalDevice::TakePhoto(unsigned char channel, unsigned char preset, con
{
m_vm->DetachCurrentThread();
}
#endif
return res == JNI_TRUE;
}

@ -0,0 +1,170 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_engine.h"
#include "utils/native_debug.h"
/*
* SampleEngine global object
*/
static CameraEngine* pEngineObj = nullptr;
// Accessor for the process-wide CameraEngine singleton; asserts that
// android_main() has already set pEngineObj.
CameraEngine* GetAppEngine(void) {
  ASSERT(pEngineObj, "AppEngine has not initialized");
  return pEngineObj;
}
/**
 * Template function for NativeActivity-derived applications
* Create/Delete camera object with
* INIT_WINDOW/TERM_WINDOW command, ignoring other event.
*/
// Dispatch NativeActivity lifecycle commands to the CameraEngine stored in
// app->userData. Only window init/term and config changes are handled.
static void ProcessAndroidCmd(struct android_app* app, int32_t cmd) {
  CameraEngine* engine = reinterpret_cast<CameraEngine*>(app->userData);
  switch (cmd) {
    case APP_CMD_INIT_WINDOW:
      if (engine->AndroidApp()->window != NULL) {
        // Remember the original window geometry so it can be restored on
        // TERM_WINDOW after CreateCamera() changed the buffer geometry.
        engine->SaveNativeWinRes(ANativeWindow_getWidth(app->window),
                                 ANativeWindow_getHeight(app->window),
                                 ANativeWindow_getFormat(app->window));
        engine->OnAppInitWindow();
      }
      break;
    case APP_CMD_TERM_WINDOW:
      engine->OnAppTermWindow();
      // Restore the geometry captured at INIT_WINDOW time.
      ANativeWindow_setBuffersGeometry(
          app->window, engine->GetSavedNativeWinWidth(),
          engine->GetSavedNativeWinHeight(), engine->GetSavedNativeWinFormat());
      break;
    case APP_CMD_CONFIG_CHANGED:
      engine->OnAppConfigChange();
      break;
    case APP_CMD_LOST_FOCUS:
      // Intentionally ignored.
      break;
  }
}
// NativeActivity entry point: installs the engine, then runs the classic
// android_native_app_glue poll loop until destruction is requested.
extern "C" void android_main(struct android_app* state) {
  CameraEngine engine(state);
  pEngineObj = &engine;  // publish the singleton for GetAppEngine()
  state->userData = reinterpret_cast<void*>(&engine);
  state->onAppCmd = ProcessAndroidCmd;
  // loop waiting for stuff to do.
  while (1) {
    // Read all pending events.
    int events;
    struct android_poll_source* source;
    // Timeout 0: do not block, so DrawFrame() below runs every iteration.
    while (ALooper_pollAll(0, NULL, &events, (void**)&source) >= 0) {
      // Process this event.
      if (source != NULL) {
        source->process(state, source);
      }
      // Check if we are exiting.
      if (state->destroyRequested != 0) {
        LOGI("CameraEngine thread destroy requested!");
        engine.DeleteCamera();
        pEngineObj = nullptr;
        return;
      }
    }
    // Render one preview frame per loop pass (no-op until camera is ready).
    pEngineObj->DrawFrame();
  }
}
/**
* Handle Android System APP_CMD_INIT_WINDOW message
 * Request camera permission from Java side
* Create camera object if camera has been granted
*/
// Window-ready handler. Without the runtime camera permission it only asks
// for it (the grant callback will re-drive initialization); otherwise it
// creates the camera and starts the preview stream.
void CameraEngine::OnAppInitWindow(void) {
  if (!cameraGranted_) {
    // Not permitted to use camera yet, ask(again) and defer other events
    RequestCameraPermission();
    return;
  }
  rotation_ = GetDisplayRotation();
  CreateCamera();
  ASSERT(camera_, "CameraCreation Failed");
  EnableUI();
  // NativeActivity end is ready to display, start pulling images
  cameraReady_ = true;
  camera_->StartPreview(true);
}
/**
 * Handle APP_CMD_TERM_WINDOW
*/
// Window-teardown handler: stop DrawFrame() from touching the camera, then
// release all camera resources.
void CameraEngine::OnAppTermWindow(void) {
  cameraReady_ = false;
  DeleteCamera();
}
/**
* Handle APP_CMD_CONFIG_CHANGED
*/
// Configuration-change handler: when the display rotation differs from the
// one the camera was created with, tear the camera down and rebuild it for
// the new orientation.
void CameraEngine::OnAppConfigChange(void) {
  const int currentRotation = GetDisplayRotation();
  if (currentRotation == rotation_) {
    return;  // orientation unchanged; nothing to rebuild
  }
  OnAppTermWindow();
  rotation_ = currentRotation;
  OnAppInitWindow();
}
/**
* Retrieve saved native window width.
* @return width of native window
*/
/**
 * Retrieve saved native window width.
 * @return width of native window
 */
int32_t CameraEngine::GetSavedNativeWinWidth(void) {
  return savedNativeWinRes_.width;
}
/**
 * Retrieve saved native window height.
 * @return height of native window
 */
int32_t CameraEngine::GetSavedNativeWinHeight(void) {
  return savedNativeWinRes_.height;
}
/**
 * Retrieve saved native window format
 * @return format of native window
 */
int32_t CameraEngine::GetSavedNativeWinFormat(void) {
  return savedNativeWinRes_.format;
}
/**
 * Save original NativeWindow resolution, captured at APP_CMD_INIT_WINDOW so
 * it can be restored on APP_CMD_TERM_WINDOW.
 * @param w width of native window in pixel
 * @param h height of native window in pixel
 * @param format pixel format of the native window
 */
void CameraEngine::SaveNativeWinRes(int32_t w, int32_t h, int32_t format) {
  savedNativeWinRes_.width = w;
  savedNativeWinRes_.height = h;
  savedNativeWinRes_.format = format;
}

@ -0,0 +1,175 @@
/**
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Description
* Demonstrate NDK Camera interface added to android-24
*/
#include "camera_engine.h"
#include <cstdio>
#include "utils/native_debug.h"
/**
* constructor and destructor for main application class
* @param app native_app_glue environment
* @return none
*/
// Construct the engine in a fully-idle state: no camera, no readers, no
// permission yet. Camera objects are created lazily in OnAppInitWindow().
CameraEngine::CameraEngine(android_app* app)
    : app_(app),
      cameraGranted_(false),
      rotation_(0),
      cameraReady_(false),
      camera_(nullptr),
      yuvReader_(nullptr),
      jpgReader_(nullptr) {
  // savedNativeWinRes_ is plain data; zero it until INIT_WINDOW records it.
  memset(&savedNativeWinRes_, 0, sizeof(savedNativeWinRes_));
}
// Destructor: stop frame pulls first, then release camera and readers.
CameraEngine::~CameraEngine() {
  cameraReady_ = false;
  DeleteCamera();
}
// Accessor for the native_app_glue state this engine was created with.
struct android_app* CameraEngine::AndroidApp(void) const { return app_; }
/**
* Create a camera object for onboard BACK_FACING camera
*/
/**
 * Create a camera object for the onboard BACK_FACING camera, size the
 * preview window to a matching resolution, and wire up the YUV (preview)
 * and JPEG (still capture) image readers. The statement order here matters:
 * readers must exist before CreateSession() consumes their windows.
 */
void CameraEngine::CreateCamera(void) {
  // Camera needed to be requested at the run-time from Java SDK
  // if Not granted, do nothing.
  if (!cameraGranted_ || !app_->window) {
    LOGW("Camera Sample requires Full Camera access");
    return;
  }
  int32_t displayRotation = GetDisplayRotation();
  rotation_ = displayRotation;
  camera_ = new NDKCamera();
  ASSERT(camera_, "Failed to Create CameraObject");
  // Derive the JPEG orientation from sensor mounting angle + display
  // rotation; front-facing sensors are mirrored, hence the extra 360-x step.
  int32_t facing = 0, angle = 0, imageRotation = 0;
  if (camera_->GetSensorOrientation(&facing, &angle)) {
    if (facing == ACAMERA_LENS_FACING_FRONT) {
      imageRotation = (angle + rotation_) % 360;
      imageRotation = (360 - imageRotation) % 360;
    } else {
      imageRotation = (angle - rotation_ + 360) % 360;
    }
  }
  LOGI("Phone Rotation: %d, Present Rotation Angle: %d", rotation_,
       imageRotation);
  ImageFormat view{0, 0, 0}, capture{0, 0, 0};
  camera_->MatchCaptureSizeRequest(app_->window, &view, &capture);
  ASSERT(view.width && view.height, "Could not find supportable resolution");
  // Request the necessary nativeWindow to OS; swap w/h when the window is
  // portrait because camera modes are reported landscape-first.
  bool portraitNativeWindow =
      (savedNativeWinRes_.width < savedNativeWinRes_.height);
  ANativeWindow_setBuffersGeometry(
      app_->window, portraitNativeWindow ? view.height : view.width,
      portraitNativeWindow ? view.width : view.height, WINDOW_FORMAT_RGBA_8888);
  yuvReader_ = new ImageReader(&view, AIMAGE_FORMAT_YUV_420_888);
  yuvReader_->SetPresentRotation(imageRotation);
  jpgReader_ = new ImageReader(&capture, AIMAGE_FORMAT_JPEG);
  jpgReader_->SetPresentRotation(imageRotation);
  // Notify the engine when a JPEG has been written to disk.
  jpgReader_->RegisterCallback(
      this, [this](void* ctx, const char* str) -> void {
        reinterpret_cast<CameraEngine*>(ctx)->OnPhotoTaken(str);
      });
  // now we could create session
  camera_->CreateSession(yuvReader_->GetNativeWindow(),
                         jpgReader_->GetNativeWindow(), imageRotation);
}
// Release the camera and both image readers, marking the engine not-ready
// first so DrawFrame() stops touching them.
void CameraEngine::DeleteCamera(void) {
  cameraReady_ = false;
  // `delete nullptr` is a well-defined no-op, so no guards are needed.
  delete camera_;
  camera_ = nullptr;
  delete yuvReader_;
  yuvReader_ = nullptr;
  delete jpgReader_;
  jpgReader_ = nullptr;
}
/**
* Initiate a Camera Run-time usage request to Java side implementation
* [ The request result will be passed back in function
* notifyCameraPermission()]
*/
/**
 * Initiate a Camera Run-time usage request to Java side implementation
 * [ The request result will be passed back in function
 * notifyCameraPermission()]
 * NOTE(review): GetEnv() is followed by an unconditional AttachCurrentThread
 * and a trailing DetachCurrentThread; if this thread were already attached
 * the detach would drop that attachment — confirm this only runs on the
 * native_app_glue thread as in the upstream AOSP sample.
 */
void CameraEngine::RequestCameraPermission() {
  if (!app_) return;
  JNIEnv* env;
  ANativeActivity* activity = app_->activity;
  activity->vm->GetEnv((void**)&env, JNI_VERSION_1_6);
  activity->vm->AttachCurrentThread(&env, NULL);
  // Call the Java-side void RequestCamera() on the activity object.
  jobject activityObj = env->NewGlobalRef(activity->clazz);
  jclass clz = env->GetObjectClass(activityObj);
  env->CallVoidMethod(activityObj,
                      env->GetMethodID(clz, "RequestCamera", "()V"));
  env->DeleteGlobalRef(activityObj);
  activity->vm->DetachCurrentThread();
}
/**
* Process to user's sensitivity and exposure value change
* all values are represented in int64_t even exposure is just int32_t
* @param code ACAMERA_SENSOR_EXPOSURE_TIME or ACAMERA_SENSOR_SENSITIVITY
* @param val corresponding value from user
*/
// Forward a UI-driven exposure/sensitivity change to the camera request.
// @param code ACAMERA_SENSOR_EXPOSURE_TIME or ACAMERA_SENSOR_SENSITIVITY
// @param val  new value (int64 even when the tag itself is int32)
void CameraEngine::OnCameraParameterChanged(int32_t code, int64_t val) {
  camera_->UpdateCameraRequestParameter(code, val);
}
/**
* The main function rendering a frame. In our case, it is yuv to RGBA8888
* converter
*/
/**
 * Render one frame: pull the next YUV image from the preview reader and
 * convert it into the locked RGBA window buffer. Silently returns when the
 * camera is not ready or no image is pending.
 */
void CameraEngine::DrawFrame(void) {
  if (!cameraReady_ || !yuvReader_) return;
  AImage* image = yuvReader_->GetNextImage();
  if (!image) {
    return;
  }
  ANativeWindow_acquire(app_->window);
  ANativeWindow_Buffer buf;
  if (ANativeWindow_lock(app_->window, &buf, nullptr) < 0) {
    // Could not lock the surface; drop the image so the reader queue drains.
    yuvReader_->DeleteImage(image);
    return;
  }
  // DisplayImage() consumes (and releases) the image.
  yuvReader_->DisplayImage(&buf, image);
  ANativeWindow_unlockAndPost(app_->window);
  ANativeWindow_release(app_->window);
}

@ -0,0 +1,81 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CAMERA_ENGINE_H__
#define __CAMERA_ENGINE_H__
#include <android/native_window.h>
#include <android_native_app_glue.h>
#include <functional>
#include <thread>
#include "camera_manager.h"
/**
* basic CameraAppEngine
*/
/**
 * basic CameraAppEngine: owns the NDKCamera plus the YUV (preview) and JPEG
 * (still capture) image readers, and bridges NativeActivity lifecycle events
 * to camera setup/teardown.
 */
class CameraEngine {
 public:
  explicit CameraEngine(android_app* app);
  ~CameraEngine();
  // Interfaces to android application framework
  struct android_app* AndroidApp(void) const;
  void OnAppInitWindow(void);
  void DrawFrame(void);
  void OnAppConfigChange(void);
  void OnAppTermWindow(void);
  // Native Window handlers: geometry saved at INIT_WINDOW, restored at TERM.
  int32_t GetSavedNativeWinWidth(void);
  int32_t GetSavedNativeWinHeight(void);
  int32_t GetSavedNativeWinFormat(void);
  void SaveNativeWinRes(int32_t w, int32_t h, int32_t format);
  // UI handlers
  void RequestCameraPermission();
  void OnCameraPermission(jboolean granted);
  void EnableUI(void);
  void OnTakePhoto(void);
  void OnCameraParameterChanged(int32_t code, int64_t val);
  // Manage NDKCamera Object
  void CreateCamera(void);
  void DeleteCamera(void);

 private:
  void OnPhotoTaken(const char* fileName);
  int GetDisplayRotation(void);
  struct android_app* app_;        // native_app_glue state
  ImageFormat savedNativeWinRes_;  // window geometry before camera resize
  bool cameraGranted_;             // runtime CAMERA permission granted?
  int rotation_;                   // display rotation camera was built for
  volatile bool cameraReady_;      // gate for DrawFrame()
  NDKCamera* camera_;
  ImageReader* yuvReader_;         // preview stream
  ImageReader* jpgReader_;         // still-capture stream
};
/**
* retrieve global singleton CameraEngine instance
* @return the only instance of CameraEngine in the app
*/
CameraEngine* GetAppEngine(void);
#endif // __CAMERA_ENGINE_H__

@ -0,0 +1,241 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <camera/NdkCameraManager.h>
#include <cinttypes>
#include <queue>
#include <thread>
#include <utility>
#include "camera_manager.h"
#include "utils/camera_utils.h"
#include "utils/native_debug.h"
/*
* Camera Manager Listener object
*/
// Free-function trampolines for ACameraManager availability callbacks;
// `ctx` carries the NDKCamera registered in GetManagerListener().
void OnCameraAvailable(void* ctx, const char* id) {
  reinterpret_cast<NDKCamera*>(ctx)->OnCameraStatusChanged(id, true);
}
void OnCameraUnavailable(void* ctx, const char* id) {
  reinterpret_cast<NDKCamera*>(ctx)->OnCameraStatusChanged(id, false);
}
/**
* OnCameraStatusChanged()
* handles Callback from ACameraManager
*/
// Record a camera's availability as reported by ACameraManager; ignored
// while the object is not in a valid state (e.g. during teardown).
void NDKCamera::OnCameraStatusChanged(const char* id, bool available) {
  if (!valid_) {
    return;
  }
  cameras_[std::string(id)].available_ = available;
}
/**
* Construct a camera manager listener on the fly and return to caller
*
* @return ACameraManager_AvailabilityCallback
*/
/**
 * Construct a camera manager listener on the fly and return to caller.
 * NOTE(review): function-local `static`, so `.context` is fixed to the first
 * caller's `this`; safe only while a single NDKCamera instance exists (the
 * sample creates exactly one).
 *
 * @return ACameraManager_AvailabilityCallbacks
 */
ACameraManager_AvailabilityCallbacks* NDKCamera::GetManagerListener() {
  static ACameraManager_AvailabilityCallbacks cameraMgrListener = {
      .context = this,
      .onCameraAvailable = ::OnCameraAvailable,
      .onCameraUnavailable = ::OnCameraUnavailable,
  };
  return &cameraMgrListener;
}
/*
* CameraDevice callbacks
*/
// Free-function trampolines for ACameraDevice state callbacks.
void OnDeviceStateChanges(void* ctx, ACameraDevice* dev) {
  reinterpret_cast<NDKCamera*>(ctx)->OnDeviceState(dev);
}
void OnDeviceErrorChanges(void* ctx, ACameraDevice* dev, int err) {
  reinterpret_cast<NDKCamera*>(ctx)->OnDeviceError(dev, err);
}
// Device state callback table passed to ACameraManager_openCamera().
// NOTE(review): same function-local-static `.context = this` caveat as
// GetManagerListener(); valid for a single NDKCamera instance.
ACameraDevice_stateCallbacks* NDKCamera::GetDeviceListener() {
  static ACameraDevice_stateCallbacks cameraDeviceListener = {
      .context = this,
      .onDisconnected = ::OnDeviceStateChanges,
      .onError = ::OnDeviceErrorChanges,
  };
  return &cameraDeviceListener;
}
/**
* Handle Camera DeviceStateChanges msg, notify device is disconnected
* simply close the camera
*/
/**
 * Handle Camera DeviceStateChanges msg, notify device is disconnected:
 * simply close the camera and forget it. Order matters — close the device
 * handle before erasing the map entry that owns it.
 */
void NDKCamera::OnDeviceState(ACameraDevice* dev) {
  std::string id(ACameraDevice_getId(dev));
  LOGW("device %s is disconnected", id.c_str());
  cameras_[id].available_ = false;
  ACameraDevice_close(cameras_[id].device_);
  cameras_.erase(id);
}
/**
* Handles Camera's deviceErrorChanges message, no action;
* mainly debugging purpose
*
*
*/
/**
 * Handles the camera's deviceErrorChanges message. No recovery is attempted;
 * the camera is simply marked unavailable/unowned for every recognized error
 * (both case groups in the original had identical bodies, so the labels are
 * folded together here). Unknown codes are only logged.
 */
void NDKCamera::OnDeviceError(ACameraDevice* dev, int err) {
  std::string id(ACameraDevice_getId(dev));
  LOGI("CameraDevice %s is in error %#x", id.c_str(), err);
  PrintCameraDeviceError(err);
  CameraId& cam = cameras_[id];
  switch (err) {
    case ERROR_CAMERA_IN_USE:
    case ERROR_CAMERA_SERVICE:
    case ERROR_CAMERA_DEVICE:
    case ERROR_CAMERA_DISABLED:
    case ERROR_MAX_CAMERAS_IN_USE:
      cam.available_ = false;
      cam.owner_ = false;
      break;
    default:
      LOGI("Unknown Camera Device Error: %#x", err);
  }
}
// CaptureSession state callbacks
// CaptureSession state trampolines: log, then forward to OnSessionState()
// with the matching CaptureSessionState enum value.
void OnSessionClosed(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p closed", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(
      ses, CaptureSessionState::CLOSED);
}
void OnSessionReady(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p ready", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(ses,
                                                    CaptureSessionState::READY);
}
void OnSessionActive(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p active", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(
      ses, CaptureSessionState::ACTIVE);
}
// Session state callback table for ACameraDevice_createCaptureSession().
// NOTE(review): same function-local-static `.context = this` caveat as the
// other Get*Listener() helpers; valid for a single NDKCamera instance.
ACameraCaptureSession_stateCallbacks* NDKCamera::GetSessionListener() {
  static ACameraCaptureSession_stateCallbacks sessionListener = {
      .context = this,
      .onClosed = ::OnSessionClosed,
      .onReady = ::OnSessionReady,
      .onActive = ::OnSessionActive,
  };
  return &sessionListener;
}
/**
* Handles capture session state changes.
* Update into internal session state.
*/
/**
 * Handles capture session state changes: records the new state after
 * verifying the callback refers to our current session.
 */
void NDKCamera::OnSessionState(ACameraCaptureSession* ses,
                               CaptureSessionState state) {
  if (!ses || ses != captureSession_) {
    // Stale or foreign session pointer — ignore but log which case it was.
    LOGW("CaptureSession is %s", (ses ? "NOT our session" : "NULL"));
    return;
  }
  ASSERT(state < CaptureSessionState::MAX_STATE, "Wrong state %d", state);
  captureSessionState_ = state;
}
// Capture callbacks, mostly information purpose
// Capture callbacks, mostly information purpose.
// Each trampoline hands the event off to a detached worker thread so the
// camera callback thread is never blocked by the (potentially slow)
// preview-restart logic in the member handlers.
void SessionCaptureCallback_OnFailed(void* context,
                                     ACameraCaptureSession* session,
                                     ACaptureRequest* request,
                                     ACameraCaptureFailure* failure) {
  std::thread captureFailedThread(&NDKCamera::OnCaptureFailed,
                                  static_cast<NDKCamera*>(context), session,
                                  request, failure);
  captureFailedThread.detach();
}
void SessionCaptureCallback_OnSequenceEnd(void* context,
                                          ACameraCaptureSession* session,
                                          int sequenceId, int64_t frameNumber) {
  std::thread sequenceThread(&NDKCamera::OnCaptureSequenceEnd,
                             static_cast<NDKCamera*>(context), session,
                             sequenceId, frameNumber);
  sequenceThread.detach();
}
void SessionCaptureCallback_OnSequenceAborted(void* context,
                                              ACameraCaptureSession* session,
                                              int sequenceId) {
  // An aborted sequence reuses the sequence-end handler with frameNumber -1.
  std::thread sequenceThread(&NDKCamera::OnCaptureSequenceEnd,
                             static_cast<NDKCamera*>(context), session,
                             sequenceId, static_cast<int64_t>(-1));
  sequenceThread.detach();
}
// Capture callback table: only failure / sequence-end / sequence-abort are
// handled; per-frame progress callbacks are intentionally left null.
// NOTE(review): same function-local-static `.context = this` caveat as the
// other Get*Listener() helpers; valid for a single NDKCamera instance.
ACameraCaptureSession_captureCallbacks* NDKCamera::GetCaptureCallback() {
  static ACameraCaptureSession_captureCallbacks captureListener{
      .context = this,
      .onCaptureStarted = nullptr,
      .onCaptureProgressed = nullptr,
      .onCaptureCompleted = nullptr,
      .onCaptureFailed = SessionCaptureCallback_OnFailed,
      .onCaptureSequenceCompleted = SessionCaptureCallback_OnSequenceEnd,
      .onCaptureSequenceAborted = SessionCaptureCallback_OnSequenceAborted,
      .onCaptureBufferLost = nullptr,
  };
  return &captureListener;
}
/**
* Process JPG capture SessionCaptureCallback_OnFailed event
* If this is current JPG capture session, simply resume preview
* @param session the capture session that failed
* @param request the capture request that failed
* @param failure for additional fail info.
*/
/**
 * Process JPG capture SessionCaptureCallback_OnFailed event.
 * If this is the current JPG capture request, simply resume preview;
 * failures of other requests are ignored.
 * @param session the capture session that failed
 * @param request the capture request that failed
 * @param failure for additional fail info.
 */
void NDKCamera::OnCaptureFailed(ACameraCaptureSession* session,
                                ACaptureRequest* request,
                                ACameraCaptureFailure* failure) {
  if (valid_ && request == requests_[JPG_CAPTURE_REQUEST_IDX].request_) {
    ASSERT(failure->sequenceId ==
               requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_,
           "Error jpg sequence id")
    StartPreview(true);
  }
}
/**
* Process event from JPEG capture
* SessionCaptureCallback_OnSequenceEnd()
* SessionCaptureCallback_OnSequenceAborted()
*
 * If this is a jpg capture, turn preview back on after the capture.
*/
// Sequence-end/abort handler: when the finished sequence is the JPEG still
// capture, restart the repeating preview request; other sequences are
// ignored. (frameNumber is unused; abort passes -1 here.)
void NDKCamera::OnCaptureSequenceEnd(ACameraCaptureSession* session,
                                     int sequenceId, int64_t frameNumber) {
  if (sequenceId != requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_)
    return;
  // resume preview
  CALL_SESSION(setRepeatingRequest(captureSession_, nullptr, 1,
                                   &requests_[PREVIEW_REQUEST_IDX].request_,
                                   nullptr));
}

@ -0,0 +1,490 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_manager.h"
#include <camera/NdkCameraManager.h>
#include <unistd.h>
#include <cinttypes>
#include <queue>
#include <utility>
#include "utils/camera_utils.h"
#include "utils/native_debug.h"
/**
* Range of Camera Exposure Time:
* Camera's capability range have a very long range which may be disturbing
* on camera. For this sample purpose, clamp to a range showing visible
* video on preview: 100000ns ~ 250000000ns
*/
static const uint64_t kMinExposureTime = static_cast<uint64_t>(1000000);
static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
/**
 * Construct and fully initialize the NDK camera wrapper: enumerate cameras,
 * open the back-facing device, register availability callbacks, and seed the
 * exposure/sensitivity controls at 2% of the supported range. `valid_` stays
 * false until the very end so callbacks arriving mid-construction are ignored.
 */
NDKCamera::NDKCamera()
    : cameraMgr_(nullptr),
      activeCameraId_(""),
      cameraFacing_(ACAMERA_LENS_FACING_BACK),
      cameraOrientation_(0),
      outputContainer_(nullptr),
      captureSessionState_(CaptureSessionState::MAX_STATE),
      exposureTime_(static_cast<int64_t>(0)) {
  valid_ = false;
  requests_.resize(CAPTURE_REQUEST_COUNT);
  // NOTE(review): memset over the vector's elements assumes the request
  // struct is trivially-copyable plain data — confirm it holds no non-POD
  // members, otherwise this is undefined behavior.
  memset(requests_.data(), 0, requests_.size() * sizeof(requests_[0]));
  cameras_.clear();
  cameraMgr_ = ACameraManager_create();
  ASSERT(cameraMgr_, "Failed to create cameraManager");
  // Pick up a back-facing camera to preview
  EnumerateCamera();
  ASSERT(activeCameraId_.size(), "Unknown ActiveCameraIdx");
  // Create back facing camera device
  CALL_MGR(openCamera(cameraMgr_, activeCameraId_.c_str(), GetDeviceListener(),
                      &cameras_[activeCameraId_].device_));
  CALL_MGR(registerAvailabilityCallback(cameraMgr_, GetManagerListener()));
  // Initialize camera controls(exposure time and sensitivity), pick
  // up value of 2% * range + min as starting value (just a number, no magic)
  ACameraMetadata* metadataObj;
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  ACameraMetadata_const_entry val = {
      0,
  };
  camera_status_t status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
  if (status == ACAMERA_OK) {
    // Clamp the sensor range to [kMinExposureTime, kMaxExposureTime] so the
    // UI slider stays in a visually useful band.
    // NOTE(review): i64 values compared against uint64_t constants — mixed
    // signed/unsigned comparison; fine for sane positive ranges, confirm.
    exposureRange_.min_ = val.data.i64[0];
    if (exposureRange_.min_ < kMinExposureTime) {
      exposureRange_.min_ = kMinExposureTime;
    }
    exposureRange_.max_ = val.data.i64[1];
    if (exposureRange_.max_ > kMaxExposureTime) {
      exposureRange_.max_ = kMaxExposureTime;
    }
    exposureTime_ = exposureRange_.value(2);
  } else {
    LOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
    exposureRange_.min_ = exposureRange_.max_ = 0l;
    exposureTime_ = 0l;
  }
  status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
  if (status == ACAMERA_OK) {
    sensitivityRange_.min_ = val.data.i32[0];
    sensitivityRange_.max_ = val.data.i32[1];
    sensitivity_ = sensitivityRange_.value(2);
  } else {
    LOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
    sensitivityRange_.min_ = sensitivityRange_.max_ = 0;
    sensitivity_ = 0;
  }
  valid_ = true;
}
/**
* A helper class to assist image size comparison, by comparing the absolute
* size
* regardless of the portrait or landscape mode.
*/
/**
 * A helper class to assist image size comparison, by comparing the absolute
 * size regardless of the portrait or landscape mode: dimensions are stored
 * landscape-normalized (larger side as width), with a flag remembering the
 * original orientation.
 */
class DisplayDimension {
 public:
  DisplayDimension(int32_t w, int32_t h) : w_(w), h_(h), portrait_(false) {
    if (h > w) {
      // make it landscape
      w_ = h;
      h_ = w;
      portrait_ = true;
    }
  }
  DisplayDimension(const DisplayDimension& other) {
    w_ = other.w_;
    h_ = other.h_;
    portrait_ = other.portrait_;
  }
  DisplayDimension(void) {
    w_ = 0;
    h_ = 0;
    portrait_ = false;
  }
  DisplayDimension& operator=(const DisplayDimension& other) {
    w_ = other.w_;
    h_ = other.h_;
    portrait_ = other.portrait_;
    return (*this);
  }
  // True when both sizes share the same aspect ratio (cross-multiplied to
  // stay in integer arithmetic).
  bool IsSameRatio(DisplayDimension& other) {
    return (w_ * other.h_ == h_ * other.w_);
  }
  // "At least as large in both dimensions".
  // Fix: the original used bitwise '&' on the two bool operands; '&&' is the
  // intended logical operator (identical result on bools, but short-circuits
  // and silences -Wbitwise-instead-of-logical).
  bool operator>(DisplayDimension& other) {
    return (w_ >= other.w_ && h_ >= other.h_);
  }
  bool operator==(DisplayDimension& other) {
    return (w_ == other.w_ && h_ == other.h_ && portrait_ == other.portrait_);
  }
  // Component-wise difference of the normalized dimensions.
  DisplayDimension operator-(DisplayDimension& other) {
    DisplayDimension delta(w_ - other.w_, h_ - other.h_);
    return delta;
  }
  void Flip(void) { portrait_ = !portrait_; }
  bool IsPortrait(void) { return portrait_; }
  // Normalized (landscape-first) accessors.
  int32_t width(void) { return w_; }
  int32_t height(void) { return h_; }
  // Original-orientation accessors (swap back when portrait).
  int32_t org_width(void) { return (portrait_ ? h_ : w_); }
  int32_t org_height(void) { return (portrait_ ? w_ : h_); }

 private:
  int32_t w_, h_;      // landscape-normalized: w_ >= h_
  bool portrait_;      // true when the constructor swapped the inputs
};
/**
* Find a compatible camera modes:
 * 1) the same aspect ratio as the native display window, which should be a
* rotated version of the physical device
* 2) the smallest resolution in the camera mode list
* This is to minimize the later color space conversion workload.
*/
/**
 * Pick camera stream configurations to match the display: the smallest
 * YUV_420_888 mode sharing the window's aspect ratio (preview, minimizes
 * color-conversion work) and the largest JPEG mode with that ratio (still
 * capture). Falls back to 640x480 when nothing matches.
 * @return true when a compatible pair was found, false on fallback.
 */
bool NDKCamera::MatchCaptureSizeRequest(ANativeWindow* display,
                                        ImageFormat* resView,
                                        ImageFormat* resCap) {
  DisplayDimension disp(ANativeWindow_getWidth(display),
                        ANativeWindow_getHeight(display));
  // Sensor mounted at 90/270 degrees: compare against the rotated window.
  if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
    disp.Flip();
  }
  ACameraMetadata* metadata;
  CALL_MGR(
      getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(), &metadata));
  ACameraMetadata_const_entry entry;
  CALL_METADATA(getConstEntry(
      metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry));
  // format of the data: format, width, height, input?, type int32
  bool foundIt = false;
  DisplayDimension foundRes(4000, 4000);  // sentinel: larger than any mode
  DisplayDimension maxJPG(0, 0);
  for (int i = 0; i < entry.count; i += 4) {
    int32_t input = entry.data.i32[i + 3];
    int32_t format = entry.data.i32[i + 0];
    if (input) continue;  // skip input (reprocessing) configurations
    if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
      DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
      if (!disp.IsSameRatio(res)) continue;
      if (format == AIMAGE_FORMAT_YUV_420_888 && foundRes > res) {
        foundIt = true;
        foundRes = res;  // keep the SMALLEST matching YUV mode
      } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
        maxJPG = res;    // keep the LARGEST matching JPEG mode
      }
    }
  }
  if (foundIt) {
    resView->width = foundRes.org_width();
    resView->height = foundRes.org_height();
    resCap->width = maxJPG.org_width();
    resCap->height = maxJPG.org_height();
  } else {
    LOGW("Did not find any compatible camera resolution, taking 640x480");
    if (disp.IsPortrait()) {
      resView->width = 480;
      resView->height = 640;
    } else {
      resView->width = 640;
      resView->height = 480;
    }
    *resCap = *resView;
  }
  resView->format = AIMAGE_FORMAT_YUV_420_888;
  resCap->format = AIMAGE_FORMAT_JPEG;
  return foundIt;
}
/**
 * Build the capture session over the two app-supplied windows: a repeating
 * preview request (manual AE) and a still-capture JPEG request (auto AE,
 * with JPEG orientation pre-set). Each request acquires its window, creates
 * a session output + output target, and attaches the target — that order is
 * required by the camera2 NDK API.
 */
void NDKCamera::CreateSession(ANativeWindow* previewWindow,
                              ANativeWindow* jpgWindow, int32_t imageRotation) {
  // Create output from this app's ANativeWindow, and add into output container
  requests_[PREVIEW_REQUEST_IDX].outputNativeWindow_ = previewWindow;
  requests_[PREVIEW_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;
  requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
  requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;
  CALL_CONTAINER(create(&outputContainer_));
  for (auto& req : requests_) {
    ANativeWindow_acquire(req.outputNativeWindow_);
    CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
    CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
    CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
    CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                  req.template_, &req.request_));
    CALL_REQUEST(addTarget(req.request_, req.target_));
  }
  // Create a capture session for the given preview request
  captureSessionState_ = CaptureSessionState::READY;
  CALL_DEV(createCaptureSession(cameras_[activeCameraId_].device_,
                                outputContainer_, GetSessionListener(),
                                &captureSession_));
  // Bake the display-derived rotation into the JPEG EXIF orientation.
  ACaptureRequest_setEntry_i32(requests_[JPG_CAPTURE_REQUEST_IDX].request_,
                               ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
  /*
   * Only preview request is in manual mode, JPG is always in Auto mode
   * JPG capture mode could also be switch into manual mode and control
   * the capture parameters, this sample leaves JPG capture to be auto mode
   * (auto control has better effect than author's manual control)
   */
  uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
  CALL_REQUEST(setEntry_u8(requests_[PREVIEW_REQUEST_IDX].request_,
                           ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
  CALL_REQUEST(setEntry_i32(requests_[PREVIEW_REQUEST_IDX].request_,
                            ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_));
  CALL_REQUEST(setEntry_i64(requests_[PREVIEW_REQUEST_IDX].request_,
                            ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime_));
}
// Tear everything down in the reverse order of construction: stop/close the
// session, free each request's target/output/window, free the container,
// close the devices, then unregister callbacks and delete the manager.
// `valid_` is cleared first so in-flight callbacks become no-ops.
NDKCamera::~NDKCamera() {
  valid_ = false;
  // stop session if it is on:
  if (captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  }
  ACameraCaptureSession_close(captureSession_);
  for (auto& req : requests_) {
    CALL_REQUEST(removeTarget(req.request_, req.target_));
    ACaptureRequest_free(req.request_);
    ACameraOutputTarget_free(req.target_);
    CALL_CONTAINER(remove(outputContainer_, req.sessionOutput_));
    ACaptureSessionOutput_free(req.sessionOutput_);
    // Balances the ANativeWindow_acquire() done in CreateSession().
    ANativeWindow_release(req.outputNativeWindow_);
  }
  requests_.resize(0);
  ACaptureSessionOutputContainer_free(outputContainer_);
  for (auto& cam : cameras_) {
    if (cam.second.device_) {
      CALL_DEV(close(cam.second.device_));
    }
  }
  cameras_.clear();
  if (cameraMgr_) {
    CALL_MGR(unregisterAvailabilityCallback(cameraMgr_, GetManagerListener()));
    ACameraManager_delete(cameraMgr_);
    cameraMgr_ = nullptr;
  }
}
/**
 * EnumerateCamera()
 * Loop through cameras on the system, pick up
 * 1) back facing one if available
 * 2) otherwise pick the first one reported to us
 */
void NDKCamera::EnumerateCamera() {
  ACameraIdList* cameraIds = nullptr;
  CALL_MGR(getCameraIdList(cameraMgr_, &cameraIds));
  for (int i = 0; i < cameraIds->numCameras; ++i) {
    const char* id = cameraIds->cameraIds[i];
    ACameraMetadata* metadataObj;
    CALL_MGR(getCameraCharacteristics(cameraMgr_, id, &metadataObj));
    int32_t count = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(metadataObj, &count, &tags);
    // Scan this camera's metadata tags for LENS_FACING to classify it.
    for (int tagIdx = 0; tagIdx < count; ++tagIdx) {
      if (ACAMERA_LENS_FACING == tags[tagIdx]) {
        ACameraMetadata_const_entry lensInfo = {
            0,
        };
        CALL_METADATA(getConstEntry(metadataObj, tags[tagIdx], &lensInfo));
        CameraId cam(id);
        cam.facing_ = static_cast<acamera_metadata_enum_android_lens_facing_t>(
            lensInfo.data.u8[0]);
        cam.owner_ = false;
        cam.device_ = nullptr;
        cameras_[cam.id_] = cam;
        // Prefer a back-facing camera as the active one (last one wins).
        if (cam.facing_ == ACAMERA_LENS_FACING_BACK) {
          activeCameraId_ = cam.id_;
        }
        break;  // facing tag found; no need to scan remaining tags
      }
    }
    ACameraMetadata_free(metadataObj);
  }
  ASSERT(cameras_.size(), "No Camera Available on the device");
  if (activeCameraId_.length() == 0) {
    // if no back facing camera found, pick up the first one to use...
    activeCameraId_ = cameras_.begin()->second.id_;
  }
  // activeCameraId_ = cameras_.rbegin()->second.id_;
  ACameraManager_deleteCameraIdList(cameraIds);
}
/**
 * GetSensorOrientation()
 * Retrieve current sensor orientation regarding to the phone device
 * orientation
 * SensorOrientation is NOT settable.
 *
 * @param facing optional out: lens facing (cached into cameraFacing_)
 * @param angle  optional out: sensor orientation in degrees (cached into
 *               cameraOrientation_)
 * @return true on success, false when the camera manager is absent
 */
bool NDKCamera::GetSensorOrientation(int32_t* facing, int32_t* angle) {
  if (!cameraMgr_) {
    return false;
  }
  ACameraMetadata* metadataObj;
  ACameraMetadata_const_entry face, orientation;
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  CALL_METADATA(getConstEntry(metadataObj, ACAMERA_LENS_FACING, &face));
  cameraFacing_ = static_cast<int32_t>(face.data.u8[0]);
  CALL_METADATA(
      getConstEntry(metadataObj, ACAMERA_SENSOR_ORIENTATION, &orientation));
  LOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
  ACameraMetadata_free(metadataObj);
  cameraOrientation_ = orientation.data.i32[0];
  // Out-params are optional; the cached members are updated regardless.
  if (facing) *facing = cameraFacing_;
  if (angle) *angle = cameraOrientation_;
  return true;
}
/**
 * StartPreview()
 * Toggle preview start/stop
 * start == true  : begin the repeating preview request
 * start == false : stop it (only legal while the session is ACTIVE)
 */
void NDKCamera::StartPreview(bool start) {
  if (start) {
    CALL_SESSION(setRepeatingRequest(captureSession_, nullptr, 1,
                                     &requests_[PREVIEW_REQUEST_IDX].request_,
                                     nullptr));
  } else if (!start && captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  } else {
    // Stop requested while the session is not ACTIVE: caller/state mismatch.
    ASSERT(false, "Conflict states(%s, %d)", (start ? "true" : "false"),
           captureSessionState_);
  }
}
/**
 * Capture one jpg photo into
 *   /sdcard/DCIM/Camera
 * refer to WriteFile() for details
 * @return true (the capture request was submitted; completion is reported
 *         asynchronously through the capture callbacks)
 */
bool NDKCamera::TakePhoto(void) {
  // Pause the repeating preview request so the still capture can run.
  if (captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  }
  // One-shot capture; the sequence id is stored for the callbacks to match.
  CALL_SESSION(capture(captureSession_, GetCaptureCallback(), 1,
                       &requests_[JPG_CAPTURE_REQUEST_IDX].request_,
                       &requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_));
  return true;
}
/**
 * Apply a manual-control change to the preview request and restart the
 * repeating request so it takes effect immediately.
 * @param code ACAMERA_SENSOR_EXPOSURE_TIME or ACAMERA_SENSOR_SENSITIVITY
 * @param val  new absolute value for the selected control
 */
void NDKCamera::UpdateCameraRequestParameter(int32_t code, int64_t val) {
  ACaptureRequest* request = requests_[PREVIEW_REQUEST_IDX].request_;
  switch (code) {
    case ACAMERA_SENSOR_EXPOSURE_TIME:
      // Silently ignored when the device reports no adjustable range.
      if (exposureRange_.Supported()) {
        exposureTime_ = val;
        CALL_REQUEST(setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1,
                                  &exposureTime_));
      }
      break;
    case ACAMERA_SENSOR_SENSITIVITY:
      if (sensitivityRange_.Supported()) {
        sensitivity_ = val;
        CALL_REQUEST(setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1,
                                  &sensitivity_));
      }
      break;
    default:
      ASSERT(false, "==ERROR==: error code for CameraParameterChange: %d",
             code);
      return;
  }
  // Manual control implies auto-exposure off on the preview stream.
  uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
  CALL_REQUEST(setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
  CALL_SESSION(
      setRepeatingRequest(captureSession_, nullptr, 1, &request,
                          &requests_[PREVIEW_REQUEST_IDX].sessionSequenceId_));
}
/**
 * Retrieve the camera's adjustable exposure-time range and current value.
 *
 * @param min    out: minimum exposure time in nanoseconds
 * @param max    out: maximum exposure time in nanoseconds
 * @param curVal out: currently applied exposure time in nanoseconds
 *
 * @return true  when all three outputs were filled in
 *         false when the camera is uninitialized, exposure is unsupported,
 *               or any output pointer is null
 */
bool NDKCamera::GetExposureRange(int64_t* min, int64_t* max, int64_t* curVal) {
  const bool outputsValid = (min != nullptr) && (max != nullptr) &&
                            (curVal != nullptr);
  if (!outputsValid || !exposureRange_.Supported() || exposureTime_ == 0) {
    return false;
  }
  *min = exposureRange_.min_;
  *max = exposureRange_.max_;
  *curVal = exposureTime_;
  return true;
}
/**
 * Retrieve the camera's adjustable sensitivity (ISO) range and current value.
 *
 * @param min    out: minimum sensitivity
 * @param max    out: maximum sensitivity
 * @param curVal out: currently applied sensitivity
 *
 * @return true  when all three outputs were filled in
 *         false when the camera is uninitialized, sensitivity is
 *               unsupported, or any output pointer is null
 */
bool NDKCamera::GetSensitivityRange(int64_t* min, int64_t* max,
                                    int64_t* curVal) {
  const bool outputsValid = (min != nullptr) && (max != nullptr) &&
                            (curVal != nullptr);
  if (!outputsValid || !sensitivityRange_.Supported() || sensitivity_ == 0) {
    return false;
  }
  // Widen the int32 range values to the caller's int64 slots.
  *min = static_cast<int64_t>(sensitivityRange_.min_);
  *max = static_cast<int64_t>(sensitivityRange_.max_);
  *curVal = sensitivity_;
  return true;
}

@ -0,0 +1,139 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CAMERA_NATIVE_CAMERA_H
#define CAMERA_NATIVE_CAMERA_H
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadataTags.h>
#include <map>
#include <string>
#include <vector>
#include "image_reader.h"
// Lifecycle states tracked for the capture session (driven by the session
// state callbacks; see NDKCamera::OnSessionState).
enum class CaptureSessionState : int32_t {
  READY = 0,  // session is ready
  ACTIVE,     // session is busy
  CLOSED,     // session is closed(by itself or a new session evicts)
  MAX_STATE
};
/**
 * RangeValue
 * Holds an inclusive [min_, max_] range for a camera control (exposure time,
 * sensitivity, ...) and maps a percentage onto that range.
 */
template <typename T>
class RangeValue {
 public:
  T min_, max_;
  /**
   * return absolute value from relative value
   * value: in percent (50 for 50%)
   * Marked const: it only reads min_/max_ (const-correctness fix so it can
   * be called through const references).
   */
  T value(int percent) const {
    return static_cast<T>(min_ + (max_ - min_) * percent / 100);
  }
  RangeValue() { min_ = max_ = static_cast<T>(0); }
  // A range is usable only when min and max differ; a default-constructed
  // (0, 0) range therefore reports unsupported.
  bool Supported(void) const { return (min_ != max_); }
};
// Indices into NDKCamera::requests_: one slot drives the preview stream,
// one drives JPEG still capture.
enum PREVIEW_INDICES {
  PREVIEW_REQUEST_IDX = 0,
  JPG_CAPTURE_REQUEST_IDX,
  CAPTURE_REQUEST_COUNT,  // number of request slots, not a real index
};
// Everything needed to drive one capture request against one output window.
struct CaptureRequestInfo {
  ANativeWindow* outputNativeWindow_;     // destination surface
  ACaptureSessionOutput* sessionOutput_;  // session output wrapping the window
  ACameraOutputTarget* target_;           // request target for the window
  ACaptureRequest* request_;              // the capture request itself
  ACameraDevice_request_template template_;  // PREVIEW or STILL_CAPTURE
  int sessionSequenceId_;  // sequence id returned when the request is queued
};
class CameraId;
/**
 * NDKCamera
 * Owns the NDK camera manager, the enumerated camera devices, the capture
 * session with its preview/JPEG requests, and cached manual-control
 * (exposure/sensitivity) state.
 */
class NDKCamera {
 private:
  ACameraManager* cameraMgr_;
  std::map<std::string, CameraId> cameras_;  // keyed by camera id string
  std::string activeCameraId_;               // camera selected for capture
  uint32_t cameraFacing_;
  uint32_t cameraOrientation_;
  // One entry per PREVIEW_INDICES slot (preview + JPEG still capture).
  std::vector<CaptureRequestInfo> requests_;
  ACaptureSessionOutputContainer* outputContainer_;
  ACameraCaptureSession* captureSession_;
  CaptureSessionState captureSessionState_;
  // set up exposure control
  int64_t exposureTime_;
  RangeValue<int64_t> exposureRange_;
  int32_t sensitivity_;
  RangeValue<int32_t> sensitivityRange_;
  // Cleared in the destructor so late NDK callbacks can bail out.
  volatile bool valid_;
  ACameraManager_AvailabilityCallbacks* GetManagerListener();
  ACameraDevice_stateCallbacks* GetDeviceListener();
  ACameraCaptureSession_stateCallbacks* GetSessionListener();
  ACameraCaptureSession_captureCallbacks* GetCaptureCallback();
 public:
  NDKCamera();
  ~NDKCamera();
  void EnumerateCamera(void);
  bool MatchCaptureSizeRequest(ANativeWindow* display, ImageFormat* view,
                               ImageFormat* capture);
  void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow,
                     int32_t imageRotation);
  bool GetSensorOrientation(int32_t* facing, int32_t* angle);
  // NDK callback entry points (camera availability / device / session).
  void OnCameraStatusChanged(const char* id, bool available);
  void OnDeviceState(ACameraDevice* dev);
  void OnDeviceError(ACameraDevice* dev, int err);
  void OnSessionState(ACameraCaptureSession* ses, CaptureSessionState state);
  void OnCaptureSequenceEnd(ACameraCaptureSession* session, int sequenceId,
                            int64_t frameNumber);
  void OnCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request,
                       ACameraCaptureFailure* failure);
  void StartPreview(bool start);
  bool TakePhoto(void);
  bool GetExposureRange(int64_t* min, int64_t* max, int64_t* curVal);
  bool GetSensitivityRange(int64_t* min, int64_t* max, int64_t* curVal);
  void UpdateCameraRequestParameter(int32_t code, int64_t val);
};
// helper classes to hold enumerated camera
class CameraId {
 public:
  ACameraDevice* device_;  // open device handle; nullptr until opened
  std::string id_;         // camera id string reported by the manager
  acamera_metadata_enum_android_lens_facing_t facing_;
  bool available_;  // free to use ( no other apps are using
  bool owner_;      // we are the owner of the camera
  explicit CameraId(const char* id)
      : device_(nullptr),
        facing_(ACAMERA_LENS_FACING_FRONT),
        available_(false),
        owner_(false) {
    id_ = id;
  }
  // BUG FIX: the previous default constructor executed `CameraId("")` as a
  // statement, which built and discarded a temporary and left this object's
  // POD members (device_, facing_, available_, owner_) uninitialized.
  // Delegate to the main constructor instead (C++11 delegating ctor).
  explicit CameraId(void) : CameraId("") {}
};
#endif // CAMERA_NATIVE_CAMERA_H

@ -0,0 +1,156 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utils/native_debug.h>
#include "camera_engine.h"
/**
 * Retrieve current rotation from Java side
 *
 * @return current rotation angle
 */
int CameraEngine::GetDisplayRotation() {
  ASSERT(app_, "Application is not initialized");
  JNIEnv *env;
  ANativeActivity *activity = app_->activity;
  activity->vm->GetEnv((void **)&env, JNI_VERSION_1_6);
  // NOTE(review): AttachCurrentThread is called unconditionally and paired
  // with DetachCurrentThread below -- presumably this native thread is not
  // otherwise attached to the VM; confirm, since detaching an
  // already-attached thread would invalidate its JNIEnv elsewhere.
  activity->vm->AttachCurrentThread(&env, NULL);
  jobject activityObj = env->NewGlobalRef(activity->clazz);
  jclass clz = env->GetObjectClass(activityObj);
  // Call the Java-side helper `int getRotationDegree()` on the activity.
  jint newOrientation = env->CallIntMethod(
      activityObj, env->GetMethodID(clz, "getRotationDegree", "()I"));
  env->DeleteGlobalRef(activityObj);
  activity->vm->DetachCurrentThread();
  return newOrientation;
}
/**
 * Initializate UI on Java side. The 2 seekBars' values are passed in
 * array in the tuple of ( min, max, curVal )
 *   0: exposure min
 *   1: exposure max
 *   2: exposure val
 *   3: sensitivity min
 *   4: sensitivity max
 *   5: sensitivity val
 */
const int kInitDataLen = 6;
void CameraEngine::EnableUI(void) {
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread(&jni, NULL);
  int64_t range[3];
  // Default class retrieval
  jclass clazz = jni->GetObjectClass(app_->activity->clazz);
  jmethodID methodID = jni->GetMethodID(clazz, "EnableUI", "([J)V");
  jlongArray initData = jni->NewLongArray(kInitDataLen);
  ASSERT(initData && methodID, "JavaUI interface Object failed(%p, %p)",
         methodID, initData);
  // Slots 0-2: exposure (min, max, current); zeroed when unsupported.
  if (!camera_->GetExposureRange(&range[0], &range[1], &range[2])) {
    memset(range, 0, sizeof(int64_t) * 3);
  }
  jni->SetLongArrayRegion(initData, 0, 3, range);
  // Slots 3-5: sensitivity (min, max, current); zeroed when unsupported.
  if (!camera_->GetSensitivityRange(&range[0], &range[1], &range[2])) {
    memset(range, 0, sizeof(int64_t) * 3);
  }
  jni->SetLongArrayRegion(initData, 3, 3, range);
  // Hand the packed array to the Java-side EnableUI([J)V.
  jni->CallVoidMethod(app_->activity->clazz, methodID, initData);
  app_->activity->vm->DetachCurrentThread();
}
/**
 * UI hook: forward a take-photo request to the camera object. A no-op when
 * no camera has been created yet. The JPEG lands under /sdcard/DCIM/Camera.
 */
void CameraEngine::OnTakePhoto() {
  if (!camera_) {
    return;  // camera not initialized yet; nothing to capture
  }
  camera_->TakePhoto();
}
// Notify the Java side (OnPhotoTaken(String)) that a JPEG file was written.
// The attach/detach pair suggests this is invoked from a native (non-Java)
// thread -- NOTE(review): confirm against the caller.
void CameraEngine::OnPhotoTaken(const char *fileName) {
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread(&jni, NULL);
  // Default class retrieval
  jclass clazz = jni->GetObjectClass(app_->activity->clazz);
  jmethodID methodID =
      jni->GetMethodID(clazz, "OnPhotoTaken", "(Ljava/lang/String;)V");
  jstring javaName = jni->NewStringUTF(fileName);
  jni->CallVoidMethod(app_->activity->clazz, methodID, javaName);
  app_->activity->vm->DetachCurrentThread();
}
/**
 * Process user camera and disk writing permission.
 * When the user grants access, resume application initialization via
 * OnAppInitWindow(); when denied, do nothing -- the app runs without camera.
 *
 * @param granted user's authorization for camera and disk usage.
 */
void CameraEngine::OnCameraPermission(jboolean granted) {
  cameraGranted_ = (granted != JNI_FALSE);
  if (!cameraGranted_) {
    return;  // permission denied: stay idle, no camera
  }
  OnAppInitWindow();
}
/**
 * A couple UI handles ( from UI )
 *     user camera and disk permission
 *     exposure and sensitivity SeekBars
 *     takePhoto button
 */
// Permission dialog result: hand off to a detached worker so the JNI thread
// is not blocked by app (re)initialization.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_notifyCameraPermission(
    JNIEnv *env, jclass type, jboolean permission) {
  std::thread permissionHandler(&CameraEngine::OnCameraPermission,
                                GetAppEngine(), permission);
  permissionHandler.detach();
}
// "Take photo" button: run the capture on a detached worker thread.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_TakePhoto(JNIEnv *env,
                                                      jclass type) {
  std::thread takePhotoHandler(&CameraEngine::OnTakePhoto, GetAppEngine());
  takePhotoHandler.detach();
}
// Exposure SeekBar moved: forward the new percent value to the engine.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_OnExposureChanged(
    JNIEnv *env, jobject instance, jlong exposurePercent) {
  GetAppEngine()->OnCameraParameterChanged(ACAMERA_SENSOR_EXPOSURE_TIME,
                                           exposurePercent);
}
// Sensitivity SeekBar moved: forward the new value to the engine.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_OnSensitivityChanged(
    JNIEnv *env, jobject instance, jlong sensitivity) {
  GetAppEngine()->OnCameraParameterChanged(ACAMERA_SENSOR_SENSITIVITY,
                                           sensitivity);
}

@ -0,0 +1,459 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "image_reader.h"
#include <dirent.h>
#include <cstdlib>
#include <ctime>
#include <functional>
#include <string>
#include <thread>
#include "utils/native_debug.h"
/*
* For JPEG capture, captured files are saved under
* DirName
* File names are incrementally appended an index number as
* capture0.jpg, capture1.jpg, capture2.jpg
*/
static const char *kDirName = "/sdcard/DCIM/Camera/";
static const char *kFileName = "capture";
/**
* MAX_BUF_COUNT:
* Max buffers in this ImageReader.
*/
#define MAX_BUF_COUNT 4
/**
* ImageReader listener: called by AImageReader for every frame captured
* We pass the event to ImageReader class, so it could do some housekeeping
* about
* the loaded queue. For example, we could keep a counter to track how many
* buffers are full and idle in the queue. If camera almost has no buffer to
* capture
* we could release ( skip ) some frames by AImageReader_getNextImage() and
* AImageReader_delete().
*/
// C trampoline: route the AImageReader frame callback to the ImageReader
// instance that was registered as the listener context.
void OnImageCallback(void *ctx, AImageReader *reader) {
  reinterpret_cast<ImageReader *>(ctx)->ImageCallback(reader);
}
/**
 * Constructor
 * Creates the underlying AImageReader with MAX_BUF_COUNT buffers at the
 * requested resolution/format and installs the frame-available listener.
 */
ImageReader::ImageReader(ImageFormat *res, enum AIMAGE_FORMATS format)
    : presentRotation_(0), reader_(nullptr) {
  callback_ = nullptr;
  callbackCtx_ = nullptr;
  media_status_t status = AImageReader_new(res->width, res->height, format,
                                           MAX_BUF_COUNT, &reader_);
  ASSERT(reader_ && status == AMEDIA_OK, "Failed to create AImageReader");
  // Route each captured frame to ImageReader::ImageCallback via the C
  // trampoline OnImageCallback.
  AImageReader_ImageListener listener{
      .context = this,
      .onImageAvailable = OnImageCallback,
  };
  AImageReader_setImageListener(reader_, &listener);
}
// Destructor: releases the underlying AImageReader (and its buffer queue).
ImageReader::~ImageReader() {
  ASSERT(reader_, "NULL Pointer to %s", __FUNCTION__);
  AImageReader_delete(reader_);
}
/**
 * Register a client callback invoked after a JPEG has been written to disk
 * (see WriteFile()).
 * @param ctx  opaque client context handed back on every invocation
 * @param func callback receiving ctx and the written file's path
 */
void ImageReader::RegisterCallback(
    void *ctx, std::function<void(void *ctx, const char *fileName)> func) {
  callbackCtx_ = ctx;
  // Take over the by-value std::function's state instead of copying it
  // (swap avoids a potentially expensive copy of captured state).
  callback_.swap(func);
}
// Frame-available handler: for JPEG frames, acquire the image and hand it to
// a detached writer thread. Non-JPEG formats are ignored here.
void ImageReader::ImageCallback(AImageReader *reader) {
  int32_t format;
  media_status_t status = AImageReader_getFormat(reader, &format);
  ASSERT(status == AMEDIA_OK, "Failed to get the media format");
  if (format == AIMAGE_FORMAT_JPEG) {
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    ASSERT(status == AMEDIA_OK && image, "Image is not available");
    // Create a thread and write out the jpeg files
    // (WriteFile takes ownership of `image` and deletes it when done).
    std::thread writeFileHandler(&ImageReader::WriteFile, this, image);
    writeFileHandler.detach();
  }
}
// Return the ANativeWindow backing this reader (used as a capture-session
// output surface); nullptr when the reader was never created.
ANativeWindow *ImageReader::GetNativeWindow(void) {
  if (!reader_) return nullptr;
  ANativeWindow *nativeWindow;
  media_status_t status = AImageReader_getWindow(reader_, &nativeWindow);
  ASSERT(status == AMEDIA_OK, "Could not get ANativeWindow");
  return nativeWindow;
}
/**
 * GetNextImage()
 * Pop the oldest image from the reader's queue, so no frame is skipped.
 * Recommended for batch/background processing.
 * @return the acquired AImage, or nullptr when none is available.
 */
AImage *ImageReader::GetNextImage(void) {
  AImage *image = nullptr;
  const media_status_t rc = AImageReader_acquireNextImage(reader_, &image);
  return (rc == AMEDIA_OK) ? image : nullptr;
}
/**
 * GetLatestImage()
 * Pop the newest image from the reader's queue, discarding any older queued
 * frames in front of it. Recommended for real-time processing.
 * @return the acquired AImage, or nullptr when none is available.
 */
AImage *ImageReader::GetLatestImage(void) {
  AImage *image = nullptr;
  const media_status_t rc = AImageReader_acquireLatestImage(reader_, &image);
  return (rc == AMEDIA_OK) ? image : nullptr;
}
/**
 * Delete Image
 * @param image {@link AImage} instance to be deleted; null is a no-op
 */
void ImageReader::DeleteImage(AImage *image) {
  if (image == nullptr) {
    return;
  }
  AImage_delete(image);
}
/**
* Helper function for YUV_420 to RGB conversion. Courtesy of Tensorflow
* ImageClassifier Sample:
* https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/android/jni/yuv2rgb.cc
* The difference is that here we have to swap UV plane when calling it.
*/
#ifndef MAX
#define MAX(a, b)           \
  ({                        \
    __typeof__(a) _a = (a); \
    __typeof__(b) _b = (b); \
    _a > _b ? _a : _b;      \
  })
#define MIN(a, b)           \
  ({                        \
    __typeof__(a) _a = (a); \
    __typeof__(b) _b = (b); \
    _a < _b ? _a : _b;      \
  })
#endif
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges
// are normalized to eight bits.
static const int kMaxChannelValue = 262143;
/**
 * Convert one YUV pixel to packed ARGB (0xffRRGGBB) using the fixed-point
 * integer approximation below (scaled by 1024, i.e. 10 fractional bits).
 */
static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
  // Offset luma and recentre chroma around zero.
  nY -= 16;
  nU -= 128;
  nV -= 128;
  if (nY < 0) nY = 0;
  // Integer equivalents of the floating-point formula (kept in integer
  // because some Android devices do not have floating point in hardware):
  //   R = 1.164*Y + 1.596*V
  //   G = 1.164*Y - 0.813*V - 0.391*U
  //   B = 1.164*Y + 2.018*U
  int red = 1192 * nY + 1634 * nV;
  int green = 1192 * nY - 833 * nV - 400 * nU;
  int blue = 1192 * nY + 2066 * nU;
  // Clamp each channel into [0, kMaxChannelValue] ...
  red = (red < 0) ? 0 : ((red > kMaxChannelValue) ? kMaxChannelValue : red);
  green =
      (green < 0) ? 0 : ((green > kMaxChannelValue) ? kMaxChannelValue : green);
  blue = (blue < 0) ? 0 : ((blue > kMaxChannelValue) ? kMaxChannelValue : blue);
  // ... then drop the 10 fractional bits to get 8-bit channels.
  red = (red >> 10) & 0xff;
  green = (green >> 10) & 0xff;
  blue = (blue >> 10) & 0xff;
  return 0xff000000 | (red << 16) | (green << 8) | blue;
}
/**
 * Convert yuv image inside AImage into ANativeWindow_Buffer
 * ANativeWindow_Buffer format is guaranteed to be
 *      WINDOW_FORMAT_RGBX_8888
 *      WINDOW_FORMAT_RGBA_8888
 * @param buf a {@link ANativeWindow_Buffer } instance, destination of
 *            image conversion
 * @param image a {@link AImage} instance, source of image conversion.
 *            it will be deleted via {@link AImage_delete}
 * @return true (the image is always consumed and deleted on return)
 */
bool ImageReader::DisplayImage(ANativeWindow_Buffer *buf, AImage *image) {
  // Only 32-bit RGBA/RGBX destinations are supported.
  ASSERT(buf->format == WINDOW_FORMAT_RGBX_8888 ||
             buf->format == WINDOW_FORMAT_RGBA_8888,
         "Not supported buffer format");
  int32_t srcFormat = -1;
  AImage_getFormat(image, &srcFormat);
  ASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
  int32_t srcPlanes = 0;
  AImage_getNumberOfPlanes(image, &srcPlanes);
  ASSERT(srcPlanes == 3, "Is not 3 planes");
  // Dispatch on the display rotation configured via SetPresentRotation().
  switch (presentRotation_) {
    case 0:
      PresentImage(buf, image);
      break;
    case 90:
      PresentImage90(buf, image);
      break;
    case 180:
      PresentImage180(buf, image);
      break;
    case 270:
      PresentImage270(buf, image);
      break;
    default:
      ASSERT(0, "NOT recognized display rotation: %d", presentRotation_);
  }
  AImage_delete(image);
  return true;
}
/*
 * PresentImage()
 * Converting yuv to RGB
 * No rotation: (x,y) --> (x, y)
 * Refer to:
 * https://mathbits.com/MathBits/TISection/Geometry/Transformations2.htm
 */
void ImageReader::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  // Plane 1 is read into vPixel and plane 2 into uPixel: the U/V planes are
  // deliberately swapped for the YUV2RGB helper (see the converter comment).
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &vPixel, &vLen);
  AImage_getPlaneData(image, 2, &uPixel, &uLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Convert only the overlap of the display buffer and the crop rect.
  int32_t height = MIN(buf->height, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->width, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    // Chroma is 2x subsampled both ways, hence the >> 1 row/column math.
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out += buf->stride;  // advance one output row
  }
}
/*
 * PresentImage90()
 * Converting YUV to RGB
 * Rotation image anti-clockwise 90 degree -- (x, y) --> (-y, x)
 */
void ImageReader::PresentImage90(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  // U/V planes intentionally swapped for YUV2RGB (see converter comment).
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &vPixel, &vLen);
  AImage_getPlaneData(image, 2, &uPixel, &uLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Width/height swap: source rows become destination columns.
  int32_t height = MIN(buf->width, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->height, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  out += height - 1;  // start at the right-most destination column
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      // [x, y]--> [-y, x]
      out[x * buf->stride] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out -= 1;  // move to the next column
  }
}
/*
 * PresentImage180()
 * Converting yuv to RGB
 * Rotate image 180 degree: (x, y) --> (-x, -y)
 */
void ImageReader::PresentImage180(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  // U/V planes intentionally swapped for YUV2RGB (see converter comment).
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &vPixel, &vLen);
  AImage_getPlaneData(image, 2, &uPixel, &uLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  int32_t height = MIN(buf->height, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->width, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  out += (height - 1) * buf->stride;  // start at the bottom destination row
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      // mirror image since we are using front camera
      out[width - 1 - x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
      // out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out -= buf->stride;  // move up one destination row
  }
}
/*
 * PresentImage270()
 * Converting image from YUV to RGB
 * Rotate Image counter-clockwise 270 degree: (x, y) --> (y, x)
 */
void ImageReader::PresentImage270(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  // U/V planes intentionally swapped for YUV2RGB (see converter comment).
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &vPixel, &vLen);
  AImage_getPlaneData(image, 2, &uPixel, &uLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Width/height swap: source rows map onto destination columns.
  int32_t height = MIN(buf->width, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->height, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      out[(width - 1 - x) * buf->stride] =
          YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out += 1;  // move to the next column
  }
}
// Store the accumulated display rotation (0/90/180/270) that DisplayImage()
// uses to pick a presentation path.
void ImageReader::SetPresentRotation(int32_t angle) {
  presentRotation_ = angle;
}
/**
 * Write out jpeg files to kDirName directory.
 * Runs on a detached thread spawned by ImageCallback(); takes ownership of
 * `image` and deletes it before returning. Invokes the registered callback
 * (if any) with the written file's path.
 * @param image point capture jpg image
 */
void ImageReader::WriteFile(AImage *image) {
  // A JPEG AImage exposes exactly one plane holding the encoded bytes.
  int planeCount;
  media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
  ASSERT(status == AMEDIA_OK && planeCount == 1,
         "Error: getNumberOfPlanes() planeCount = %d", planeCount);
  uint8_t *data = nullptr;
  int len = 0;
  AImage_getPlaneData(image, 0, &data, &len);
  // Make sure the output directory exists.
  DIR *dir = opendir(kDirName);
  if (dir) {
    closedir(dir);
  } else {
    std::string cmd = "mkdir -p ";
    cmd += kDirName;
    system(cmd.c_str());
  }
  struct timespec ts {
    0, 0
  };
  clock_gettime(CLOCK_REALTIME, &ts);
  struct tm localTime;
  localtime_r(&ts.tv_sec, &localTime);
  std::string fileName = kDirName;
  std::string dash("-");
  // BUG FIX: tm_mon is zero-based (January == 0); add 1 so file names carry
  // the human-readable month.
  fileName += kFileName + std::to_string(localTime.tm_mon + 1) +
              std::to_string(localTime.tm_mday) + dash +
              std::to_string(localTime.tm_hour) +
              std::to_string(localTime.tm_min) +
              std::to_string(localTime.tm_sec) + ".jpg";
  FILE *file = fopen(fileName.c_str(), "wb");
  if (file && data && len) {
    fwrite(data, 1, len, file);
    fclose(file);
    // Notify the registered client that the photo is on disk.
    if (callback_) {
      callback_(callbackCtx_, fileName.c_str());
    }
  } else {
    if (file) fclose(file);
  }
  AImage_delete(image);
}

@ -0,0 +1,117 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CAMERA_IMAGE_READER_H
#define CAMERA_IMAGE_READER_H
#include <media/NdkImageReader.h>
#include <functional>
/*
 * ImageFormat:
 * A Data Structure to communicate resolution between camera and ImageReader
 */
struct ImageFormat {
  int32_t width;   // in pixels
  int32_t height;  // in pixels
  int32_t format;  // Through out this demo, the format is fixed to
                   // YUV_420 format
};
/**
 * ImageReader
 * Thin C++ wrapper around NDK AImageReader: owns the reader, exposes its
 * ANativeWindow as a capture target, converts YUV frames for display, and
 * writes captured JPEGs to disk.
 */
class ImageReader {
 public:
  /**
   * Ctor and Dtor()
   */
  explicit ImageReader(ImageFormat* res, enum AIMAGE_FORMATS format);
  ~ImageReader();
  /**
   * Report cached ANativeWindow, which was used to create camera's capture
   * session output.
   */
  ANativeWindow* GetNativeWindow(void);
  /**
   * Retrieve Image on the top of Reader's queue
   */
  AImage* GetNextImage(void);
  /**
   * Retrieve Image on the back of Reader's queue, dropping older images
   */
  AImage* GetLatestImage(void);
  /**
   * Delete Image
   * @param image {@link AImage} instance to be deleted
   */
  void DeleteImage(AImage* image);
  /**
   * AImageReader callback handler. Called by AImageReader when a frame is
   * captured
   * (Internal function, not to be called by clients)
   */
  void ImageCallback(AImageReader* reader);
  /**
   * DisplayImage()
   * Present camera image to the given display buffer. Avaliable image is
   * converted
   * to display buffer format. Supported display format:
   *    WINDOW_FORMAT_RGBX_8888
   *    WINDOW_FORMAT_RGBA_8888
   * @param buf {@link ANativeWindow_Buffer} for image to display to.
   * @param image a {@link AImage} instance, source of image conversion.
   *            it will be deleted via {@link AImage_delete}
   * @return true on success, false on failure
   */
  bool DisplayImage(ANativeWindow_Buffer* buf, AImage* image);
  /**
   * Configure the rotation angle necessary to apply to
   * Camera image when presenting: all rotations should be accumulated:
   *    CameraSensorOrientation + Android Device Native Orientation +
   *    Human Rotation (rotated degree related to Phone native orientation
   */
  void SetPresentRotation(int32_t angle);
  /**
   * regsiter a callback function for client to be notified that jpeg already
   * written out.
   * @param ctx is client context when callback is invoked
   * @param callback is the actual callback function
   */
  void RegisterCallback(void* ctx,
                        std::function<void(void* ctx, const char* fileName)>);
 private:
  int32_t presentRotation_;  // display rotation set via SetPresentRotation()
  AImageReader* reader_;     // the wrapped NDK reader
  // Client notification hook invoked after a JPEG is written.
  std::function<void(void* ctx, const char* fileName)> callback_;
  void* callbackCtx_;
  // YUV->RGB presenters, one per supported display rotation.
  void PresentImage(ANativeWindow_Buffer* buf, AImage* image);
  void PresentImage90(ANativeWindow_Buffer* buf, AImage* image);
  void PresentImage180(ANativeWindow_Buffer* buf, AImage* image);
  void PresentImage270(ANativeWindow_Buffer* buf, AImage* image);
  void WriteFile(AImage* image);
};
#endif // CAMERA_IMAGE_READER_H

@ -0,0 +1,459 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_utils.h"
#include <camera/NdkCameraManager.h>
#include <media/NdkImage.h>
#include <cinttypes>
#include <string>
#include <typeinfo>
#include <utility>
#include <vector>
#include "utils/native_debug.h"
// Fallback string returned when a lookup key is not in a translation table.
// Fix: the string literal itself was misspelled ("UNKNOW_TAG").
#define UKNOWN_TAG "UNKNOWN_TAG"
// Expands an enumerator into a {value, "value-name"} pair for the tables below.
#define MAKE_PAIR(val) std::make_pair(val, #val)
/*
 * Look up the human-readable name paired with `key` in `store`.
 * Returns UKNOWN_TAG (and logs a warning) when the key is absent.
 */
template <typename T>
const char* GetPairStr(T key, std::vector<std::pair<T, const char*>>& store) {
  for (const auto& entry : store) {
    if (entry.first == key) {
      return entry.second;
    }
  }
  LOGW("(%#08x) : UNKNOWN_TAG for %s", key, typeid(store[0].first).name());
  return UKNOWN_TAG;
}
/*
 * camera_status_t error translation
 */
// Table mapping every NDK camera_status_t enumerator to its symbol name.
using ERROR_PAIR = std::pair<camera_status_t, const char*>;
static std::vector<ERROR_PAIR> errorInfo{
    MAKE_PAIR(ACAMERA_OK),
    MAKE_PAIR(ACAMERA_ERROR_UNKNOWN),
    MAKE_PAIR(ACAMERA_ERROR_INVALID_PARAMETER),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DISCONNECTED),
    MAKE_PAIR(ACAMERA_ERROR_NOT_ENOUGH_MEMORY),
    MAKE_PAIR(ACAMERA_ERROR_METADATA_NOT_FOUND),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DEVICE),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_SERVICE),
    MAKE_PAIR(ACAMERA_ERROR_SESSION_CLOSED),
    MAKE_PAIR(ACAMERA_ERROR_INVALID_OPERATION),
    MAKE_PAIR(ACAMERA_ERROR_STREAM_CONFIGURE_FAIL),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_IN_USE),
    MAKE_PAIR(ACAMERA_ERROR_MAX_CAMERA_IN_USE),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DISABLED),
    MAKE_PAIR(ACAMERA_ERROR_PERMISSION_DENIED),
};
// Translate a camera_status_t into its enumerator name for logging.
const char* GetErrorStr(camera_status_t err) {
  return GetPairStr<camera_status_t>(err, errorInfo);
}
/*
 * camera_metadata_tag_t translation. Useful to look at available tags
 * on the underneath platform
 */
// Table mapping ACAMERA_* metadata tag enumerators to their symbol names.
// NOTE(review): hand-maintained; must be kept in sync with the NDK's
// acamera_metadata_tag_t enum when targeting newer platform headers.
using TAG_PAIR = std::pair<acamera_metadata_tag_t, const char*>;
static std::vector<TAG_PAIR> tagInfo{
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_MODE),
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_TRANSFORM),
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_GAINS),
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_ABERRATION_MODE),
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES),
    MAKE_PAIR(ACAMERA_COLOR_CORRECTION_END),
    MAKE_PAIR(ACAMERA_CONTROL_AE_ANTIBANDING_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION),
    MAKE_PAIR(ACAMERA_CONTROL_AE_LOCK),
    MAKE_PAIR(ACAMERA_CONTROL_AE_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_REGIONS),
    MAKE_PAIR(ACAMERA_CONTROL_AE_TARGET_FPS_RANGE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER),
    MAKE_PAIR(ACAMERA_CONTROL_AF_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_AF_REGIONS),
    MAKE_PAIR(ACAMERA_CONTROL_AF_TRIGGER),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_LOCK),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_REGIONS),
    MAKE_PAIR(ACAMERA_CONTROL_CAPTURE_INTENT),
    MAKE_PAIR(ACAMERA_CONTROL_EFFECT_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_SCENE_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES),
    MAKE_PAIR(ACAMERA_CONTROL_AE_COMPENSATION_RANGE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_COMPENSATION_STEP),
    MAKE_PAIR(ACAMERA_CONTROL_AF_AVAILABLE_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_EFFECTS),
    MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_SCENE_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_AVAILABLE_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_MAX_REGIONS),
    MAKE_PAIR(ACAMERA_CONTROL_AE_STATE),
    MAKE_PAIR(ACAMERA_CONTROL_AF_STATE),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_STATE),
    MAKE_PAIR(ACAMERA_CONTROL_AE_LOCK_AVAILABLE),
    MAKE_PAIR(ACAMERA_CONTROL_AWB_LOCK_AVAILABLE),
    MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_MODES),
    MAKE_PAIR(ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE),
    MAKE_PAIR(ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST),
    MAKE_PAIR(ACAMERA_CONTROL_END),
    MAKE_PAIR(ACAMERA_EDGE_MODE),
    MAKE_PAIR(ACAMERA_EDGE_AVAILABLE_EDGE_MODES),
    MAKE_PAIR(ACAMERA_EDGE_END),
    MAKE_PAIR(ACAMERA_FLASH_MODE),
    MAKE_PAIR(ACAMERA_FLASH_STATE),
    MAKE_PAIR(ACAMERA_FLASH_END),
    MAKE_PAIR(ACAMERA_FLASH_INFO_AVAILABLE),
    MAKE_PAIR(ACAMERA_FLASH_INFO_END),
    MAKE_PAIR(ACAMERA_HOT_PIXEL_MODE),
    MAKE_PAIR(ACAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES),
    MAKE_PAIR(ACAMERA_HOT_PIXEL_END),
    MAKE_PAIR(ACAMERA_JPEG_GPS_COORDINATES),
    MAKE_PAIR(ACAMERA_JPEG_GPS_PROCESSING_METHOD),
    MAKE_PAIR(ACAMERA_JPEG_GPS_TIMESTAMP),
    MAKE_PAIR(ACAMERA_JPEG_ORIENTATION),
    MAKE_PAIR(ACAMERA_JPEG_QUALITY),
    MAKE_PAIR(ACAMERA_JPEG_THUMBNAIL_QUALITY),
    MAKE_PAIR(ACAMERA_JPEG_THUMBNAIL_SIZE),
    MAKE_PAIR(ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES),
    MAKE_PAIR(ACAMERA_JPEG_END),
    MAKE_PAIR(ACAMERA_LENS_APERTURE),
    MAKE_PAIR(ACAMERA_LENS_FILTER_DENSITY),
    MAKE_PAIR(ACAMERA_LENS_FOCAL_LENGTH),
    MAKE_PAIR(ACAMERA_LENS_FOCUS_DISTANCE),
    MAKE_PAIR(ACAMERA_LENS_OPTICAL_STABILIZATION_MODE),
    MAKE_PAIR(ACAMERA_LENS_FACING),
    MAKE_PAIR(ACAMERA_LENS_POSE_ROTATION),
    MAKE_PAIR(ACAMERA_LENS_POSE_TRANSLATION),
    MAKE_PAIR(ACAMERA_LENS_FOCUS_RANGE),
    MAKE_PAIR(ACAMERA_LENS_STATE),
    MAKE_PAIR(ACAMERA_LENS_INTRINSIC_CALIBRATION),
    MAKE_PAIR(ACAMERA_LENS_RADIAL_DISTORTION),
    MAKE_PAIR(ACAMERA_LENS_END),
    MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_APERTURES),
    MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES),
    MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS),
    MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION),
    MAKE_PAIR(ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE),
    MAKE_PAIR(ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE),
    MAKE_PAIR(ACAMERA_LENS_INFO_SHADING_MAP_SIZE),
    MAKE_PAIR(ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION),
    MAKE_PAIR(ACAMERA_LENS_INFO_END),
    MAKE_PAIR(ACAMERA_NOISE_REDUCTION_MODE),
    MAKE_PAIR(ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES),
    MAKE_PAIR(ACAMERA_NOISE_REDUCTION_END),
    MAKE_PAIR(ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS),
    MAKE_PAIR(ACAMERA_REQUEST_PIPELINE_DEPTH),
    MAKE_PAIR(ACAMERA_REQUEST_PIPELINE_MAX_DEPTH),
    MAKE_PAIR(ACAMERA_REQUEST_PARTIAL_RESULT_COUNT),
    MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_CAPABILITIES),
    MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS),
    MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS),
    MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS),
    MAKE_PAIR(ACAMERA_REQUEST_END),
    MAKE_PAIR(ACAMERA_SCALER_CROP_REGION),
    MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM),
    MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS),
    MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS),
    MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS),
    MAKE_PAIR(ACAMERA_SCALER_CROPPING_TYPE),
    MAKE_PAIR(ACAMERA_SCALER_END),
    MAKE_PAIR(ACAMERA_SENSOR_EXPOSURE_TIME),
    MAKE_PAIR(ACAMERA_SENSOR_FRAME_DURATION),
    MAKE_PAIR(ACAMERA_SENSOR_SENSITIVITY),
    MAKE_PAIR(ACAMERA_SENSOR_REFERENCE_ILLUMINANT1),
    MAKE_PAIR(ACAMERA_SENSOR_REFERENCE_ILLUMINANT2),
    MAKE_PAIR(ACAMERA_SENSOR_CALIBRATION_TRANSFORM1),
    MAKE_PAIR(ACAMERA_SENSOR_CALIBRATION_TRANSFORM2),
    MAKE_PAIR(ACAMERA_SENSOR_COLOR_TRANSFORM1),
    MAKE_PAIR(ACAMERA_SENSOR_COLOR_TRANSFORM2),
    MAKE_PAIR(ACAMERA_SENSOR_FORWARD_MATRIX1),
    MAKE_PAIR(ACAMERA_SENSOR_FORWARD_MATRIX2),
    MAKE_PAIR(ACAMERA_SENSOR_BLACK_LEVEL_PATTERN),
    MAKE_PAIR(ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY),
    MAKE_PAIR(ACAMERA_SENSOR_ORIENTATION),
    MAKE_PAIR(ACAMERA_SENSOR_TIMESTAMP),
    MAKE_PAIR(ACAMERA_SENSOR_NEUTRAL_COLOR_POINT),
    MAKE_PAIR(ACAMERA_SENSOR_NOISE_PROFILE),
    MAKE_PAIR(ACAMERA_SENSOR_GREEN_SPLIT),
    MAKE_PAIR(ACAMERA_SENSOR_TEST_PATTERN_DATA),
    MAKE_PAIR(ACAMERA_SENSOR_TEST_PATTERN_MODE),
    MAKE_PAIR(ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES),
    MAKE_PAIR(ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW),
    MAKE_PAIR(ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS),
    MAKE_PAIR(ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL),
    MAKE_PAIR(ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL),
    MAKE_PAIR(ACAMERA_SENSOR_END),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_PHYSICAL_SIZE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_WHITE_LEVEL),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE),
    MAKE_PAIR(ACAMERA_SENSOR_INFO_END),
    MAKE_PAIR(ACAMERA_SHADING_MODE),
    MAKE_PAIR(ACAMERA_SHADING_AVAILABLE_MODES),
    MAKE_PAIR(ACAMERA_SHADING_END),
    MAKE_PAIR(ACAMERA_STATISTICS_FACE_DETECT_MODE),
    MAKE_PAIR(ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE),
    MAKE_PAIR(ACAMERA_STATISTICS_FACE_IDS),
    MAKE_PAIR(ACAMERA_STATISTICS_FACE_LANDMARKS),
    MAKE_PAIR(ACAMERA_STATISTICS_FACE_RECTANGLES),
    MAKE_PAIR(ACAMERA_STATISTICS_FACE_SCORES),
    MAKE_PAIR(ACAMERA_STATISTICS_LENS_SHADING_MAP),
    MAKE_PAIR(ACAMERA_STATISTICS_SCENE_FLICKER),
    MAKE_PAIR(ACAMERA_STATISTICS_HOT_PIXEL_MAP),
    MAKE_PAIR(ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE),
    MAKE_PAIR(ACAMERA_STATISTICS_END),
    MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
    MAKE_PAIR(ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT),
    MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES),
    MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES),
    MAKE_PAIR(ACAMERA_STATISTICS_INFO_END),
    MAKE_PAIR(ACAMERA_TONEMAP_CURVE_BLUE),
    MAKE_PAIR(ACAMERA_TONEMAP_CURVE_GREEN),
    MAKE_PAIR(ACAMERA_TONEMAP_CURVE_RED),
    MAKE_PAIR(ACAMERA_TONEMAP_MODE),
    MAKE_PAIR(ACAMERA_TONEMAP_MAX_CURVE_POINTS),
    MAKE_PAIR(ACAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES),
    MAKE_PAIR(ACAMERA_TONEMAP_GAMMA),
    MAKE_PAIR(ACAMERA_TONEMAP_PRESET_CURVE),
    MAKE_PAIR(ACAMERA_TONEMAP_END),
    MAKE_PAIR(ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL),
    MAKE_PAIR(ACAMERA_INFO_END),
    MAKE_PAIR(ACAMERA_BLACK_LEVEL_LOCK),
    MAKE_PAIR(ACAMERA_BLACK_LEVEL_END),
    MAKE_PAIR(ACAMERA_SYNC_FRAME_NUMBER),
    MAKE_PAIR(ACAMERA_SYNC_MAX_LATENCY),
    MAKE_PAIR(ACAMERA_SYNC_END),
    MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS),
    MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS),
    MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS),
    MAKE_PAIR(ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE),
    MAKE_PAIR(ACAMERA_DEPTH_END),
};
// Translate a metadata tag into its enumerator name for logging.
const char* GetTagStr(acamera_metadata_tag_t tag) {
  return GetPairStr<acamera_metadata_tag_t>(tag, tagInfo);
}
// Table mapping AIMAGE_FORMAT_* codes to their symbol names, used by
// PrintStreamConfigurations when dumping stream configuration entries.
using FORMAT_PAIR = std::pair<int, const char*>;
static std::vector<FORMAT_PAIR> formatInfo{
    MAKE_PAIR(AIMAGE_FORMAT_YUV_420_888),
    MAKE_PAIR(AIMAGE_FORMAT_JPEG),
    MAKE_PAIR(AIMAGE_FORMAT_RAW16),
    MAKE_PAIR(AIMAGE_FORMAT_RAW_PRIVATE),
    MAKE_PAIR(AIMAGE_FORMAT_RAW10),
    MAKE_PAIR(AIMAGE_FORMAT_RAW12),
    MAKE_PAIR(AIMAGE_FORMAT_DEPTH16),
    MAKE_PAIR(AIMAGE_FORMAT_DEPTH_POINT_CLOUD),
    MAKE_PAIR(AIMAGE_FORMAT_PRIVATE),
};
// Translate an image format code into its enumerator name for logging.
const char* GetFormatStr(int fmt) { return GetPairStr<int>(fmt, formatInfo); }
/*
 * Dump `entries` metadata tags from `pTags` to the log, bracketed by
 * start/end markers, resolving each tag id to its symbol name.
 */
void PrintMetadataTags(int32_t entries, const uint32_t* pTags) {
  LOGI("MetadataTag (start):");
  for (int32_t i = 0; i < entries; ++i) {
    uint32_t tag = pTags[i];
    LOGI("(%#08x) : %s", tag,
         GetTagStr(static_cast<acamera_metadata_tag_t>(tag)));
  }
  LOGI("MetadataTag (end)");
}
// Log an ACAMERA_LENS_FACING entry (tag, type, count and the first byte
// value). Aborts via ASSERT when handed an entry with a different tag.
void PrintLensFacing(ACameraMetadata_const_entry& lens) {
  ASSERT(lens.tag == ACAMERA_LENS_FACING, "Wrong tag(%#x) of %s to %s",
         lens.tag, GetTagStr((acamera_metadata_tag_t)lens.tag), __FUNCTION__);
  LOGI("LensFacing: tag(%#x), type(%d), count(%d), val(%#x)", lens.tag,
       lens.type, lens.count, lens.data.u8[0]);
}
/*
 * Stream_Configuration is in format of:
 *     format, width, height, input?
 * ACAMERA_TYPE_INT32 type
 */
// Pretty-prints one stream-configuration metadata entry: the values come in
// groups of four int32s (format code, width, height, input/output flag).
void PrintStreamConfigurations(ACameraMetadata_const_entry& val) {
  static const char* kModeLabel = "ModeInfo:";
  const char* tagName = GetTagStr(static_cast<acamera_metadata_tag_t>(val.tag));
  ASSERT(!(val.count & 0x3), "STREAM_CONFIGURATION (%d) should multiple of 4",
         val.count);
  ASSERT(val.type == ACAMERA_TYPE_INT32,
         "STREAM_CONFIGURATION TYPE(%d) is not ACAMERA_TYPE_INT32(1)",
         val.type);
  LOGI("%s -- %s:", tagName, kModeLabel);
  for (uint32_t i = 0; i < val.count; i += 4) {
    LOGI("%s: %08d x %08d %s", GetFormatStr(val.data.i32[i]),
         val.data.i32[i + 1], val.data.i32[i + 2],
         val.data.i32[i + 3] ? "INPUT" : "OUTPUT");
  }
}
/*
 * Log every value of one metadata entry, prefixed by `printLabel` and the
 * tag's symbol name. Stream-configuration entries are delegated to
 * PrintStreamConfigurations for structured output.
 */
void PrintTagVal(const char* printLabel, ACameraMetadata_const_entry& val) {
  if (val.tag == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS) {
    PrintStreamConfigurations(val);
    return;
  }
  const char* name = GetTagStr(static_cast<acamera_metadata_tag_t>(val.tag));
  for (uint32_t i = 0; i < val.count; ++i) {
    switch (val.type) {
      case ACAMERA_TYPE_INT32:
        LOGI("%s %s: %08d", printLabel, name, val.data.i32[i]);
        break;
      case ACAMERA_TYPE_BYTE:
        LOGI("%s %s: %#02x", printLabel, name, val.data.u8[i]);
        break;
      case ACAMERA_TYPE_INT64:
        // Fix: the value is signed; PRIu64 misprinted negative values.
        LOGI("%s %s: %" PRId64, printLabel, name, val.data.i64[i]);
        break;
      case ACAMERA_TYPE_FLOAT:
        LOGI("%s %s: %f", printLabel, name, val.data.f[i]);
        break;
      case ACAMERA_TYPE_DOUBLE:
        // Fix: read the entry's double member; the original dumped the i64
        // union member as hex, which is not the stored double value.
        LOGI("%s %s: %f", printLabel, name, val.data.d[i]);
        break;
      case ACAMERA_TYPE_RATIONAL:
        LOGI("%s %s: %08x, %08x", printLabel, name, val.data.r[i].numerator,
             val.data.r[i].denominator);
        break;
      default:
        ASSERT(false, "Unknown tag value type: %d", val.type);
    }
  }
}
/*
 * PrintCameras():
 * Enumerate existing camera and its metadata.
 * For each camera id, fetch its characteristics and log every supported
 * tag/value pair (with special handling for the lens-facing tag).
 */
void PrintCameras(ACameraManager* cmrMgr) {
  if (!cmrMgr) return;
  ACameraIdList* cameraIds = nullptr;
  camera_status_t status = ACameraManager_getCameraIdList(cmrMgr, &cameraIds);
  LOGI("camera Status = %d, %s", status, GetErrorStr(status));
  // Fix: on failure the id list is not valid; the original dereferenced
  // cameraIds unconditionally and would crash here.
  if (status != ACAMERA_OK || cameraIds == nullptr) {
    return;
  }
  for (int i = 0; i < cameraIds->numCameras; ++i) {
    const char* id = cameraIds->cameraIds[i];
    LOGI("=====cameraId = %d, cameraName = %s=====", i, id);
    ACameraMetadata* metadataObj;
    CALL_MGR(getCameraCharacteristics(cmrMgr, id, &metadataObj));
    int32_t count = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(metadataObj, &count, &tags);
    for (int tagIdx = 0; tagIdx < count; ++tagIdx) {
      ACameraMetadata_const_entry val = {
          0,
      };
      // Some tags advertised by getAllTags have no readable entry; skip them.
      camera_status_t status =
          ACameraMetadata_getConstEntry(metadataObj, tags[tagIdx], &val);
      if (status != ACAMERA_OK) {
        LOGW("Unsupported Tag: %s",
             GetTagStr(static_cast<acamera_metadata_tag_t>(tags[tagIdx])));
        continue;
      }
      PrintTagVal("Camera Tag:", val);
      if (ACAMERA_LENS_FACING == tags[tagIdx]) {
        PrintLensFacing(val);
      }
    }
    ACameraMetadata_free(metadataObj);
  }
  ACameraManager_deleteCameraIdList(cameraIds);
}
/*
 * Log every tag/value pair carried by a capture request.
 */
void PrintRequestMetadata(ACaptureRequest* req) {
  if (!req) return;
  int32_t count;
  const uint32_t* tags;
  CALL_REQUEST(getAllTags(req, &count, &tags));
  for (int32_t idx = 0; idx < count; ++idx) {
    ACameraMetadata_const_entry val;
    CALL_REQUEST(getConstEntry(req, tags[idx], &val));
    const char* name =
        GetTagStr(static_cast<acamera_metadata_tag_t>(tags[idx]));
    for (uint32_t i = 0; i < val.count; ++i) {
      switch (val.type) {
        case ACAMERA_TYPE_INT32:
          LOGI("Capture Tag %s: %08d", name, val.data.i32[i]);
          break;
        case ACAMERA_TYPE_BYTE:
          LOGI("Capture Tag %s: %#08x", name, val.data.u8[i]);
          break;
        case ACAMERA_TYPE_INT64:
          // Fix: signed value — PRId64 (was PRIu64, misprinting negatives).
          LOGI("Capture Tag %s: %" PRId64, name, val.data.i64[i]);
          break;
        case ACAMERA_TYPE_FLOAT:
          LOGI("Capture Tag %s: %f", name, val.data.f[i]);
          break;
        case ACAMERA_TYPE_DOUBLE:
          // Fix: read the double member; the original printed the i64 member
          // as hex, which is not the stored double value.
          LOGI("Capture Tag %s: %f", name, val.data.d[i]);
          break;
        case ACAMERA_TYPE_RATIONAL:
          LOGI("Capture Tag %s: %08x, %08x", name, val.data.r[i].numerator,
               val.data.r[i].denominator);
          break;
        default:
          ASSERT(false, "Unknown tag value type: %d", val.type);
      }
    }
  }
}
/*
 * CameraDevice error state translation, used in
 * ACameraDevice_ErrorStateCallback
 */
// Table mapping ACameraDevice error codes to their symbol names.
using DEV_ERROR_PAIR = std::pair<int, const char*>;
static std::vector<DEV_ERROR_PAIR> devErrors{
    MAKE_PAIR(ERROR_CAMERA_IN_USE),   MAKE_PAIR(ERROR_MAX_CAMERAS_IN_USE),
    MAKE_PAIR(ERROR_CAMERA_DISABLED), MAKE_PAIR(ERROR_CAMERA_DEVICE),
    MAKE_PAIR(ERROR_CAMERA_SERVICE),
};
// Translate a device error code into its enumerator name.
const char* GetCameraDeviceErrorStr(int err) {
  return GetPairStr<int>(err, devErrors);
}
// Log a device error code together with its symbol name.
void PrintCameraDeviceError(int err) {
  LOGI("CameraDeviceError(%#x): %s", err, GetCameraDeviceErrorStr(err));
}

@ -0,0 +1,52 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CAMERA_CAMERA_UTILS_H__
#define __CAMERA_CAMERA_UTILS_H__
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
/*
 * A set of macros to call into Camera APIs. The API is grouped with a few
 * objects, with object name as the prefix of function names.
 */
// CALL_CAMERA evaluates a camera_status_t expression and aborts via ASSERT
// (with the failing function name and translated status) on any status
// other than ACAMERA_OK. Requires native_debug.h's ASSERT to be in scope.
#define CALL_CAMERA(func)                                             \
  {                                                                   \
    camera_status_t status = func;                                    \
    ASSERT(status == ACAMERA_OK, "%s call failed with code: %#x, %s", \
           __FUNCTION__, status, GetErrorStr(status));                \
  }
// Per-object shorthands: CALL_MGR(getCameraIdList(...)) expands to a checked
// ACameraManager_getCameraIdList(...) call, and so on for each NDK object.
#define CALL_MGR(func) CALL_CAMERA(ACameraManager_##func)
#define CALL_DEV(func) CALL_CAMERA(ACameraDevice_##func)
#define CALL_METADATA(func) CALL_CAMERA(ACameraMetadata_##func)
#define CALL_CONTAINER(func) CALL_CAMERA(ACaptureSessionOutputContainer_##func)
#define CALL_OUTPUT(func) CALL_CAMERA(ACaptureSessionOutput_##func)
#define CALL_TARGET(func) CALL_CAMERA(ACameraOutputTarget_##func)
#define CALL_REQUEST(func) CALL_CAMERA(ACaptureRequest_##func)
#define CALL_SESSION(func) CALL_CAMERA(ACameraCaptureSession_##func)
/*
 * A few debugging functions for error code strings etc
 */
// Enum/status → symbol-name translators (implemented in camera_utils.cc).
const char* GetErrorStr(camera_status_t err);
const char* GetTagStr(acamera_metadata_tag_t tag);
// Logging helpers for camera enumeration and capture-request metadata.
void PrintMetadataTags(int32_t entries, const uint32_t* pTags);
void PrintLensFacing(ACameraMetadata_const_entry& lensData);
void PrintCameras(ACameraManager* cameraMgr);
void PrintCameraDeviceError(int err);
void PrintRequestMetadata(ACaptureRequest* req);
#endif  // __CAMERA_CAMERA_UTILS_H__

@ -0,0 +1,10 @@
// native_debug.h — logging and assertion macros shared by the native camera
// code. Fix: added an include guard (the header had none).
#pragma once
#include <android/log.h>
// Tag under which all native-side log lines appear in logcat.
#define LOG_TAG "CAMERA-SAMPLE"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Fatal assertion: aborts with a formatted message when `cond` is false.
// Fix: wrapped in do { } while (0) so ASSERT(...) behaves as a single
// statement — the previous bare `if { }` form silently mis-binds in
// `if (x) ASSERT(...); else ...` constructs.
#define ASSERT(cond, fmt, ...)                                    \
  do {                                                            \
    if (!(cond)) {                                                \
      __android_log_assert(#cond, LOG_TAG, fmt, ##__VA_ARGS__);   \
    }                                                             \
  } while (0)

@ -0,0 +1,217 @@
package com.xinyingpower.microphoto;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
/**
 * A started (non-bound) Service that shows a draggable, full-screen overlay
 * window containing an EditText, a mirror TextView and a close button.
 * The overlay is created on every onStartCommand and removed in onDestroy.
 * Requires the SYSTEM_ALERT_WINDOW / overlay permission at runtime.
 */
public class FloatingWindow extends Service {
    private Context mContext;
    private WindowManager mWindowManager;
    private View mView; // root of the inflated overlay layout

    // Binding is not supported; this service is start-only.
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mContext = this;
    }

    /**
     * Builds the overlay view hierarchy and attaches it to the window
     * manager each time the service is started.
     */
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        allAboutLayout(intent);
        moveView();
        return super.onStartCommand(intent, flags, startId);
    }

    @Override
    public void onDestroy() {
        // Detach the overlay; mView is only non-null after onStartCommand ran.
        if (mView != null) {
            mWindowManager.removeView(mView);
        }
        super.onDestroy();
    }

    WindowManager.LayoutParams mWindowsParams;

    /**
     * Attaches the overlay at full display size and wires a touch listener
     * that lets the user drag the window and that toggles keyboard focus
     * depending on whether the touch landed inside the overlay.
     */
    private void moveView() {
        DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
        int width = (int) (metrics.widthPixels * 1f);
        int height = (int) (metrics.heightPixels * 1f);

        mWindowsParams = new WindowManager.LayoutParams(
                width,//WindowManager.LayoutParams.WRAP_CONTENT,
                height,//WindowManager.LayoutParams.WRAP_CONTENT,
                //WindowManager.LayoutParams.TYPE_SYSTEM_ALERT,
                // TYPE_PHONE is deprecated from API 26 on; O+ requires the
                // application-overlay window type.
                (Build.VERSION.SDK_INT <= 25) ? WindowManager.LayoutParams.TYPE_PHONE : WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY
                ,
                //WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL,
                WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL
                        | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN // Not displaying keyboard on bg activity's EditText
                        | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
                        | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
                        | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
                        | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON,
                //WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, //Not work with EditText on keyboard
                PixelFormat.TRANSLUCENT);
        mWindowsParams.gravity = Gravity.TOP | Gravity.LEFT;
        //params.x = 0;
        mWindowsParams.y = 100;
        mWindowManager.addView(mView, mWindowsParams);

        mView.setOnTouchListener(new View.OnTouchListener() {
            private int initialX;
            private int initialY;
            private float initialTouchX;
            private float initialTouchY;
            // Ignore touches during the first 300 ms after attach so the
            // initial layout isn't dragged accidentally.
            long startTime = System.currentTimeMillis();

            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (System.currentTimeMillis() - startTime <= 300) {
                    return false;
                }
                // Focus the EditText only when the touch is inside the overlay.
                if (isViewInBounds(mView, (int) (event.getRawX()), (int) (event.getRawY()))) {
                    editTextReceiveFocus();
                } else {
                    editTextDontReceiveFocus();
                }
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        // Remember where the drag started.
                        initialX = mWindowsParams.x;
                        initialY = mWindowsParams.y;
                        initialTouchX = event.getRawX();
                        initialTouchY = event.getRawY();
                        break;
                    case MotionEvent.ACTION_UP:
                        break;
                    case MotionEvent.ACTION_MOVE:
                        // Move the window by the finger delta.
                        mWindowsParams.x = initialX + (int) (event.getRawX() - initialTouchX);
                        mWindowsParams.y = initialY + (int) (event.getRawY() - initialTouchY);
                        mWindowManager.updateViewLayout(mView, mWindowsParams);
                        break;
                }
                // Returning false lets the event continue to child views.
                return false;
            }
        });
    }

    // True when screen coordinates (x, y) fall inside `view`'s on-screen rect.
    private boolean isViewInBounds(View view, int x, int y) {
        Rect outRect = new Rect();
        int[] location = new int[2];
        view.getDrawingRect(outRect);
        view.getLocationOnScreen(location);
        outRect.offset(location[0], location[1]);
        return outRect.contains(x, y);
    }

    // Make the overlay focusable so the EditText can take the keyboard.
    private void editTextReceiveFocus() {
        if (!wasInFocus) {
            mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
            mWindowManager.updateViewLayout(mView, mWindowsParams);
            wasInFocus = true;
        }
    }

    // Drop focus and hide the keyboard when the user touches outside.
    private void editTextDontReceiveFocus() {
        if (wasInFocus) {
            mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
            mWindowManager.updateViewLayout(mView, mWindowsParams);
            wasInFocus = false;
            hideKeyboard(mContext, edt1);
        }
    }

    // Tracks whether the overlay currently holds keyboard focus.
    private boolean wasInFocus = true;
    private EditText edt1;

    /**
     * Inflates the overlay layout and wires its widgets: tapping the
     * EditText brings up the keyboard, typing mirrors the text into
     * tvValue, and the close button stops the service.
     */
    private void allAboutLayout(Intent intent) {
        LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        mView = layoutInflater.inflate(R.layout.ovelay_window, null);
        edt1 = (EditText) mView.findViewById(R.id.edt1);
        final TextView tvValue = (TextView) mView.findViewById(R.id.tvValue);
        Button btnClose = (Button) mView.findViewById(R.id.btnClose);

        edt1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Make the window focusable before requesting the keyboard.
                mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
                mWindowsParams.softInputMode = WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE;
                mWindowManager.updateViewLayout(mView, mWindowsParams);
                wasInFocus = true;
                showSoftKeyboard(v);
            }
        });

        edt1.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            }

            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                // Mirror the typed text into the read-only TextView.
                tvValue.setText(edt1.getText());
            }

            @Override
            public void afterTextChanged(Editable editable) {
            }
        });

        btnClose.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                stopSelf();
            }
        });
    }

    // Hide the soft keyboard for `view`, if one is showing.
    private void hideKeyboard(Context context, View view) {
        if (view != null) {
            InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
        }
    }

    // Request the soft keyboard for `view` once it has focus.
    public void showSoftKeyboard(View view) {
        if (view.requestFocus()) {
            InputMethodManager imm = (InputMethodManager)
                    getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT);
        }
    }
}

@ -1,20 +1,30 @@
package com.xinyingpower.microphoto;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.os.Environment;
import android.os.SystemClock;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import com.dowse.camera.client.DSCameraManager;
import com.xinyingpower.microphoto.databinding.ActivityMainBinding;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
public class MainActivity extends AppCompatActivity {
private static int MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE = 100;
// Used to load the 'microphoto' library on application startup.
static {
System.loadLibrary("microphoto");
@ -29,43 +39,177 @@ public class MainActivity extends AppCompatActivity {
binding = ActivityMainBinding.inflate(getLayoutInflater());
setContentView(binding.getRoot());
Intent intent = new Intent(getApplicationContext(), MicroPhotoService.class);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
startForegroundService(intent);
} else {
startService(intent);
}
// Example of a call to a native method
TextView tv = binding.sampleText;
tv.setText(stringFromJNI());
this.binding.button.setOnClickListener(new View.OnClickListener() {
this.binding.start.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
File path = Environment.getExternalStorageDirectory();
File file = new File(path, "photo.jpg");
boolean res = false;
res = DSCameraManager.getInstace().init();
res = DSCameraManager.getInstace().takePhoto(file.getAbsolutePath(), 1);
String[] accessPermissions = new String[] {
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.FOREGROUND_SERVICE
};
boolean needRequire = false;
for(String access : accessPermissions) {
int curPermission = ActivityCompat.checkSelfPermission(MainActivity.this, access);
if(curPermission != PackageManager.PERMISSION_GRANTED) {
needRequire = true;
break;
}
}
if (needRequire) {
ActivityCompat.requestPermissions(
MainActivity.this,
accessPermissions,
MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE);
return;
}
if (!res)
{
int aa = 0;
int channel = 2;
String path = buildPhotoDir(channel);
String fileName = buildPhotoFileName(channel, 255);
// MainActivity.this.takePhoto(channel, 255, path, fileName);
Intent intent = new Intent(MainActivity.this, MicroPhotoService.class);
intent.setAction(MicroPhotoService.ACTION_START);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
MainActivity.this.startForegroundService(intent);
} else {
MainActivity.this.startService(intent);
}
res = DSCameraManager.getInstace().unInit();
binding.start.setEnabled(false);
binding.stop.setEnabled(true);
}
});
}
protected void takePhoto()
{
File path = Environment.getExternalStorageDirectory();
File file = new File(path, "photo.jpg");
boolean res = false;
res = DSCameraManager.getInstace().init();
res = DSCameraManager.getInstace().takePhoto(file.getAbsolutePath(), 2);
if (!res)
{
int aa = 0;
}
res = DSCameraManager.getInstace().unInit();
}
String buildPhotoDir(int channel) {
File path = new File(Environment.getExternalStorageDirectory(), "com.xinyingpower.com/photos/");
if (!path.exists() && !path.mkdirs()) {
return null;
}
String p = path.getAbsolutePath();
if (!p.endsWith(File.separator)) {
p += File.separator;
}
return p;
}
String buildPhotoFileName(int channel, int preset) {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyymmddhhmmss");
String date = dateFormat.format(new Date());
String photoFile = "img_" + Integer.toString(channel) + "_" + Integer.toHexString(preset).toUpperCase() + "_" + date + ".jpg";
return photoFile;
}
private void takePhoto(int aa) {
System.out.println("Preparing to take photo");
Camera camera = null;
int cameraCount = 0;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
SystemClock.sleep(1000);
Camera.getCameraInfo(camIdx, cameraInfo);
try {
camera = Camera.open(camIdx);
} catch (RuntimeException e) {
System.out.println("Camera not available: " + camIdx);
camera = null;
//e.printStackTrace();
}
try {
if (null == camera) {
System.out.println("Could not get camera instance");
} else {
System.out.println("Got the camera, creating the dummy surface texture");
//SurfaceTexture dummySurfaceTextureF = new SurfaceTexture(0);
try {
//camera.setPreviewTexture(dummySurfaceTextureF);
camera.setPreviewTexture(new SurfaceTexture(0));
camera.startPreview();
} catch (Exception e) {
System.out.println("Could not set the surface preview texture");
e.printStackTrace();
}
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File path = getApplicationContext().getFilesDir();
// String appPath = path.getAbsolutePath();
File pictureFileDir = path;
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
return;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyymmddhhmmss");
String date = dateFormat.format(new Date());
String photoFile = "PictureFront_" + "_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File mainPicture = new File(filename);
// addImageFile(mainPicture);
try {
FileOutputStream fos = new FileOutputStream(mainPicture);
fos.write(data);
fos.close();
System.out.println("image saved");
} catch (Exception error) {
System.out.println("Image could not be saved");
}
camera.release();
}
});
}
} catch (Exception e) {
camera.release();
}
}
}
/**
* A native method that is implemented by the 'microphoto' native library,
* which is packaged with this application.
*/
public native String stringFromJNI();
public native boolean takePhoto(int channel, int preset, String path, String fileName);
}

@ -1,25 +1,80 @@
package com.xinyingpower.microphoto;
import android.app.AlarmManager;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
import android.os.IBinder;
import android.os.SystemClock;
import android.provider.SyncStateContract;
import android.support.v4.app.NotificationCompat;
import android.text.TextUtils;
import android.util.Log;
import android.widget.RemoteViews;
import android.widget.Toast;
import com.dowse.base.param.pic.ChannelPicParam;
import com.dowse.camera.client.DSCameraManager;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
public class MicroPhotoService extends Service {
public static final String TAG = "MPService";
// Used to load the 'microphoto' library on application startup.
static {
System.loadLibrary("microphoto");
}
public MicroPhotoService() {
private static String ALARM_EVENT = "com.xinyingpower.mp.MicroPhotoService.AlarmReceiver";
public static final int NOTIFICATION_ID_FOREGROUND_SERVICE = 8466503;
public static final String ACTION_START = "ACT_START";
public static final String ACTION_STOP = "ACT_STOP";
public static final String ACTION_MAIN = "ACT_MAIN";
private static String ACTION_HEARTBEAT = "ACT_HB";
private static String ACTION_TAKE_PHOTO = "ACT_TP";
private final static String FOREGROUND_CHANNEL_ID = "foreground_channel_id";
public static class STATE_SERVICE {
public static final int CONNECTED = 10;
public static final int NOT_CONNECTED = 0;
}
private static String mDesc = "";
private AlarmManager mAlarmManager;
private NotificationManager mNotificationManager;
private Handler handler;
private int count = 0;
private static int stateService = STATE_SERVICE.NOT_CONNECTED;
public MicroPhotoService() {
}
@ -34,6 +89,9 @@ public class MicroPhotoService extends Service {
public void onCreate() {
super.onCreate();
mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
stateService = STATE_SERVICE.NOT_CONNECTED;
boolean res = false;
res = DSCameraManager.getInstace().init();
ChannelPicParam picParam = new ChannelPicParam();
@ -50,10 +108,132 @@ public class MicroPhotoService extends Service {
int port = 40032;
String cmdid = "XYDEV100230100012";
init(appPath, ip, port, cmdid);
alarmReceiver = new AlarmReceiver(this);
// 注册广播接受者
IntentFilter intentFilter = new IntentFilter(ACTION_HEARTBEAT);
intentFilter.addAction(ACTION_TAKE_PHOTO);
registerReceiver( alarmReceiver, intentFilter);
registerHeartbeatTimer(getHeartbeatDuration());
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    // A null intent means the system restarted the service after killing it;
    // there is nothing to resume, so shut down cleanly.
    if (intent == null) {
        stopForeground(true);
        stopSelf();
        return START_NOT_STICKY;
    }

    // Bug fix: intent.getAction() may be null when a caller starts the
    // service without an explicit action; the original switch(null) threw
    // a NullPointerException. Treat a missing action like a stop request.
    final String action = intent.getAction();
    if (action == null) {
        stopForeground(true);
        stopSelf();
        return START_NOT_STICKY;
    }

    switch (action) {
        case ACTION_START:
            Log.d(TAG, "Received user starts foreground intent");
            startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification());

            // Start the locker receiver.
            // NOTE(review): a new receiver is registered on every
            // ACTION_START and never unregistered — consider holding a
            // single instance and unregistering it in onDestroy().
            final ScreenActionReceiver screenactionreceiver = new ScreenActionReceiver();
            registerReceiver(screenactionreceiver, screenactionreceiver.getFilter());

            connect();
            break;

        case ACTION_STOP:
            stopForeground(true);
            stopSelf();
            break;

        default:
            // Unknown action: fall back to stopping the service.
            stopForeground(true);
            stopSelf();
    }
    return START_NOT_STICKY;
}
/**
 * Simulates an asynchronous connection: after a fixed 10-second delay the
 * service is flagged CONNECTED, a toast is shown, and the foreground
 * notification is re-posted so its state label reflects the new state.
 */
private void connect() {
    final Runnable markConnected = new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "Bluetooth Low Energy device is connected!!");
            Toast.makeText(getApplicationContext(), "Connected!", Toast.LENGTH_SHORT).show();
            stateService = STATE_SERVICE.CONNECTED;
            startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification());
        }
    };
    // after 10 seconds its connected
    new Handler().postDelayed(markConnected, 10000);
}
/**
 * Builds the foreground-service notification shown while the service runs.
 * Uses a custom RemoteViews layout (R.layout.notification) with a state
 * label and a STOP button wired to ACTION_STOP of this service; tapping
 * the notification body opens MainActivity with ACTION_MAIN.
 */
private Notification prepareNotification() {
    // handle build version above android oreo
    // O+ requires a notification channel; create it once, lazily.
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O &&
            mNotificationManager.getNotificationChannel(FOREGROUND_CHANNEL_ID) == null) {
        CharSequence name = getString(R.string.text_name_notification);
        int importance = NotificationManager.IMPORTANCE_DEFAULT;
        NotificationChannel channel = new NotificationChannel(FOREGROUND_CHANNEL_ID, name, importance);
        channel.enableVibration(false);
        mNotificationManager.createNotificationChannel(channel);
    }
    // Content intent: open the main activity in a fresh task.
    Intent notificationIntent = new Intent(this, MainActivity.class);
    notificationIntent.setAction(ACTION_MAIN);
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
    // if min sdk goes below honeycomb
    /*if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
    } else {
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    }*/
    // NOTE(review): on Android 12+ (targetSdkVersion >= 31) these
    // PendingIntents must also carry FLAG_IMMUTABLE or FLAG_MUTABLE —
    // confirm the target SDK.
    PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
    // make a stop intent
    Intent stopIntent = new Intent(this, MicroPhotoService.class);
    stopIntent.setAction(ACTION_STOP);
    PendingIntent pendingStopIntent = PendingIntent.getService(this, 0, stopIntent, PendingIntent.FLAG_UPDATE_CURRENT);
    // Custom layout: tv_state shows connection state, btn_stop stops the service.
    RemoteViews remoteViews = new RemoteViews(getPackageName(), R.layout.notification);
    remoteViews.setOnClickPendingIntent(R.id.btn_stop, pendingStopIntent);
    // if it is connected
    switch (stateService) {
        case STATE_SERVICE.NOT_CONNECTED:
            remoteViews.setTextViewText(R.id.tv_state, "DISCONNECTED");
            break;
        case STATE_SERVICE.CONNECTED:
            remoteViews.setTextViewText(R.id.tv_state, "CONNECTED");
            break;
    }
    // notification builder
    // The channel-id constructor only exists on O+; fall back to the
    // deprecated single-argument form on older API levels.
    NotificationCompat.Builder notificationBuilder;
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
        notificationBuilder = new NotificationCompat.Builder(this, FOREGROUND_CHANNEL_ID);
    } else {
        notificationBuilder = new NotificationCompat.Builder(this);
    }
    notificationBuilder
            .setContent(remoteViews)
            .setSmallIcon(R.mipmap.ic_launcher)
            .setCategory(NotificationCompat.CATEGORY_SERVICE)
            .setOnlyAlertOnce(true)
            .setOngoing(true)
            .setAutoCancel(true)
            .setContentIntent(pendingIntent);
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
        notificationBuilder.setVisibility(Notification.VISIBILITY_PUBLIC);
    }
    return notificationBuilder.build();
}
/*
public boolean takePhoto(short channel, short preset, String path) {
boolean res = DSCameraManager.getInstace().takePhoto(path, channel, 0, 0);
if (!res) {
@ -76,18 +256,105 @@ public class MicroPhotoService extends Service {
}
}
return res;
}
*/
/**
 * Service teardown: flags the service as disconnected, shuts down the
 * native microphoto layer and releases the camera SDK.
 */
@Override
public void onDestroy() {
    stateService = STATE_SERVICE.NOT_CONNECTED;
    // Counterpart of the native init() performed in onCreate().
    uninit();
    // Release the camera SDK singleton ("getInstace" is the SDK's own spelling).
    DSCameraManager.getInstace().unInit();
    // NOTE(review): alarmReceiver registered in onCreate() is never
    // unregistered here — potential receiver leak; verify intended.
    super.onDestroy();
}
/**
 * Returns the directory used to store captured photos, creating it on
 * demand. The returned path always ends with a path separator.
 *
 * @param channel channel number — currently unused; all channels share one
 *                directory (presumably per-channel folders may come later)
 * @return absolute directory path ending with File.separator, or null when
 *         the directory does not exist and could not be created
 */
private String buildPhotoDir(int channel) {
    File dir = new File(Environment.getExternalStorageDirectory(), "com.xinyingpower.com/photos/");
    boolean available = dir.exists() || dir.mkdirs();
    if (!available) {
        return null;
    }
    String dirPath = dir.getAbsolutePath();
    return dirPath.endsWith(File.separator) ? dirPath : dirPath + File.separator;
}
/**
 * Builds a photo file name of the form
 * {@code img_<channel>_<presetHex>_<yyyyMMddHHmmss>.jpg},
 * e.g. {@code img_2_FF_20230615093012.jpg}.
 *
 * @param channel channel number, rendered in decimal
 * @param preset  preset number, rendered as upper-case hexadecimal
 * @return the generated file name
 */
private String buildPhotoFileName(int channel, int preset) {
    // Bug fix: the original pattern "yyyymmddhhmmss" used mm (minutes)
    // where the month was intended and hh (12-hour clock, 01-12) where a
    // 24-hour clock was intended; MM and HH are the correct letters.
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
    String date = dateFormat.format(new Date());
    String photoFile = "img_" + Integer.toString(channel) + "_" + Integer.toHexString(preset).toUpperCase() + "_" + date + ".jpg";
    return photoFile;
}
public static class AlarmReceiver extends BroadcastReceiver {
private MicroPhotoService mService;
public AlarmReceiver(MicroPhotoService service) {
mService = service;
}
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if(TextUtils.equals(ACTION_HEARTBEAT, action)){
Log.i(TAG, "receiver ACTION=" + action);
mService.registerHeartbeatTimer(mService.getHeartbeatDuration());
}
}
}
// Heartbeat broadcast handler: re-arms the heartbeat timer each time the
// heartbeat alarm fires.
// NOTE(review): duplicates AlarmReceiver's logic and is never registered
// in the code visible here — presumably one of the two is redundant.
private BroadcastReceiver receiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        final String firedAction = intent.getAction();
        // Ignore anything that is not the heartbeat action.
        if (!TextUtils.equals(ACTION_HEARTBEAT, firedAction)) {
            return;
        }
        Log.i(TAG, "receiver ACTION");
        registerHeartbeatTimer(getHeartbeatDuration());
    }
};
/**
 * Arms a one-shot exact alarm that broadcasts ACTION_HEARTBEAT after
 * {@code timeout} milliseconds, waking the device from doze if necessary
 * (ELAPSED_REALTIME_WAKEUP).
 *
 * @param timeout delay in milliseconds from now
 */
private void registerHeartbeatTimer(long timeout) {
    Intent alarmIntent = new Intent();
    alarmIntent.setAction(ACTION_HEARTBEAT);
    // Bug fix: the original passed flag 0, so a re-armed alarm could reuse
    // a stale cached PendingIntent; FLAG_UPDATE_CURRENT always delivers
    // the latest intent.
    // NOTE(review): on Android 12+ (targetSdkVersion >= 31) a mutability
    // flag (FLAG_IMMUTABLE) is mandatory — confirm the target SDK.
    PendingIntent pendingIntent = PendingIntent.getBroadcast(this, 0, alarmIntent, PendingIntent.FLAG_UPDATE_CURRENT);
    AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
    alarmManager.setExactAndAllowWhileIdle(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + timeout, pendingIntent);
}
/**
 * Arms a one-shot exact alarm that broadcasts ACTION_TAKE_PHOTO at the
 * absolute wall-clock time {@code ts} (milliseconds since the epoch,
 * RTC_WAKEUP), waking the device if necessary.
 *
 * @param ts absolute trigger time in epoch milliseconds
 */
private void registerPhotoTimer(long ts) {
    Intent alarmIntent = new Intent();
    alarmIntent.setAction(ACTION_TAKE_PHOTO);
    // Bug fix: flag 0 replaced with FLAG_UPDATE_CURRENT (see
    // registerHeartbeatTimer). Both timers use requestCode 0, but their
    // distinct actions keep the PendingIntents distinct, so the heartbeat
    // and photo alarms do not overwrite each other.
    PendingIntent pendingIntent = PendingIntent.getBroadcast(this, 0, alarmIntent, PendingIntent.FLAG_UPDATE_CURRENT);
    AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
    alarmManager.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, ts, pendingIntent);
}
// JNI bridge into the "microphoto" native library (loaded in the class's
// static initializer).
// Starts the native layer with the app working path and server endpoint.
protected native boolean init(String appPath, String ip, int port, String cmdid);
// Heartbeat interval, decided by the native layer (used as a millisecond
// delay by registerHeartbeatTimer — presumably milliseconds; confirm in JNI).
protected native long getHeartbeatDuration();
// Asks the native layer to capture a photo for the given channel/preset
// into path/fileName.
protected native boolean takePhoto(int channel, int preset, String path, String fileName);
// Counterpart of init(); called from onDestroy().
protected native boolean uninit();
// Opaque native-side handle — written by JNI code; do not modify from Java.
protected long mHandler = 0;
// Dynamically registered heartbeat receiver; created in onCreate().
private AlarmReceiver alarmReceiver = null;
}

@ -0,0 +1,84 @@
package com.xinyingpower.microphoto;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Build;
import android.util.Log;
import android.widget.Toast;
/**
 * Reacts to screen on/off, unlock and boot-completed broadcasts, starting
 * the FloatingWindow "locker" service when the device becomes usable.
 * Registered dynamically via {@link #getFilter()} and (for BOOT_COMPLETED)
 * from the manifest.
 */
public class ScreenActionReceiver extends BroadcastReceiver {

    private String TAG = "ScreenActionReceiver";

    @Override
    public void onReceive(Context context, Intent intent) {
        // Log the incoming broadcast both to logcat and as a toast.
        StringBuilder sb = new StringBuilder();
        sb.append("Action: " + intent.getAction() + "\n");
        sb.append("URI: " + intent.toUri(Intent.URI_INTENT_SCHEME).toString() + "\n");
        String log = sb.toString();
        Log.d(TAG, log);
        Toast.makeText(context, log, Toast.LENGTH_LONG).show();

        String action = intent.getAction();
        if (Intent.ACTION_SCREEN_ON.equals(action)) {
            Log.d(TAG, "screen is on...");
            // Bug fix: the original Toast.makeText(...) calls below never
            // invoked .show(), so no toast was ever displayed.
            Toast.makeText(context, "screen ON", Toast.LENGTH_LONG).show();
            // Run the locker. Consistency fix: use startForegroundService
            // on O+ like the USER_PRESENT branch did; a plain startService
            // from the background throws IllegalStateException on API 26+.
            startLocker(context);
        } else if (Intent.ACTION_SCREEN_OFF.equals(action)) {
            Log.d(TAG, "screen is off...");
            Toast.makeText(context, "screen OFF", Toast.LENGTH_LONG).show();
        } else if (Intent.ACTION_USER_PRESENT.equals(action)) {
            Log.d(TAG, "screen is unlock...");
            Toast.makeText(context, "screen UNLOCK", Toast.LENGTH_LONG).show();
            startLocker(context);
        } else if (Intent.ACTION_BOOT_COMPLETED.equals(action)) {
            Log.d(TAG, "boot completed...");
            Toast.makeText(context, "BOOTED..", Toast.LENGTH_LONG).show();
            // Run the locker.
            startLocker(context);
        }
    }

    // Starts the FloatingWindow service in the way the API level requires.
    private void startLocker(Context context) {
        Intent i = new Intent(context, FloatingWindow.class);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            context.startForegroundService(i);
        } else {
            context.startService(i);
        }
    }

    /**
     * Filter for dynamic registration.
     */
    public IntentFilter getFilter() {
        final IntentFilter filter = new IntentFilter();
        filter.addAction(Intent.ACTION_SCREEN_OFF);
        filter.addAction(Intent.ACTION_SCREEN_ON);
        // Fix: USER_PRESENT is handled in onReceive() but was missing from
        // the filter, so unlock events never reached a dynamically
        // registered instance. (BOOT_COMPLETED is manifest-only.)
        filter.addAction(Intent.ACTION_USER_PRESENT);
        return filter;
    }
}

@ -17,11 +17,70 @@
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/button"
android:id="@+id/start"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Button"
tools:layout_editor_absoluteX="112dp"
tools:layout_editor_absoluteY="216dp" />
android:layout_marginStart="66dp"
android:layout_marginTop="80dp"
android:text="Start"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/port" />
<Button
android:id="@+id/stop"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="68dp"
android:layout_marginTop="80dp"
android:enabled="false"
android:text="Stop"
app:layout_constraintStart_toEndOf="@+id/start"
app:layout_constraintTop_toBottomOf="@+id/port" />
<EditText
android:id="@+id/server"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="35dp"
android:ems="10"
android:inputType=""
app:layout_constraintTop_toTopOf="parent"
tools:layout_editor_absoluteX="114dp" />
<EditText
android:id="@+id/port"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="11dp"
android:ems="10"
android:inputType=""
android:text="Name"
app:layout_constraintTop_toBottomOf="@+id/server"
tools:layout_editor_absoluteX="118dp" />
<TextView
android:id="@+id/textView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Server"
tools:layout_editor_absoluteX="16dp"
tools:layout_editor_absoluteY="44dp" />
<TextView
android:id="@+id/textView2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Port"
tools:layout_editor_absoluteX="19dp"
tools:layout_editor_absoluteY="96dp" />
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="411dp"
android:layout_height="441dp"
android:layout_marginStart="2dp"
android:visibility="gone"
app:layout_constraintStart_toStartOf="parent"
tools:layout_editor_absoluteY="288dp" />
</android.support.constraint.ConstraintLayout>

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Custom RemoteViews layout for the foreground-service notification
     (inflated by MicroPhotoService.prepareNotification): a connection-state
     label on the left (weight 3) and a STOP button on the right (weight 1). -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="wrap_content"
    android:gravity="center"
    android:orientation="horizontal"
    android:weightSum="4">

    <!-- State label container; text is set at runtime to
         "CONNECTED"/"DISCONNECTED" via RemoteViews.setTextViewText. -->
    <LinearLayout
        android:layout_width="0dp"
        android:layout_height="wrap_content"
        android:layout_weight="3"
        android:gravity="center">

        <TextView
            android:id="@+id/tv_state"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="STATES"
            android:textSize="16sp" />
    </LinearLayout>

    <!-- Stop button; click PendingIntent wired at runtime to ACTION_STOP. -->
    <LinearLayout
        android:layout_width="0dp"
        android:layout_height="wrap_content"
        android:layout_weight="1"
        android:gravity="center">

        <Button
            android:id="@+id/btn_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="STOP"
            android:textSize="13sp" />
    </LinearLayout>
</LinearLayout>

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Layout presumably used by the FloatingWindow overlay service: an input
     field with a Close button, and a text view echoing the typed value
     below — TODO confirm against FloatingWindow's inflate call. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/activity_main"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    android:background="#548F32"
    tools:context=".MainActivity">

    <!-- Free-text input, laid out to the left of the Close button. -->
    <EditText
        android:id="@+id/edt1"
        android:hint="Type here"
        android:layout_toLeftOf="@+id/btnClose"
        android:layout_width="match_parent"
        android:layout_height="wrap_content" />

    <!-- Dismiss button, pinned to the right edge. -->
    <Button
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="Close"
        android:layout_alignParentRight="true"
        android:id="@+id/btnClose"
        />

    <!-- Output label below the input; starts empty. -->
    <TextView
        android:id="@+id/tvValue"
        android:layout_below="@+id/edt1"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="" />
</RelativeLayout>

@ -0,0 +1,5 @@
<!-- Dimension resources referenced by the floating-window layout. -->
<resources>
    <!-- Default screen margins, per the Android Design guidelines. -->
    <dimen name="activity_horizontal_margin">16dp</dimen>
    <dimen name="activity_vertical_margin">16dp</dimen>
</resources>

@ -1,3 +1,4 @@
<resources>
    <!-- Application display name. -->
    <string name="app_name">MicroPhoto</string>
    <!-- Notification channel name used by MicroPhotoService's foreground
         notification channel. -->
    <string name="text_name_notification">Notification Name</string>
</resources>
Loading…
Cancel
Save