Modify data format

hdrplus
jxjajs 7 months ago
commit 1e47fe04e6

@ -5,7 +5,7 @@ plugins {
// versionCode layout: major*100000 + minor*1000 + build
def AppMajorVersion = 1
def AppMinorVersion = 1
def AppBuildNumber = 1
def AppBuildNumber = 5
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
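// e.g. with this commit's values: 1.1.5 -> 1*100000 + 1*1000 + 5 = 101005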
@ -36,10 +36,10 @@ android {
externalNativeBuild {
cmake {
// cppFlags '-std=c++17 -frtti -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security -fopenmp'
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot, "-DHALIDE_ROOT=" + halideroot
abiFilters 'arm64-v8a', 'armeabi-v7a'
// setAbiFilters(['arm64-v8a'])
}

@ -79,6 +79,13 @@
android:enabled="true"
android:exported="false"
android:grantUriPermissions="true" />
<intent>
<action android:name="android.media.action.IMAGE_CAPTURE" />
</intent>
<intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent>
</queries>
<application

@ -41,8 +41,20 @@ add_definitions(-DALIGN_HB_TIMER_TO_PHOTO)
add_definitions(-DENABLE_3V3_ALWAYS)
add_definitions(-DUSING_HDRPLUS)
add_definitions(-DUSING_EXEC_HDRP=1)
set(USING_EXEC_HDRP 1)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
endif()
<<<<<<< HEAD
add_definitions(-DUSING_N938)
=======
>>>>>>> 1b0d0f421fe8db524af9afc327dce98899a13e6d
# include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED )
@ -120,10 +132,17 @@ find_package(OpenMP REQUIRED)
include_directories( ${CMAKE_CURRENT_SOURCE_DIR}/hdrplus/include )
# include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/hdrplus2)
include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp
hdrplus/src/bayer_image.cpp
hdrplus/src/burst.cpp
@ -131,9 +150,15 @@ SET(HDRPLUS_SOURCES
hdrplus/src/hdrplus_pipeline.cpp
hdrplus/src/merge.cpp
hdrplus/src/params.cpp
)
SET(HDRPLUS2_SOURCES
hdrplus2/src/HDRPlus.cpp
hdrplus2/src/Burst.cpp
hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
# SET(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
@ -323,6 +348,25 @@ add_library(
${FREETYPE_SRC_FILES}
)
if(USING_EXEC_HDRP)
message(WARNING "HDRP Compiled")
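# The executable is deliberately named like a shared library: Gradle packages lib*.so
# files into the APK's native-libs dir, so it can later be exec'd from the app's
# nativeLibraryDir (assumed intent; see the nativeLibraryDir/CallExecv changes below).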
add_executable( libhdrp.so
${HDRPLUS_SOURCES}
hdrplus/bin/hdrplus.cpp )
target_link_libraries( libhdrp.so PUBLIC -fopenmp -static-openmp
android z
${OpenCV_LIBS}
# ${LIBRAW_LIBRARY}
${HDRPLUS_LIBS}
)
else(USING_EXEC_HDRP)
endif()
SET(HDRPLUS_SOURCES_EMBED ${HDRPLUS2_SOURCES} )
SET(HDRPLUS_LIBS_EMBED ${HDRPLUS2_LIBS} )
add_library( # Sets the name of the library.
microphoto
@ -350,7 +394,7 @@ add_library( # Sets the name of the library.
# camera2/OpenCVFont.cpp
${HDRPLUS_SOURCES}
${HDRPLUS_SOURCES_EMBED}
${CAMERA2_SOURCES}
${IMG_UTILS_SRCS}
@ -426,7 +470,7 @@ target_link_libraries( # Specifies the target library.
android camera2ndk mediandk z
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS}
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED}
)

@ -31,20 +31,78 @@ typedef struct
char str[MAX_STRING_LEN];
}IOT_PARAM;
std::mutex GpioControl::m_locker;
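// per-command reference counts so a shared power rail is only switched off by its last user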
std::vector<std::pair<int, uint32_t>> GpioControl::m_references;
void GpioControl::setInt(int cmd, int value)
{
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
if( fd > 0 )
int fd = -1;
IOT_PARAM param = { cmd, value, 0 };
// param.cmd = cmd;
// param.value = value;
int res = 0;
uint32_t references = (value != 0) ? 1 : 0;
std::vector<std::pair<int, uint32_t> >::iterator it;
if (value)
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
close(fd);
m_locker.lock();
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
#ifdef _DEBUG
ALOGI("setInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
#endif
close(fd);
// check res???
for (it = m_references.begin(); it != m_references.end(); ++it)
{
if (it->first == cmd)
{
it->second++;
references = it->second;
break;
}
}
if (it == m_references.end())
{
m_references.push_back(std::pair<int, uint32_t >(cmd, references));
}
}
m_locker.unlock();
}
else
{
m_locker.lock();
for (it = m_references.begin(); it != m_references.end(); ++it)
{
if (it->first == cmd)
{
if (it->second > 0)
{
it->second--;
}
references = it->second;
break;
}
}
if (references == 0)
{
fd = open(GPIO_NODE_MP, O_RDONLY);
if (fd > 0) {
res = ioctl(fd, IOT_PARAM_WRITE, &param);
#ifdef _DEBUG
ALOGI("setInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
#endif
close(fd);
}
}
m_locker.unlock();
}
return;
}
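With the reference counting above, setInt effectively becomes an acquire/release call for shared power rails: a non-zero value bumps the per-command count (writing the GPIO each time), while zero decrements it and performs the ioctl only once the count reaches zero. A minimal usage sketch under that reading (CMD_SET_CAM_3V3_EN_STATE is reused from the header; the counts in the comments assume no other users of the rail):

GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 1); // count 0 -> 1, ioctl writes 1: rail on
GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 1); // count 1 -> 2, ioctl writes 1 again (harmless)
GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 0); // count 2 -> 1, no ioctl: rail stays on
GpioControl::setInt(CMD_SET_CAM_3V3_EN_STATE, 0); // count 1 -> 0, ioctl writes 0: rail off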
int GpioControl::getInt(int cmd)
@ -132,6 +190,7 @@ std::string GpioControl::getString(int cmd)
return "";
}
<<<<<<< HEAD
#ifdef USING_N938
#if 0
@ -193,3 +252,5 @@ bool GpioControl::OpenSensors()
}
#endif
=======
>>>>>>> 1b0d0f421fe8db524af9afc327dce98899a13e6d

@ -8,6 +8,15 @@
#include <string>
#include <chrono>
#include <thread>
#include <mutex>
#include <vector>
#include <utility>
#ifndef USING_N938
#ifndef USING_PLZ // MicroPhoto
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
@ -33,95 +42,89 @@
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_SPI_POWER 129
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_SYSTEM_RESET 202
#ifdef USING_N938
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_485_STATE 121
#define CMD_SET_SPI_MODE 123
#define CMD_SET_SPI_BITS_PER_WORD 124
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_SPI_POWER 129
#define CMD_SET_WTH_POWER 490
#define CMD_SET_PULL_POWER 491
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_GPIO157_POWER 510
#define CMD_SET_GPIO5_POWER 511
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_485_en0 301
#define CMD_SET_485_en1 302
#define CMD_SET_485_en2 303
#define CMD_SET_485_en3 304
#define CMD_SET_485_en4 305
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#if 0
#define CMD_485_0_DE 156 // 485_0 DE signal
#define CMD_485_0_PWR_EN 157 // 485_0 power enable
#define CMD_485_0_1_DE_EN 171 // 485_0 & 485_1 DE level-shifter enable
#define CMD_485_1_DE 172 //
#define CMD_SET_CAM_3V3_EN_STATE 72 // board-wide 3V3 power-on enable
#define CMD_3V3_SWITCH_EN 45 // board-wide 485 3V3 signal level-shifter power enable
#define CMD_UART0_EN 73 // reserved UART0 level-shifter enable
#define CMD_485_1_PWR_EN 5 // 485_1 power enable
#define CMD_485_3_DE 6 // 485_3 DE signal
#define CMD_485_2_DE 7 // 485_2 DE signal
#define CMD_485_4_DE 13 // 485_4 DE signal
#define CMD_NETWORK_PWR_EN 94 // 100M network power enable
#define CMD_485_2_PWR_EN 92 // 485_2 power enable
#define CMD_485_3_PWR_EN 91 // 485_3 power enable
#define CMD_485_4_PWR_EN 90 // 485_4 power enable
#define CMD_SEC_EN 27 // crypto chip power-on enable
#define CMD_485_2_3_DE_EN 26 // 485_2 & 485_3 DE level-shifter enable
#define CMD_5V_PWR_EN 14 // board-wide 5V0 power-on enable
#define CMD_SD_CARD_DECT 15 // SD CARD DECT
#define CMD_PIC1_EN 16
#define CMD_OTHER_EN 21
#define CMD_ANGLE_EN 22
#define CMD_PULL_EN 23
#define CMD_WEATHER_EN 24
#define CMD_LED_CTRL 46
#define CMD_BD_EN 47
#define CMD_ADC_EN 44
#define CMD_SPI_PWR_EN 43 // SPI-to-UART bridge power enable
#if 1
#define CMD_SET_SPI_POWER 129
#define CMD_SET_CAM_3V3_EN_STATE 132
#endif
#endif // USING_N938
#else // defined(USING_PLZ)
#define CMD_SET_485_ENABLE 512
#define CMD_SET_3V3_PWR_ENABLE 516
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506
#define CMD_SET_SENSOR4_ENABLE 510
#define CMD_SET_SENSOR1_PWR_ENABLE 513
#define CMD_SET_SENSOR2_PWR_ENABLE 514
#define CMD_SET_SENSOR3_PWR_ENABLE 509
#define CMD_SET_SENSOR4_PWR_ENABLE 525
#define CMD_SET_PHOTO_IN 520
#define CMD_SET_PHOTO_OUT 515
#define CMD_SET_ADC_ENABLE 500
#define CMD_SET_MIPI_SWITCH 501
#define CMD_SET_CAM_RSTN1 502
#define CMD_SET_CAM_RSTN0 503
#define CMD_SET_SD_DECT 507
#define CMD_SET_PTZ_PWR_ENABLE 508
#define CMD_SET_RTC_ENABLE 511
#define CMD_SET_100M_ENABLE 518
#define CMD_SET_100M_SWITCH_PWR_ENABLE 519
#define CMD_SET_AM_POWER_ENABLE 521
#define CMD_SET_NRSEC_POWER_ENABLE 522
#define CMD_SET_AMP_ENABLE 523
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#endif // USING_PLZ
#else // defined(USING_N938)
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_485_EN1 302
#define CMD_SET_CAM_3V3_EN_STATE 360
#define CMD_SET_UART0_EN 361
#define CMD_SET_485_EN0 301
#define CMD_SET_NETWORK_POWER_EN 362
#define CMD_SET_485_EN3 304
#define CMD_SET_485_EN2 303
#define CMD_SET_SPI_POWER 129
#define CMD_SET_5V_EN 363
#define CMD_SDCARD_DETECT_EN 364
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_PULL_POWER 491
#define CMD_SET_WTH_POWER 490
#define CMD_SET_485_EN4 305
#define CMD_LED_CTRL 365
#define CMD_BD_EN 366
#define CMD_ADC_EN 367
#define CMD_SPI2SERIAL_POWER_EN 368
#define CMD_RS485_3V3_EN 369
#endif // USING_N938
<<<<<<< HEAD
#ifndef USING_N938
#define GPIO_NODE_N938 "/sys/devices/platform/1000b000.pinctrl/mt_gpio"
#else
=======
>>>>>>> 1b0d0f421fe8db524af9afc327dce98899a13e6d
#define GPIO_NODE_MP "/dev/mtkgpioctrl"
#endif // USING_N938
class GpioControl
{
private:
static std::mutex m_locker;
static std::vector<std::pair<int, uint32_t>> m_references;
public:
static void setInt(int cmd, int value);
@ -133,12 +136,18 @@ public:
static void setOtgState(bool on)
{
#ifndef USING_N938
setInt(CMD_SET_OTG_STATE, on ? 1 : 0);
#endif
}
static bool getOtgState()
{
#ifndef USING_N938
return getInt(CMD_SET_OTG_STATE) != 0;
#else
return false;
#endif
}
static void setCam3V3Enable(bool enabled)
@ -157,82 +166,142 @@ public:
static void setLightAdc(int i)
{
#ifndef USING_N938
setInt(CMD_SET_LIGHT_ADC, i);
#endif
}
static int getLightAdc()
{
#ifndef USING_N938
return getInt(CMD_GET_LIGHT_ADC);
#else
return -1;
#endif
}
static int getChargingVoltage()
{
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_VOL_STATE);
#else
return -1;
#endif
}
static int getChargingShuntVoltage()
{
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
static int getChargingBusVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
#else
return -1;
#endif
}
static int getChargingPower() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_POWER_STATE);
#else
return -1;
#endif
}
static int getChargingCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_CURRENT_STATE);
#else
return -1;
#endif
}
static int getBatteryVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_VOL_STATE);
#else
return -1;
#endif
}
static int getBatteryShuntVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
static int getBatteryBusVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_BUS_VOLTAGE_STATE);
#else
return -1;
#endif
}
static int getBatteryPower() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_POWER_STATE);
#else
return -1;
#endif
}
static int getBatteryCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_CURRENT_STATE);
#else
return -1;
#endif
}
static void set485WriteMode() {
#if 0
setInt(CMD_SET_485_STATE, 1);
#endif
}
static void set485ReadMode() {
#if 0
setInt(CMD_SET_485_STATE, 0);
#endif
}
static void setSpiMode(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_MODE, i);
#endif
}
static void setSpiBitsPerWord(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_BITS_PER_WORD, i);
#endif
}
static void setSpiMaxSpeedHz(long j) {
#ifndef USING_N938
setLong(CMD_SET_SPI_MAXSPEEDHZ, j);
#endif
}
static void setBeeOn(bool z) {
#ifndef USING_N938
setInt(CMD_SET_PWM_BEE_STATE, z ? 1 : 0);
#endif
}
static void setJidianqiState(bool z) {
#ifndef USING_N938
setInt(CMD_SET_ALM_MODE, z ? 1 : 0);
#endif
}
static void setSpiPower(bool on) {
@ -244,19 +313,17 @@ public:
}
static void setRS485Enable(bool z) {
#ifndef USING_N938
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
#endif
}
static void set12VEnable(bool z) {
#ifndef USING_N938
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
#endif
}
#ifdef USING_N938
static bool SetN938Cmd(int cmd, int val);
static bool OpenSensors();
static bool CloseSensors();
#endif
}
};

@ -195,7 +195,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
#endif
#if 0
{
struct sigaction sig_action = {};
sig_action.sa_sigaction = posix_signal_handler;
@ -223,6 +223,7 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
// }
env->DeleteLocalRef(clazz);
#endif
return result;
}
@ -280,7 +281,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
jobject pThis, jstring appPath,
jstring ip, jint port, jstring cmdid, jint protocol,
jint networkProtocol, jint encryptData, jlong netHandle, jint signalLevel,
jint versionCode, jlong buildTime, jstring simcard, jstring tfCardPath) {
jint versionCode, jlong buildTime, jstring simcard, jstring tfCardPath, jstring nativeLibraryDir) {
/*
google_breakpad::MinidumpDescriptor descriptor(".");
@ -310,6 +311,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
const char *cmdidStr = cmdid == NULL ? NULL : env->GetStringUTFChars(cmdid, 0);
const char *simcardStr = simcard == NULL ? NULL : env->GetStringUTFChars(simcard, 0);
const char *tfCardPathStr = tfCardPath == NULL ? NULL : env->GetStringUTFChars(tfCardPath, 0);
const char *nativeLibraryDirStr = nativeLibraryDir == NULL ? NULL : env->GetStringUTFChars(nativeLibraryDir, 0);
JavaVM* vm = NULL;
jint ret = env->GetJavaVM(&vm);
@ -320,7 +322,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
CTerminal* pTerminal = NewTerminal(protocol);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), NETID_UNSET, versionCode);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), NETID_UNSET, versionCode, MakeString(nativeLibraryDirStr));
device->SetListener(pTerminal);
device->UpdateSignalLevel(signalLevel);
device->SetBuildTime(buildTime / 1000);
@ -331,14 +333,20 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
// pTerminal->SetPacketSize(1 * 1024); // 1K
#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN)
pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", "");
#endif
#ifdef _DEBUG
ALOGD("Call Startup");
#endif
bool res = pTerminal->Startup(device);
#ifdef _DEBUG
ALOGD("Finish Startup");
#endif
if (appPathStr != NULL) env->ReleaseStringUTFChars(appPath, appPathStr);
if (ipStr != NULL) env->ReleaseStringUTFChars(ip, ipStr);
if (cmdidStr != NULL) env->ReleaseStringUTFChars(cmdid, cmdidStr);
if (simcardStr != NULL) env->ReleaseStringUTFChars(simcard, simcardStr);
if (tfCardPathStr != NULL) env->ReleaseStringUTFChars(tfCardPath, tfCardPathStr);
if (nativeLibraryDirStr != NULL) env->ReleaseStringUTFChars(nativeLibraryDir, nativeLibraryDirStr);
if (!res)
{
@ -392,7 +400,7 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
CTerminal::LoadChannelConfig(channel, configFilePathStr, cfg);
CTerminal::ConvertChannelConfigToPhotoInfo(cfg, photoOrVideo != JNI_FALSE, photoInfo);
CPhoneDevice* device = new CPhoneDevice(vm, NULL, "", NETID_UNSET, 0);
CPhoneDevice* device = new CPhoneDevice(vm, NULL, "", NETID_UNSET, 0, std::string(""));
// device->SetListener(pTerminal);
if (photoInfo.usbCamera)
@ -885,9 +893,11 @@ Java_com_xypower_mpapp_MicroPhotoService_burstCaptureFinished(
return;
}
#if 0
const char* pathsStr = env->GetStringUTFChars(pathsJoinedByTab, 0);
((CPhoneDevice *)dev)->ProcessRawCapture(result != JNI_FALSE, numberOfCaptures, MakeString(pathsStr), frontCamera != JNI_FALSE, rotation, photoId);
env->ReleaseStringUTFChars(pathsJoinedByTab, pathsStr);
#endif
}
}
@ -940,7 +950,7 @@ Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
JNIEnv* env, jclass cls, jlong handler, jstring path) {
JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
@ -953,10 +963,25 @@ Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
return JNI_FALSE;
}
const char *pathStr = env->GetStringUTFChars(path, 0);
IDevice::PHOTO_INFO* pPhotoInfo = photoInfo == 0 ? NULL : reinterpret_cast<IDevice::PHOTO_INFO *>(photoInfo);
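// the native side takes ownership of photoInfo here and deletes it after the send completes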
const char *pathStr = NULL;
if (path != NULL)
{
pathStr = env->GetStringUTFChars(path, 0);
}
bool res = pTerminal->SendExternalPhoto(pathStr);
env->ReleaseStringUTFChars(path, pathStr);
if (pathStr != NULL)
{
env->ReleaseStringUTFChars(path, pathStr);
}
if (pPhotoInfo != NULL)
{
delete pPhotoInfo;
}
return res ? JNI_TRUE : JNI_FALSE;
}
@ -1328,8 +1353,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
#ifdef USING_NRSEC
if (env->GetStringUTFLength(outputPath) <= 0)
{
if (env->GetStringUTFLength(outputPath) <= 0) {
return JNI_FALSE;
}
@ -1340,8 +1364,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
GpioControl::setSpiPower(true);
NrsecPort nrsec;
if (!nrsec.Open(path))
{
if (!nrsec.Open(path)) {
return JNI_FALSE;
}
@ -1354,9 +1377,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOffCameraPower(NULL);
if (res)
{
const char* outputPathStr = env->GetStringUTFChars(outputPath, 0);
if (res) {
const char *outputPathStr = env->GetStringUTFChars(outputPath, 0);
res = writeFile(outputPathStr, &data[0], len);
env->ReleaseStringUTFChars(outputPath, outputPathStr);
}

File diff suppressed because it is too large

@ -161,7 +161,9 @@ public:
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
protected:
CPhoneDevice* m_dev;
@ -174,7 +176,9 @@ public:
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
protected:
std::string m_path;
@ -190,7 +194,7 @@ public:
unsigned long uid;
};
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode);
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode, const std::string& nativeLibDir);
virtual ~CPhoneDevice();
virtual void SetListener(IListener* listener);
@ -201,7 +205,7 @@ public:
virtual bool UpdateSchedules();
virtual bool QuerySystemProperties(map<string, string>& properties);
virtual bool InstallAPP(const std::string& path, unsigned int delayedTime);
virtual bool Reboot(int resetType);
virtual bool Reboot(int resetType, const std::string& reason);
virtual bool EnableGPS(bool enabled);
virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging);
virtual bool RequestPosition();
@ -215,15 +219,14 @@ public:
virtual int GetWData(WEATHER_INFO *weatherInfo);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM *sensorParam);
virtual bool OpenSensors();
virtual bool CloseSensors();
virtual bool OpenSensors(int sensortype);
virtual bool CloseSensors(int sensortype);
bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector<uint32_t>& items);
void UpdatePosition(double lon, double lat, double radius, time_t ts);
bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId);
bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId);
bool ProcessRawCapture(bool result, int numberOfCaptures, const std::string& pathsJoinedByTab, bool frontCamera, int rotation, long photoId);
void UpdateSignalLevel(int signalLevel);
void UpdateTfCardPath(const std::string& tfCardPath)
@ -275,7 +278,9 @@ protected:
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat& mat);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
void onError(const std::string& msg);
void onDisconnected(ACameraDevice* device);
@ -287,10 +292,12 @@ protected:
void handleTimerImpl(TIMER_CONTEXT* context);
void static handleRebootTimer(union sigval v);
// void handleRebootTimerImpl();
void RestartApp(int rebootType, long timeout);
void RestartApp(int rebootType, long timeout, const std::string& reason);
int QueryBatteryVoltage(int retries);
int CallExecv(int rotation, int frontCamera, const std::string& outputPath, const std::vector<std::string>& images);
protected:
std::mutex m_devLocker;
@ -299,6 +306,7 @@ protected:
jobject m_javaService;
std::string m_appPath;
std::string m_tfCardPath;
std::string m_nativeLibraryDir;
jmethodID mRegisterHeartbeatMid;
jmethodID mUpdateCaptureScheduleMid;
@ -314,6 +322,9 @@ protected:
jmethodID mInstallAppMid;
jmethodID mEnableGpsMid;
jmethodID mRequestPositionMid;
jmethodID mExecHdrplusMid;
jmethodID mCallSysCameraMid;
std::string mPath;
IDevice::PHOTO_INFO mPhotoInfo;
@ -327,6 +338,7 @@ protected:
atomic_ulong m_timerUidFeed;
atomic_ulong m_wakelockIdFeed;
atomic_ulong m_uniqueIdFeed;
std::map<IDevice::timer_uid_t, TIMER_CONTEXT*> mTimers;
mutable CPhoneCamera* mCamera;

@ -70,18 +70,26 @@ int getInt(int cmd)
}
static void setRS485Enable(bool z) {
#ifndef USING_N938
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
#endif
}
static void set485WriteMode() {
#ifndef USING_N938
setInt(CMD_SET_485_STATE, 1);
#endif
}
static void set485ReadMode() {
#ifndef USING_N938
setInt(CMD_SET_485_STATE, 0);
#endif
}
static void set12VEnable(bool z) {
#ifndef USING_N938
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
#endif
}
static void setCam3V3Enable(bool enabled)
@ -595,25 +603,25 @@ void Gm_CloseSensorsPower()
#endif
#if 1
setInt(CMD_SET_SPI_POWER, 1);
setInt(CMD_SET_485_en0, 1);
setInt(CMD_SET_485_en1, 1);
setInt(CMD_SET_485_en2, 1);
setInt(CMD_SET_485_en3, 1);
setInt(CMD_SET_485_en4, 1);
setInt(CMD_SET_485_EN0, 1);
setInt(CMD_SET_485_EN1, 1);
setInt(CMD_SET_485_EN2, 1);
setInt(CMD_SET_485_EN3, 1);
setInt(CMD_SET_485_EN4, 1);
#else
setInt(CMD_SET_SPI_POWER, 0);
setInt(CMD_SET_485_en0, 0);
setInt(CMD_SET_485_en1, 0);
setInt(CMD_SET_485_en2, 0);
setInt(CMD_SET_485_en3, 0);
setInt(CMD_SET_485_en4, 0);
setInt(CMD_SET_485_EN0, 0);
setInt(CMD_SET_485_EN1, 0);
setInt(CMD_SET_485_EN2, 0);
setInt(CMD_SET_485_EN3, 0);
setInt(CMD_SET_485_EN4, 0);
sleep(3);
igpio = getInt(CMD_SET_SPI_POWER);
igpio = getInt(CMD_SET_485_en0);
igpio = getInt(CMD_SET_485_en1);
igpio = getInt(CMD_SET_485_en2);
igpio = getInt(CMD_SET_485_en3);
igpio = getInt(CMD_SET_485_en4);
igpio = getInt(CMD_SET_485_EN0);
igpio = getInt(CMD_SET_485_EN1);
igpio = getInt(CMD_SET_485_EN2);
igpio = getInt(CMD_SET_485_EN3);
igpio = getInt(CMD_SET_485_EN4);
#endif
*/
}
@ -655,26 +663,26 @@ void Gm_OpenSensorsPower()
#endif
#if 1
setInt(CMD_SET_SPI_POWER, 1);
setInt(CMD_SET_485_en0, 1);
setInt(CMD_SET_485_en1, 1);
setInt(CMD_SET_485_en2, 1);
setInt(CMD_SET_485_en3, 1);
setInt(CMD_SET_485_en4, 1);
setInt(CMD_SET_485_EN0, 1);
setInt(CMD_SET_485_EN1, 1);
setInt(CMD_SET_485_EN2, 1);
setInt(CMD_SET_485_EN3, 1);
setInt(CMD_SET_485_EN4, 1);
//sleep(3);
igpio = getInt(CMD_SET_SPI_POWER);
igpio = getInt(CMD_SET_485_en0);
igpio = getInt(CMD_SET_485_en1);
igpio = getInt(CMD_SET_485_en2);
igpio = getInt(CMD_SET_485_en3);
igpio = getInt(CMD_SET_485_en4);
igpio = getInt(CMD_SET_485_EN0);
igpio = getInt(CMD_SET_485_EN1);
igpio = getInt(CMD_SET_485_EN2);
igpio = getInt(CMD_SET_485_EN3);
igpio = getInt(CMD_SET_485_EN4);
#else
setInt(CMD_SET_485_en0, 0);
setInt(CMD_SET_485_en1, 0);
setInt(CMD_SET_485_en2, 0);
setInt(CMD_SET_485_en3, 0);
setInt(CMD_SET_485_en4, 0);
setInt(CMD_SET_485_EN0, 0);
setInt(CMD_SET_485_EN1, 0);
setInt(CMD_SET_485_EN2, 0);
setInt(CMD_SET_485_EN3, 0);
setInt(CMD_SET_485_EN4, 0);
#endif
// Turn on the power
@ -1282,6 +1290,7 @@ void GM_StartSerialComm()
{
if (i == srdt.camerauseserial)
continue;
serialport[i].Retry = 0;
serialport[i].RetryTime = 800;
serialport[i].WaitTime = 20;

@ -51,16 +51,16 @@ static void set_parity (struct termios *opt, char parity)
{
switch (parity)
{
case'N':/* no parity */
case 'N':/* no parity */
case 'n':
opt->c_cflag &= ~PARENB;
break;
case'E':/* even parity */
case 'E':/* even parity */
case 'e':
opt->c_cflag |= PARENB;
opt->c_cflag &= ~PARODD;
break;
case'O':/* odd parity */
case 'O':/* odd parity */
case 'o':
opt->c_cflag |= PARENB;
opt->c_cflag |= PARODD;

@ -28,6 +28,16 @@
#include <LogThread.h>
#include "DngCreator.h"
#ifdef _DEBUG
void Auto_AImage_delete(AImage* image)
{
AImage_delete(image);
}
#else
#define Auto_AImage_delete AImage_delete
#endif
static void onAvailabilityCallback(void* context, const char* cameraId)
{
((NdkCamera*)context)->onAvailabilityCallback(cameraId);
@ -100,6 +110,48 @@ void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureR
((NdkCamera*)context)->onCaptureCompleted(session, request, result);
}
inline uint8_t GetCaptureIntent(ACameraDevice_request_template templateId)
{
/*
ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM = 0,
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW = 1,
ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2,
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3,
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4,
ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5,
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
*/
uint8_t captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
switch (templateId)
{
case TEMPLATE_PREVIEW: // = 1,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW;
break;
case TEMPLATE_STILL_CAPTURE: // = 2,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
break;
case TEMPLATE_RECORD: // = 3,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
break;
case TEMPLATE_VIDEO_SNAPSHOT: // = 4,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
break;
case TEMPLATE_ZERO_SHUTTER_LAG: // = 5,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
break;
case TEMPLATE_MANUAL: // = 6,
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL;
break;
default:
captureIntent = ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
break;
}
return captureIntent;
}
NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params)
{
camera_facing = 0;
@ -118,6 +170,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
aeLockAvailable = false;
awbLockAvailable = false;
m_fatalError = false;
sceneModeSupported = false;
@ -138,10 +191,17 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mPreviewImageReader = NULL;
mPreviewImageWindow = NULL;
mPreviewOutputTarget = NULL;
mPreviewSessionOutput = NULL;
mImageReader = NULL;
mImageWindow = NULL;
mOutputTarget = NULL;
mSessionOutput = NULL;
mImageReader2 = NULL;
mImageWindow2 = NULL;
mOutputTarget2 = NULL;
mSessionOutput2 = NULL;
camera_device = 0;
@ -152,6 +212,13 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mResult = { 0 };
mLdr = ~0;
mFinalLdr = 0;
mFinalBurstCaptures = m_params.burstRawCapture == 0 ? 1 : m_params.burstCaptures;
if (mFinalBurstCaptures == 0)
{
mFinalBurstCaptures = 1;
}
mFinalOutputFormat = (m_params.burstRawCapture == 0) ? AIMAGE_FORMAT_YUV_420_888 : AIMAGE_FORMAT_RAW16;
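// burstRawCapture: 0 = single YUV frame, 1 = auto (RAW16 burst that may fall back to one YUV frame at capture time), other values = RAW16 burst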
}
NdkCamera::~NdkCamera()
@ -223,6 +290,8 @@ int NdkCamera::selfTest(const std::string& cameraId, int32_t& maxResolutionX, in
}
}
ACameraMetadata_free(camera_metadata);
return 0;
}
@ -239,6 +308,8 @@ int NdkCamera::open(const std::string& cameraId) {
DisplayDimension disp(mWidth, mHeight);
DisplayDimension foundRes = disp;
camera_status_t status = ACAMERA_OK;
int32_t previewWidth = 0;
int32_t previewHeight = 0;
ALOGD("Start ACameraManager_getCameraIdList");
{
@ -296,24 +367,52 @@ int NdkCamera::open(const std::string& cameraId) {
if (input) continue;
int32_t format = e.data.i32[i + 0];
if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
if (format == AIMAGE_FORMAT_RAW16)
{
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
if (mFinalOutputFormat == AIMAGE_FORMAT_RAW16)
{
if (res.width() >= mWidth && res.height() >= mHeight)
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
if (!disp.IsSameRatio(res))
{
temp = res;
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
continue;
if (res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
else if (format == AIMAGE_FORMAT_YUV_420_888/* || format == AIMAGE_FORMAT_JPEG*/)
{
if (previewWidth == 0 || previewHeight == 0)
{
previewWidth = e.data.i32[i + 1];
previewHeight = e.data.i32[i + 2];
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
if (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888)
{
foundIt = true;
foundRes = res;
DisplayDimension res(e.data.i32[i + 1], e.data.i32[i + 2]);
// XYLOG(XYLOG_SEVERITY_DEBUG, "CameraId=%s CX=%d CY=%d", cameraId.c_str(), res.width(), res.height());
if (!disp.IsSameRatio(res))
{
if (res.width() >= mWidth && res.height() >= mHeight)
{
temp = res;
}
continue;
}
if (/*format == AIMAGE_FORMAT_YUV_420_888 && */res > disp)
{
foundIt = true;
foundRes = res;
}
}
}
}
@ -556,6 +655,10 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACameraManager_openCamera(camera_manager, cameraId.c_str(), &camera_device_state_callbacks, &camera_device);
if (status != ACAMERA_OK)
{
if (status == ACAMERA_ERROR_MAX_CAMERA_IN_USE)
{
m_fatalError = true;
}
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to open camera %s res=%d", cameraId.c_str(), status);
return 1;
}
@ -572,7 +675,8 @@ int NdkCamera::open(const std::string& cameraId) {
}
// setup imagereader and its surface
media_status_t mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, 5, &mPreviewImageReader);
media_status_t mstatus = AImageReader_new(previewWidth, previewHeight, AIMAGE_FORMAT_YUV_420_888, 4, &mPreviewImageReader);
AASSERT(mstatus == AMEDIA_OK, "Failed to call AImageReader_new preview, status=%d", mstatus);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@ -587,7 +691,8 @@ int NdkCamera::open(const std::string& cameraId) {
status = ACaptureSessionOutput_create(mPreviewImageWindow, &mPreviewSessionOutput);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mPreviewSessionOutput);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 2, &mImageReader);
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), getOutputFormat(), burstCaptures + 1, &mImageReader);
AASSERT(mstatus == AMEDIA_OK, "Failed to call AImageReader_new, status=%d", mstatus);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
@ -598,11 +703,33 @@ int NdkCamera::open(const std::string& cameraId) {
ANativeWindow_acquire(mImageWindow);
}
status = ACameraOutputTarget_create(mImageWindow, &mOutputTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraOutputTarget_create, status=%d", status);
status = ACaptureSessionOutput_create(mImageWindow, &mSessionOutput);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureSessionOutput_create, status=%d", status);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput);
if (m_params.burstRawCapture == 1) // Auto
{
mstatus = AImageReader_new(foundRes.org_width(), foundRes.org_height(), AIMAGE_FORMAT_YUV_420_888, burstCaptures, &mImageReader2);
if (mstatus == AMEDIA_OK)
{
AImageReader_ImageListener listener;
listener.context = this;
listener.onImageAvailable = ::onImageAvailable;
mstatus = AImageReader_setImageListener(mImageReader2, &listener);
mstatus = AImageReader_getWindow(mImageReader2, &mImageWindow2);
ANativeWindow_acquire(mImageWindow2);
}
status = ACameraOutputTarget_create(mImageWindow2, &mOutputTarget2);
status = ACaptureSessionOutput_create(mImageWindow2, &mSessionOutput2);
status = ACaptureSessionOutputContainer_add(capture_session_output_container, mSessionOutput2);
}
CaptureRequest *request = CreateRequest(true);
mCaptureRequests.push_back(request);
#if 0
@ -627,7 +754,7 @@ int NdkCamera::open(const std::string& cameraId) {
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@ -808,6 +935,7 @@ int NdkCamera::open(const std::string& cameraId) {
camera_capture_session_state_callbacks.onReady = ::onSessionReady;
camera_capture_session_state_callbacks.onClosed = onSessionClosed;
status = ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureSession, status=%d", status);
ACameraCaptureSession_captureCallbacks capture_session_capture_callbacks;
capture_session_capture_callbacks.context = this;
@ -820,7 +948,7 @@ int NdkCamera::open(const std::string& cameraId) {
capture_session_capture_callbacks.onCaptureBufferLost = 0;
status = ACameraCaptureSession_setRepeatingRequest(capture_session, &capture_session_capture_callbacks, 1, &(mCaptureRequests[PREVIEW_REQUEST_IDX]->request), &(mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId));
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_setRepeatingRequest, status=%d", status);
ALOGW("Preview Request: seqId=%d", mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId);
m_startTime = GetMicroTimeStamp();
@ -837,24 +965,26 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
CaptureRequest *request = new CaptureRequest();
std::memset(request, 0, sizeof(CaptureRequest));
bool autoSwitchToOneFrame = (m_params.burstRawCapture == 1) && (mFinalOutputFormat == AIMAGE_FORMAT_YUV_420_888);
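// In auto mode the output may have been switched to YUV at capture time (see onCaptureCompleted); such requests go to the secondary YUV reader.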
request->pThis = this;
request->imageReader = isPreviewRequest ? mPreviewImageReader : mImageReader;
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : mImageWindow;
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : mOutputTarget;
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : mSessionOutput;
request->imageReader = isPreviewRequest ? mPreviewImageReader : (autoSwitchToOneFrame ? mImageReader2 : mImageReader);
request->imageWindow = isPreviewRequest ? mPreviewImageWindow : (autoSwitchToOneFrame ? mImageWindow2 : mImageWindow);
request->imageTarget = isPreviewRequest ? mPreviewOutputTarget : (autoSwitchToOneFrame ? mOutputTarget2 : mOutputTarget);
request->sessionOutput = isPreviewRequest ? mPreviewSessionOutput : (autoSwitchToOneFrame ? mSessionOutput2 : mSessionOutput);
request->templateId = isPreviewRequest ? TEMPLATE_PREVIEW : (ACameraDevice_request_template)m_params.requestTemplate;
// mCaptureRequests.push_back(request);
// capture request
status = ACameraDevice_createCaptureRequest(camera_device, request->templateId, &request->request);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraDevice_createCaptureRequest, status=%d", status);
ACaptureRequest_setUserContext(request->request, request);
// uint8_t ctrlMode = sceneModeSupported ? ACAMERA_CONTROL_MODE_USE_SCENE_MODE : ACAMERA_CONTROL_MODE_AUTO;
uint8_t ctrlMode = ACAMERA_CONTROL_MODE_AUTO;
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_MODE, 1, &ctrlMode);
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
uint8_t captureIntent = isPreviewRequest ? ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : GetCaptureIntent((ACameraDevice_request_template)m_params.requestTemplate);
status = ACaptureRequest_setEntry_u8(request->request, ACAMERA_CONTROL_CAPTURE_INTENT, 1, &captureIntent);
uint8_t flashMode = ACAMERA_FLASH_MODE_OFF;
@ -1022,6 +1152,7 @@ NdkCamera::CaptureRequest* NdkCamera::CreateRequest(bool isPreviewRequest)
}
status = ACaptureRequest_addTarget(request->request, request->imageTarget);
AASSERT(status == ACAMERA_OK, "Failed to call ACaptureRequest_addTarget, status=%d", status);
// status = ACaptureSessionOutput_create(request->imageWindow, &request->sessionOutput);
// status = ACaptureSessionOutputContainer_add(capture_session_output_container, request->sessionOutput);
@ -1034,6 +1165,21 @@ void NdkCamera::close()
XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::try close %s", mCameraId.c_str());
camera_status_t res = ACAMERA_OK;
/*
if (mPreviewImageReader != NULL)
{
AImageReader_setImageListener(mPreviewImageReader, NULL);
}
if (mImageReader != NULL)
{
AImageReader_setImageListener(mImageReader, NULL);
}
if (mImageReader2 != NULL)
{
AImageReader_setImageListener(mImageReader2, NULL);
}
*/
mCaptureFrames.clear();
if ((ACameraManager *)camera_manager != NULL)
@ -1085,7 +1231,7 @@ void NdkCamera::close()
if (mPreviewImageReader != NULL)
{
// AImageReader_setImageListener(image_reader, NULL);
AImageReader_setImageListener(mPreviewImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mPreviewImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
@ -1107,7 +1253,7 @@ void NdkCamera::close()
if (mImageReader != NULL)
{
// AImageReader_setImageListener(image_reader, NULL);
AImageReader_setImageListener(mImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
@ -1115,6 +1261,28 @@ void NdkCamera::close()
mImageReader = 0;
}
if (mOutputTarget2 != NULL)
{
ACameraOutputTarget_free(mOutputTarget2);
mOutputTarget2 = 0;
}
if (mImageWindow2 != NULL)
{
ANativeWindow_release(mImageWindow2);
mImageWindow2 = 0;
}
if (mImageReader2 != NULL)
{
AImageReader_setImageListener(mImageReader2, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mImageReader2);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader2 = 0;
}
if (mPreviewSessionOutput != NULL)
{
if (capture_session_output_container)
@ -1134,6 +1302,15 @@ void NdkCamera::close()
ACaptureSessionOutput_free(mSessionOutput);
mSessionOutput = 0;
}
if (mSessionOutput2 != NULL)
{
if (capture_session_output_container)
{
ACaptureSessionOutputContainer_remove(capture_session_output_container, mSessionOutput2);
}
ACaptureSessionOutput_free(mSessionOutput2);
mSessionOutput2 = 0;
}
if (capture_session_output_container)
{
@ -1165,12 +1342,12 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
XYLOG(XYLOG_SEVERITY_ERROR, "Preview AImageReader_acquireLatestImage error: %d", mstatus);
}
return;
}
if (mLdr == ~0)
if (!mCaptureTriggered)
{
uint8_t* y_data = 0;
int y_len = 0;
@ -1182,7 +1359,9 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
uint64_t avgY = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
#endif
avgY = avgY / (uint64_t)y_len;
mLdr = avgY;
m_locker.lock();
mLdr = (uint8_t)avgY;
m_locker.unlock();
}
AImage_delete(image);
@ -1190,7 +1369,12 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
}
else
{
while (1)
uint32_t burstCaptures = getBurstCaptures();
if (burstCaptures == 0)
{
burstCaptures = 1;
}
if (burstCaptures == 1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
@ -1198,32 +1382,154 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < m_params.burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Capture AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
return;
}
unsigned long long ts = GetMicroTimeStamp();
int32_t format;
mstatus = AImage_getFormat(image, &format);
if (format == AIMAGE_FORMAT_YUV_420_888)
{
int32_t width;
int32_t height;
mstatus = AImage_getWidth(image, &width);
mstatus = AImage_getHeight(image, &height);
int32_t y_pixelStride = 0;
int32_t u_pixelStride = 0;
int32_t v_pixelStride = 0;
AImage_getPlanePixelStride(image, 0, &y_pixelStride);
AImage_getPlanePixelStride(image, 1, &u_pixelStride);
AImage_getPlanePixelStride(image, 2, &v_pixelStride);
int32_t y_rowStride = 0;
int32_t u_rowStride = 0;
int32_t v_rowStride = 0;
AImage_getPlaneRowStride(image, 0, &y_rowStride);
AImage_getPlaneRowStride(image, 1, &u_rowStride);
AImage_getPlaneRowStride(image, 2, &v_rowStride);
uint8_t* y_data = 0;
uint8_t* u_data = 0;
uint8_t* v_data = 0;
int y_len = 0;
int u_len = 0;
int v_len = 0;
AImage_getPlaneData(image, 0, &y_data, &y_len);
AImage_getPlaneData(image, 1, &u_data, &u_len);
AImage_getPlaneData(image, 2, &v_data, &v_len);
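// Fast path check: the planes are already NV21 in memory (one contiguous Y plane, then interleaved VU pairs, so v_data follows Y and u_data == v_data + 1)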
if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
{
// already nv21
ConvertYUV21ToMat(y_data, width, height, mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
}
else
{
// construct nv21
uint8_t* nv21 = new uint8_t[width * height + width * height / 2];
{
// Y
uint8_t* yptr = nv21;
for (int y = 0; y < height; y++)
{
const uint8_t* y_data_ptr = y_data + y_rowStride * y;
for (int x = 0; x < width; x++)
{
yptr[0] = y_data_ptr[0];
yptr++;
y_data_ptr += y_pixelStride;
}
}
// UV
uint8_t* uvptr = nv21 + width * height;
for (int y = 0; y < height / 2; y++)
{
const uint8_t* v_data_ptr = v_data + v_rowStride * y;
const uint8_t* u_data_ptr = u_data + u_rowStride * y;
for (int x = 0; x < width / 2; x++)
{
uvptr[0] = v_data_ptr[0];
uvptr[1] = u_data_ptr[0];
uvptr += 2;
v_data_ptr += v_pixelStride;
u_data_ptr += u_pixelStride;
}
}
}
ConvertYUV21ToMat(nv21, width, height, mWidth, mHeight, camera_orientation,
camera_facing == ACAMERA_LENS_FACING_FRONT, m_params.orientation, mOneFrame);
delete[] nv21;
}
}
m_photoTaken = true;
m_locker.lock();
mCaptureFrames.push_back(std::shared_ptr<AImage>(image, AImage_delete));
m_locker.unlock();
ALOGD("Capture Image Received");
AImage_delete(image);
std::shared_ptr<ACameraMetadata> result;
bool captureCompleted = false;
m_locker.lock();
if (!mCaptureResults.empty())
{
captureCompleted = true;
result = mCaptureResults[0];
}
m_locker.unlock();
if (captureCompleted)
{
onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
}
}
else
{
while (1)
{
mstatus = AImageReader_acquireNextImage(reader, &image);
if (mstatus != AMEDIA_OK)
{
// https://stackoverflow.com/questions/67063562
if (mstatus != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE)
{
if (mCaptureFrames.size() < burstCaptures)
{
XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireNextImage error: %d", mstatus);
}
}
break;
}
bool captureCompleted = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
m_locker.unlock();
m_photoTaken = true;
m_locker.lock();
mCaptureFrames.push_back(std::shared_ptr<AImage>(image, Auto_AImage_delete));
m_locker.unlock();
ALOGD("Capture Image Received");
}
bool captureCompleted = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
m_locker.unlock();
if (captureCompleted)
{
FireBurstCapture();
}
}
if (captureCompleted)
{
onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
}
}
}
@ -1240,7 +1546,17 @@ bool NdkCamera::on_image(cv::Mat& rgb)
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames)
bool NdkCamera::onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames)
{
return false;
}
bool NdkCamera::onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames)
{
return false;
}
@ -1449,6 +1765,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
status = ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
//XYLOG(XYLOG_SEVERITY_DEBUG, "Trigger PRECAPTURE status=%d AES=%u", (int)status, (unsigned int)mResult.aeState);
AASSERT(status == ACAMERA_OK, "Failed to call PRECAPTURE_TRIGGER, status=%d", status);
readyForCapture = false;
numberOfPrecaptures = 0;
@ -1522,8 +1839,23 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
if (readyForCapture/* && mCaptureRequests.size() > 1*/)
{
ALOGW("Ready for Capture AFS=%u AES=%u AWBS=%u Time=%u",
(unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, (unsigned int)(ts - m_startTime));
// Must update mFinalLdr first, as getBurstCaptures() and getOutputFormat() depend on it
if (mLdr != ~0)
{
mFinalLdr = mLdr;
}
XYLOG(XYLOG_SEVERITY_INFO, "Ready for Capture AFS=%u AES=%u AWBS=%u LDR=%u Time=%u",
(unsigned int)afState, (unsigned int)aeState, (unsigned int)awbState, mFinalLdr, (unsigned int)(ts - m_startTime));
if (m_params.burstRawCapture == 1)
{
if (mFinalLdr > 50)
{
XYLOG(XYLOG_SEVERITY_WARNING, "Switch to OneFrame Capture(YUV) As LDR=%u", mFinalLdr);
mFinalOutputFormat = AIMAGE_FORMAT_YUV_420_888;
mFinalBurstCaptures = 1;
}
}
uint32_t burstCaptures = getBurstCaptures();
if (burstCaptures == 0)
@ -1544,7 +1876,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
// ALOGW("Will Stop Repeating Request");
status = ACameraCaptureSession_stopRepeating(capture_session);
// status = ACameraCaptureSession_stopRepeating(capture_session);
// ALOGW("Finished Repeating Request");
ACameraCaptureSession_captureCallbacks capture_session_capture_cb;
@ -1560,6 +1892,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
int numberOfRequests = requests.size();
status = ACameraCaptureSession_capture(capture_session, &capture_session_capture_cb,
numberOfRequests, &requests[0], &sequenceId);
AASSERT(status == ACAMERA_OK, "Failed to call ACameraCaptureSession_capture, status=%d", status);
ALOGW("Capture num = %d sequenceId=%d", numberOfRequests, sequenceId);
for (int idx = 1; idx < mCaptureRequests.size(); idx++)
@ -1572,24 +1905,101 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
}
else
{
#ifdef _DEBUG
uint64_t tid = getThreadIdOfULL();
ALOGW("Capture Result sequenceId=%d TID=%lld", pCaptureRequest->sessionSequenceId, (long long)tid);
#endif
unsigned long long ts = GetMicroTimeStamp();
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
m_locker.lock();
mCaptureResults.push_back(std::shared_ptr<ACameraMetadata>(pCopy, ACameraMetadata_free));
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
m_locker.unlock();
if (captureCompleted)
{
onBurstCapture(mCharacteristics, mCaptureResults, mLdr, mCaptureFrames);
}
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
if (expectedTimes == 1)
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = !mOneFrame.empty();
m_locker.unlock();
if (captureCompleted)
{
onOneCapture(mCharacteristics, captureResult, mFinalLdr, ts - m_startTime, mOneFrame);
}
}
else
{
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
m_locker.unlock();
if (captureCompleted)
{
FireBurstCapture();
}
}
}
}
void NdkCamera::FireBurstCapture()
{
unsigned long long ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
std::vector<std::shared_ptr<ACameraMetadata> > captureResults;
uint32_t ldr;
std::vector<std::shared_ptr<AImage> > captureFrames;
m_locker.lock();
ldr = mFinalLdr;
if (ldr == 0 && mLdr != ~0)
{
ldr = mLdr;
}
captureResults.swap(mCaptureResults);
captureFrames.swap(mCaptureFrames);
m_locker.unlock();
media_status_t mstatus;
std::vector<std::vector<uint8_t> > frames;
for (size_t idx = 0; idx < expectedTimes; idx++)
{
std::shared_ptr<AImage> spImage = captureFrames[idx];
std::shared_ptr<ACameraMetadata> spResult = captureResults[idx];
auto it = frames.insert(frames.end(), std::vector<uint8_t>());
int32_t width = 0;
int32_t height = 0;
mstatus = AImage_getWidth(spImage.get(), &width);
mstatus = AImage_getHeight(spImage.get(), &height);
int32_t planeCount = 0;
mstatus = AImage_getNumberOfPlanes(spImage.get(), &planeCount);
AASSERT(mstatus == AMEDIA_OK && planeCount == 1, "Error: getNumberOfPlanes() planeCount = %d", planeCount);
uint8_t *planeData = NULL;
int planeDataLen = 0;
mstatus = AImage_getPlaneData(spImage.get(), 0, &planeData, &planeDataLen);
ALOGD("Start Converting Dng");
DngCreator dngCreator(mCharacteristics.get(), spResult.get());
dngCreator.writeInputBuffer(*it, planeData, planeDataLen, width, height, 0);
ALOGD("End Converting Dng");
}
captureFrames.clear();
onBurstCapture(mCharacteristics, captureResults, ldr, ts - m_startTime, frames);
#ifdef _DEBUG
ALOGD("Frames Size: %u", (uint32_t)frames.size());
#endif
}
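Stripped to its core, FireBurstCapture is a swap-under-lock handoff: the mutex is held only for the O(1) vector swaps, and the per-frame DNG encoding then runs with the lock released so the camera callbacks are never blocked. A sketch of just the pattern (not code from this commit):

std::vector<std::shared_ptr<AImage> > local;
m_locker.lock();
local.swap(mCaptureFrames); // O(1): steals the buffers and leaves the member empty for the next burst
m_locker.unlock();
// ...slow work (here, DngCreator::writeInputBuffer per frame) proceeds on `local` with the lock released...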
void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult)
{
camera_status_t status = ACAMERA_ERROR_BASE;
@ -1622,9 +2032,13 @@ void NdkCamera::CopyPreviewRequest(ACaptureRequest* request, const ACameraMetada
void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure)
{
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d", session, request, failure->reason);
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d PhotoTaken=%d", session, request, failure->reason, m_photoTaken ? 1 : 0);
char msg[32] = { 0 };
if (failure->sequenceId == mCaptureRequests[PREVIEW_REQUEST_IDX]->sessionSequenceId)
{
return;
}
char msg[64] = { 0 };
snprintf(msg, sizeof(msg), "CaptureFailed reason=%d PhotoTaken=%d", failure->reason, m_photoTaken ? 1 : 0);
if (!m_photoTaken)
{
@ -1674,12 +2088,13 @@ bool NdkCamera::IsCameraAvailable(const std::string& cameraId)
int32_t NdkCamera::getOutputFormat() const
{
return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888;
return mFinalOutputFormat;
// return m_params.burstRawCapture ? AIMAGE_FORMAT_RAW16 : AIMAGE_FORMAT_YUV_420_888;
}
int32_t NdkCamera::getBurstCaptures() const
{
return m_params.burstRawCapture ? m_params.burstCaptures : 1;
return mFinalBurstCaptures;
}
void NdkCamera::CreateSession(ANativeWindow* previewWindow,
@ -1912,7 +2327,6 @@ bool NdkCamera::convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& widt
}
}
// on_image((unsigned ch
}
}

@ -81,8 +81,8 @@ public:
unsigned int orientation:3;
unsigned int zoom : 1;
unsigned int wait3ALocked : 3;
unsigned int burstRawCapture : 1;
unsigned int reserved : 18;
unsigned int burstRawCapture : 2;
unsigned int reserved : 16;
int64_t exposureTime;
unsigned int sensitivity;
int compensation;
@ -166,7 +166,10 @@ public:
virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, std::vector<std::shared_ptr<AImage> >& frames);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
void onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
@ -176,9 +179,16 @@ public:
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
void FireBurstCapture();
uint32_t GetLdr() const
{
return mLdr;
return mFinalLdr;
}
bool HasFatalError() const
{
return m_fatalError;
}
bool IsCameraAvailable(const std::string& cameraId);
@ -206,6 +216,7 @@ protected:
uint8_t awbMode;
bool aeLockAvailable;
bool awbLockAvailable;
bool m_fatalError;
uint64_t numberOfPrecaptures;
unsigned long long m_precaptureStartTime;
@ -244,14 +255,25 @@ protected:
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
AImageReader* mImageReader2;
ANativeWindow* mImageWindow2;
ACameraOutputTarget* mOutputTarget2;
ACaptureSessionOutput* mSessionOutput2;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
cv::Mat mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;
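Note on the getter changes above: getOutputFormat() and getBurstCaptures() now return the cached mFinalOutputFormat / mFinalBurstCaptures fields instead of re-deriving the values from m_params on every call. A minimal sketch of how these fields could be seeded, assuming the widened 2-bit burstRawCapture mode still selects between RAW16 bursts and a single YUV frame — the helper below is hypothetical, not the actual NdkCamera code:

#include <cstdint>
#include <media/NdkImage.h>

// Hypothetical seeding helper (illustration only): mode 0 = single
// YUV frame, non-zero modes = RAW16 burst. The real initialization
// lives elsewhere in NdkCamera and may differ.
struct CaptureParams { uint32_t burstRawCapture; uint32_t burstCaptures; };

static void seedFinalParams(const CaptureParams& p,
                            int32_t& finalOutputFormat,
                            uint32_t& finalBurstCaptures)
{
    finalOutputFormat  = (p.burstRawCapture != 0) ? AIMAGE_FORMAT_RAW16
                                                  : AIMAGE_FORMAT_YUV_420_888;
    finalBurstCaptures = (p.burstRawCapture != 0) ? p.burstCaptures : 1;
}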

@ -0,0 +1,72 @@
#include "hdrplus/hdrplus_pipeline.h"
int main( int argc, char** argv )
{
if (argc < 5)
{
printf("Usage: %s rotation frontCamera outputPath rawPath1 [rawPath2 ...]\n", argv[0]);
return 1;
}
int rotation = atoi(argv[1]);
bool frontCamera = atoi(argv[2]) != 0;
std::vector<std::string> paths;
for (int idx = 4; idx < argc; idx++)
{
paths.push_back(argv[idx]);
}
cv::Mat mat;
hdrplus::hdrplus_pipeline pipeline;
pipeline.run_pipeline( paths, 0, mat);
if (mat.empty())
{
printf("run_pipeline return empty mat");
}
mat = hdrplus::convert16bit2_8bit_(mat.clone());
if (rotation > 0)
{
if (rotation == 1) // 0
{
cv::Mat tempPic;
cv::transpose(mat, tempPic);
cv::flip(tempPic, mat, 0);
}
else if (rotation == 2) // 90
{
cv::Mat tempPic;
cv::transpose(mat, tempPic);
cv::flip(tempPic, mat, 1);
}
else if (rotation == 3) // 180
{
if (frontCamera)
{
cv::flip(mat, mat, 0);
}
else
{
cv::flip(mat, mat, -1);
}
}
else if (rotation == 4) // 270
{
cv::Mat tempPic;
cv::transpose(mat, tempPic);
cv::flip(tempPic, mat, 0);
}
}
cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);
if (mat.empty())
{
printf("mat is empty before save");
}
bool res = cv::imwrite(argv[3], mat);
if (!res)
{
printf("Failed to write file %s err=%d", argv[3], errno);
}
return 0;
}
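For reference, the standalone binary built from this main() is invoked with positional arguments that match the command line assembled on the Java side (BridgeProvider.hdrPlus / MicroPhotoService.execHdrplus):

libhdrp.so <rotation> <frontCamera> <outputPath> <rawPath1> [rawPath2 ...]

argv[3] is the output image path and input RAW paths start at argv[4], which is why the loop above begins at idx = 4.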

@ -10,11 +10,29 @@
namespace hdrplus
{
class MemFile
{
public:
std::vector<uint8_t> content;
const std::vector<uint8_t>& GetConstData() const
{
return content;
}
std::vector<uint8_t>& GetData()
{
return content;
}
};
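MemFile is a thin wrapper around an in-memory byte buffer so the pipeline can consume RAW frames without touching the filesystem. A minimal sketch of filling one from disk, assuming MemFile is visible via the bayer_image header — in the app itself the buffers come from the camera, so this loader is purely illustrative:

#include <fstream>
#include <iterator>
#include <memory>
#include <string>
#include "hdrplus/bayer_image.h"

// Hypothetical helper: slurp a file's bytes into a MemFile.
std::shared_ptr<hdrplus::MemFile> LoadMemFile(const std::string& path)
{
    auto file = std::make_shared<hdrplus::MemFile>();
    std::ifstream in(path, std::ios::binary);
    file->content.assign(std::istreambuf_iterator<char>(in),
                         std::istreambuf_iterator<char>());
    return file;
}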
class bayer_image
{
public:
explicit bayer_image( const std::string& bayer_image_path );
explicit bayer_image( const std::vector<uint8_t>& bayer_image_content );
explicit bayer_image( std::shared_ptr<MemFile> bayer_image_file );
~bayer_image() = default;
std::pair<double, double> get_noise_params() const;

@ -8,12 +8,14 @@
namespace hdrplus
{
class burst
{
public:
explicit burst( const std::string& burst_path, const std::string& reference_image_path );
explicit burst(const std::vector<std::string>& burst_paths, int reference_image_index);
explicit burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, int reference_image_index );
explicit burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index );
~burst() = default;

@ -33,10 +33,14 @@ class finish
bayer_image* refBayer;
std::string mergedImgPath;
finish() = default;
finish()
{
refBayer = NULL;
}
// use this constructor after the merge stage has finished
finish(std::string burstPath, cv::Mat mergedBayer,int refIdx){
finish(std::string burstPath, cv::Mat mergedBayer,int refIdx) {
refBayer = NULL;
this->refIdx = refIdx;
this->burstPath = burstPath;
this->mergedBayer = mergedBayer;
@ -60,7 +64,14 @@ class finish
~finish() = default;
~finish()
{
if (refBayer != NULL)
{
delete refBayer;
refBayer = NULL;
}
}
// finish pipeline func
// void process(std::string burstPath, cv::Mat mergedBayer,int refIdx);

@ -10,7 +10,31 @@
namespace hdrplus
{
class hdrplus_pipeline
inline cv::Mat convert16bit2_8bit_(cv::Mat ans) {
if (ans.type() == CV_16UC3) {
cv::MatIterator_<cv::Vec3w> it, end;
for (it = ans.begin<cv::Vec3w>(), end = ans.end<cv::Vec3w>(); it != end; ++it)
{
(*it)[0] *= (255.0 / USHRT_MAX);
(*it)[1] *= (255.0 / USHRT_MAX);
(*it)[2] *= (255.0 / USHRT_MAX);
}
ans.convertTo(ans, CV_8UC3);
} else if (ans.type() == CV_16UC1) {
uint16_t* ptr = (uint16_t*)ans.data;
int end = ans.rows * ans.cols;
for (int i = 0; i < end; i++) {
*(ptr + i) *= (255.0 / USHRT_MAX);
}
ans.convertTo(ans, CV_8UC1);
} else {
// unsupported data type: returned unchanged
}
return ans;
}
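The conversion simply rescales by 255/65535 in place and then narrows: a 16-bit value of 65535 maps to 255, and 32768 maps to 127 (the multiply is evaluated in double but assigned back to the uint16 element, so 127.5 truncates to 127 before convertTo narrows the type).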
class hdrplus_pipeline
{
private:
hdrplus::align align_module;
@ -21,6 +45,7 @@ class hdrplus_pipeline
void run_pipeline( const std::string& burst_path, const std::string& reference_image_path );
bool run_pipeline( const std::vector<std::string>& burst_paths, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::vector<uint8_t> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::shared_ptr<MemFile> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
hdrplus_pipeline() = default;
~hdrplus_pipeline() = default;

@ -107,7 +107,7 @@ static void build_per_grayimg_pyramid( \
break;
default:
#ifdef __ANDROID__
break;
#else
throw std::runtime_error("inv scale factor " + std::to_string( inv_scale_factors[ i ]) + "invalid" );
#endif
@ -987,6 +987,8 @@ void align::process( const hdrplus::burst& burst_images, \
} // for alternative image
per_grayimg_pyramid.clear();
}
} // namespace hdrplus

@ -141,6 +141,74 @@ bayer_image::bayer_image( const std::vector<uint8_t>& bayer_image_content )
#endif
}
bayer_image::bayer_image( std::shared_ptr<MemFile> bayer_image_file )
{
libraw_processor = std::make_shared<LibRaw>();
// Open RAW image file
int return_code;
{
std::vector<uint8_t>& fileData = bayer_image_file->content;
if ( ( return_code = libraw_processor->open_buffer( (void *)(&fileData[0]), fileData.size() ) ) != LIBRAW_SUCCESS )
{
libraw_processor->recycle();
#ifdef __ANDROID__
return;
#else
throw std::runtime_error("Error opening file " + bayer_image_path + " " + libraw_strerror( return_code ));
#endif
}
}
// Unpack the raw image
if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
{
#ifdef __ANDROID__
return;
#else
throw std::runtime_error("Error unpack file " + bayer_image_path + " " + libraw_strerror( return_code ));
#endif
}
// Get image basic info
width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
// Read exif tags
Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(&bayer_image_file->content[0], bayer_image_file->content.size());
assert(image.get() != 0);
image->readMetadata();
Exiv2::ExifData &exifData = image->exifData();
if (exifData.empty()) {
std::string error = "No Exif data found in the file";
std::cout << error << std::endl;
}
white_level = exifData["Exif.Image.WhiteLevel"].toLong();
black_level_per_channel.resize( 4 );
black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
// Create CV mat
// https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
// https://www.libraw.org/node/2141
raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
// 2x2 box filter
grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
#ifndef NDEBUG
printf("%s::%s read bayer image with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \
__FILE__, __func__, width, height, iso, white_level, \
black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
fflush( stdout );
#endif
}
std::pair<double, double> bayer_image::get_noise_params() const
{
// Set ISO to 100 if not positive

@ -248,4 +248,74 @@ burst::burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, in
#endif
}
burst::burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index )
{
// Number of images
num_images = bayer_image_files.size();
// Validate the reference image index
// (it must address one of the input buffers)
reference_image_idx = -1;
if ( reference_image_index >= 0 && reference_image_index < bayer_image_files.size() )
{
reference_image_idx = reference_image_index;
}
if ( reference_image_idx == -1 )
{
return;
// throw std::runtime_error("Error reference image index is out of range " );
}
#ifndef NDEBUG
printf("%s::%s reference image idx %d\n", \
__FILE__, __func__, reference_image_idx );
#endif
// Get source bayer image
// Downsample original bayer image by 2x2 box filter
for ( const auto& bayer_image_file : bayer_image_files )
{
bayer_images.emplace_back( bayer_image_file );
}
// Pad information
int tile_size_bayer = 32;
int padding_top = tile_size_bayer / 2;
int padding_bottom = tile_size_bayer / 2 + \
( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
int padding_left = tile_size_bayer / 2;
int padding_right = tile_size_bayer / 2 + \
( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
// Pad bayer image
for ( const auto& bayer_image_i : bayer_images )
{
cv::Mat bayer_image_pad_i;
cv::copyMakeBorder( bayer_image_i.raw_image, \
bayer_image_pad_i, \
padding_top, padding_bottom, padding_left, padding_right, \
cv::BORDER_REFLECT );
// cv::Mat uses an internal reference count
bayer_images_pad.emplace_back( bayer_image_pad_i );
grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
}
#ifndef NDEBUG
printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
__FILE__, __func__, \
bayer_images[ 0 ].height, \
bayer_images[ 0 ].width, \
bayer_images_pad[ 0 ].size().height, \
bayer_images_pad[ 0 ].size().width );
printf("%s::%s pad top %d, buttom %d, left %d, right %d\n", \
__FILE__, __func__, \
padding_top, padding_bottom, padding_left, padding_right );
#endif
}
} // namespace hdrplus
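As a worked example of the padding arithmetic above: with tile_size_bayer = 32 and an input height of 3000, padding_top = 16 and padding_bottom = 16 + (32 - 3000 % 32) = 16 + 8 = 24, so the padded height 3000 + 16 + 24 = 3040 is a multiple of 32 and every alignment tile lies fully inside the reflected border.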

@ -523,6 +523,7 @@ namespace hdrplus
// }
cv::Mat processMergedMat(cv::Mat mergedImg, int opencv_type){
cv::Mat m;
#if 0
uint16_t* ptr = (uint16_t*)mergedImg.data;
for(int r = 0; r < mergedImg.rows; r++) {
std::vector<int> dvals;
@ -533,13 +534,14 @@ namespace hdrplus
cv::transpose(mline, mline);
m.push_back(mline);
}
#endif
int ch = CV_MAT_CN(opencv_type);
m = mergedImg.clone();
m = m.reshape(ch);
m.convertTo(m, opencv_type);
return m;
}
void show20_20(cv::Mat m){
@ -565,17 +567,17 @@ namespace hdrplus
std::cout<<"finish pipeline start ..."<<std::endl;
// save merged Image value
// #ifndef HDRPLUS_NO_DETAILED_OUTPUT
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
writeCSV(DBG_OUTPUT_ROOT "merged.csv",burst_images.merged_bayer_image);
// #endif
#endif
this->refIdx = burst_images.reference_image_idx;
// this->burstPath = burstPath;
// std::cout<<"processMerged:"<<std::endl;
// show20_20(mergedB);
this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
// this->mergedBayer = processMergedMat(mergedB,CV_16UC1);//loadFromCSV("merged.csv", CV_16UC1);
// std::cout<<"processMerged:"<<std::endl;
// show20_20(this->mergedBayer);
@ -583,7 +585,7 @@ namespace hdrplus
// this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1);
#else
// this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
// this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1);
this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1);
// std::cout<<"processMerged:"<<std::endl;
#endif
// std::cout<<"csv:"<<std::endl;

@ -11,7 +11,7 @@
#include <fstream>
#ifdef __ANDROID__
#include <AndroidHelper.h>
// #include <AndroidHelper.h>
#endif
namespace hdrplus
@ -46,25 +46,25 @@ bool hdrplus_pipeline::run_pipeline( \
burst burst_images( burst_paths, reference_image_index );
std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
#ifdef __ANDROID__
ALOGI("Finish loading images");
// ALOGI("Finish loading images");
#endif
// Run align
align_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish align");
// ALOGI("Finish align");
#endif
// Run merging
merge_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish merging");
// ALOGI("Finish merging");
#endif
// Run finishing
finish_module.process( burst_images, finalImg);
#ifdef __ANDROID__
ALOGI("Finish process");
// ALOGI("Finish process");
#endif
return true;
@ -78,28 +78,61 @@ bool hdrplus_pipeline::run_pipeline( \
burst burst_images( burst_contents, reference_image_index );
std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
#ifdef __ANDROID__
ALOGI("Finish loading images");
// ALOGI("Finish loading images");
#endif
// Run align
align_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish align");
// ALOGI("Finish align");
#endif
// Run merging
merge_module.process( burst_images, alignments );
#ifdef __ANDROID__
ALOGI("Finish merging");
// ALOGI("Finish merging");
#endif
// Run finishing
finish_module.process( burst_images, finalImg);
#ifdef __ANDROID__
ALOGI("Finish process");
// ALOGI("Finish process");
#endif
return true;
}
bool hdrplus_pipeline::run_pipeline( \
const std::vector<std::shared_ptr<MemFile> >& burst_files, \
int reference_image_index, cv::Mat& finalImg )
{
// Create burst of images
burst burst_images( burst_files, reference_image_index );
std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
#ifdef __ANDROID__
// ALOGI("Finish loading images");
#endif
// Run align
align_module.process( burst_images, alignments );
#ifdef __ANDROID__
// ALOGI("Finish align");
#endif
// Run merging
merge_module.process( burst_images, alignments );
#ifdef __ANDROID__
// ALOGI("Finish merging");
#endif
// Run finishing
finish_module.process( burst_images, finalImg);
#ifdef __ANDROID__
// ALOGI("Finish process");
#endif
return true;
}
} // namespace hdrplus
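Taken together, the three run_pipeline overloads differ only in how the burst is constructed; the align/merge/finish stages are identical. A minimal sketch of driving the new MemFile overload, assuming the frames are already in memory (the reference index 0 is illustrative):

#include <opencv2/opencv.hpp>
#include "hdrplus/hdrplus_pipeline.h"

// Illustrative driver for the MemFile-based overload.
cv::Mat RunHdrPlus(const std::vector<std::shared_ptr<hdrplus::MemFile>>& frames)
{
    cv::Mat result;
    hdrplus::hdrplus_pipeline pipeline;
    // Align against frame 0.
    pipeline.run_pipeline(frames, 0, result);
    return result; // 16-bit output; see convert16bit2_8bit_ for narrowing
}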

@ -20,7 +20,9 @@ namespace hdrplus
// Get padded bayer image
cv::Mat reference_image = burst_images.bayer_images_pad[burst_images.reference_image_idx];
cv::imwrite("ref.jpg", reference_image);
#ifndef NDEBUG
// cv::imwrite("ref.jpg", reference_image);
#endif
// Get raw channels
std::vector<cv::Mat> channels(4);
@ -98,7 +100,7 @@ namespace hdrplus
cv::Range horizontal = cv::Range(padding[2], reference_image.cols - padding[3]);
cv::Range vertical = cv::Range(padding[0], reference_image.rows - padding[1]);
burst_images.merged_bayer_image = merged(vertical, horizontal);
cv::imwrite("merged.jpg", burst_images.merged_bayer_image);
// cv::imwrite("merged.jpg", burst_images.merged_bayer_image);
}
std::vector<cv::Mat> merge::getReferenceTiles(cv::Mat reference_image) {

File diff suppressed because it is too large

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char * const *extra_key_value_pairs
);
}
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
namespace halide_nsreg_hdrplus_pipeline {
namespace {
struct Registerer {
Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
}
};
static Registerer registerer;
} // namespace
} // halide_nsreg_hdrplus_pipeline

File diff suppressed because it is too large

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char * const *extra_key_value_pairs
);
}
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
namespace halide_nsreg_hdrplus_pipeline {
namespace {
struct Registerer {
Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
}
};
static Registerer registerer;
} // namespace
} // halide_nsreg_hdrplus_pipeline

@ -0,0 +1,13 @@
#ifndef __HDRPLUS__
#define __HDRPLUS__
#include <string>
#include <vector>
#include <opencv2/opencv.hpp> // all opencv header
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names);
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat);
#endif // __HDRPLUS__

File diff suppressed because it is too large

@ -0,0 +1,39 @@
#include "Burst.h"
Halide::Runtime::Buffer<uint16_t> Burst::ToBuffer() const {
if (Raws.empty()) {
return Halide::Runtime::Buffer<uint16_t>();
}
Halide::Runtime::Buffer<uint16_t> result(GetWidth(), GetHeight(),
Raws.size());
for (int i = 0; i < (int)Raws.size(); ++i) {
auto resultSlice = result.sliced(2, i);
Raws[i].CopyToBuffer(resultSlice);
}
return result;
}
void Burst::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
buffer.copy_from(ToBuffer());
}
std::vector<RawImage> Burst::LoadRaws(const std::vector< std::vector<uint8_t> >& images) {
std::vector<RawImage> result;
for (const auto &img : images) {
result.emplace_back(&img[0], img.size());
}
return result;
}
std::vector<RawImage> Burst::LoadRaws(const std::string &dirPath,
std::vector<std::string> &inputs) {
std::vector<RawImage> result;
for (const auto &input : inputs) {
const std::string img_path = dirPath + "/" + input;
result.emplace_back(img_path);
}
return result;
}
const RawImage &Burst::GetRaw(const size_t i) const { return this->Raws[i]; }
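Note that ToBuffer() stacks the N raw frames into a single three-dimensional width × height × N Halide buffer, slicing along dimension 2 so each RawImage copies into its own plane; this is the layout the Halide hdrplus_pipeline expects (it rejects anything that is not 3-D with at least two planes, see HDRPlus::process below).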

@ -0,0 +1,76 @@
#pragma once
#include "InputSource.h"
#include <hdrplus_pipeline.h>
#include <string>
#include <vector>
class Burst {
public:
Burst(std::string dir_path, std::vector<std::string> inputs)
: Dir(std::move(dir_path)), Inputs(std::move(inputs)),
Raws(LoadRaws(Dir, Inputs))
{
}
Burst(const std::vector< std::vector<uint8_t> >& images)
: Raws(LoadRaws(images))
{
}
~Burst() = default;
Burst(const Burst& src)
{
this->Dir = src.Dir;
this->Inputs = src.Inputs;
this->Raws = src.Raws;
}
int GetWidth() const { return Raws.empty() ? -1 : Raws[0].GetWidth(); }
int GetHeight() const { return Raws.empty() ? -1 : Raws[0].GetHeight(); }
int GetBlackLevel() const
{
return Raws.empty() ? -1 : Raws[0].GetScalarBlackLevel();
}
int GetWhiteLevel() const {
return Raws.empty() ? -1 : Raws[0].GetWhiteLevel();
}
WhiteBalance GetWhiteBalance() const {
return Raws.empty() ? WhiteBalance{-1, -1, -1, -1}
: Raws[0].GetWhiteBalance();
}
CfaPattern GetCfaPattern() const {
return Raws.empty() ? CfaPattern::CFA_UNKNOWN : Raws[0].GetCfaPattern();
}
Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const {
return Raws.empty() ? Halide::Runtime::Buffer<float>()
: Raws[0].GetColorCorrectionMatrix();
}
Halide::Runtime::Buffer<uint16_t> ToBuffer() const;
void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;
const RawImage &GetRaw(const size_t i) const;
private:
std::string Dir;
std::vector<std::string> Inputs;
std::vector<RawImage> Raws;
private:
static std::vector<RawImage> LoadRaws(const std::string &dirPath,
std::vector<std::string> &inputs);
static std::vector<RawImage> LoadRaws(const std::vector< std::vector<uint8_t> >& images);
};

@ -0,0 +1,144 @@
#include <fstream>
#include <iostream>
#include <stdio.h>
#ifdef _DEBUG
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include <include/stb_image_write.h>
#endif
#include <hdrplus_pipeline.h>
#include "Burst.h"
#include <include/HDRPlus.h>
extern "C" void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char *const *extra_key_value_pairs) {
}
/*
* HDRPlus Class -- Houses file I/O, defines pipeline attributes and
* invokes the main stages of the pipeline.
*/
class HDRPlus {
const Burst &burst;
public:
const Compression c;
const Gain g;
HDRPlus(Burst& burst, const Compression c, const Gain g)
: burst(burst), c(c), g(g)
{
}
Halide::Runtime::Buffer<uint8_t> process() {
const int width = burst.GetWidth();
const int height = burst.GetHeight();
Halide::Runtime::Buffer<uint8_t> output_img(3, width, height);
#ifdef _DEBUG
std::cerr << "Black point: " << burst.GetBlackLevel() << std::endl;
std::cerr << "White point: " << burst.GetWhiteLevel() << std::endl;
#endif
const WhiteBalance wb = burst.GetWhiteBalance();
std::cerr << "RGGB: " << wb.r << " " << wb.g0 << " " << wb.g1 << " " << wb.b
<< std::endl;
Halide::Runtime::Buffer<uint16_t> imgs = burst.ToBuffer();
if (imgs.dimensions() != 3 || imgs.extent(2) < 2) {
throw std::invalid_argument(
"The input of HDRPlus must be a 3-dimensional buffer with at least "
"two channels.");
}
const int cfa_pattern = static_cast<int>(burst.GetCfaPattern());
auto ccm = burst.GetColorCorrectionMatrix();
hdrplus_pipeline(imgs, burst.GetBlackLevel(), burst.GetWhiteLevel(), wb.r,
wb.g0, wb.g1, wb.b, cfa_pattern, ccm, c, g, output_img);
// transpose to account for interleaved layout
output_img.transpose(0, 1);
output_img.transpose(1, 2);
return output_img;
}
#ifdef _DEBUG
static bool save_png(const std::string &dir_path, const std::string &img_name,
const Halide::Runtime::Buffer<uint8_t> &img) {
const std::string img_path = dir_path + "/" + img_name;
const int stride_in_bytes = img.width() * img.channels();
if (!stbi_write_png(img_path.c_str(), img.width(), img.height(),
img.channels(), img.data(), stride_in_bytes)) {
std::cerr << "Unable to write output image '" << img_name << "'"
<< std::endl;
return false;
}
return true;
}
#endif
};
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat)
{
Compression c = 3.8f;
Gain g = 1.1f;
Burst burst(images);
HDRPlus hdr_plus(burst, c, g);
Halide::Runtime::Buffer<uint8_t> outputHdr = hdr_plus.process();
#ifdef _DEBUG
HDRPlus::save_png("/sdcard/com.xypower.mpapp/tmp", "2.png", outputHdr);
#endif
int width = outputHdr.width();
int height = outputHdr.height();
int channels = outputHdr.channels();
int jch = 0;
mat = cv::Mat::zeros(height, width, CV_8UC3);
for (int i = 0; i < height; ++i)
{
jch = 0;
for (int j = 0; j < width; ++j)
{
for (int n = 0; n < channels; ++n)
{
mat.at<uchar>(i, jch + n) = (uchar)outputHdr(j, i, n);
}
jch += channels;
}
}
// if (!HDRPlus::save_png(dir_path, out_name, output)) {
return true;
}
#if 0
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names) {
Compression c = 3.8f;
Gain g = 1.1f;
Burst burst(dir_path, in_names);
HDRPlus hdr_plus(burst, c, g);
Halide::Runtime::Buffer<uint8_t> output = hdr_plus.process();
if (!HDRPlus::save_png(dir_path, out_name, output)) {
return EXIT_FAILURE;
}
return 0;
}
#endif
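The two transpose calls in process() deserve a note: output_img is allocated as (3, width, height) so the channel dimension has stride 1 (interleaved RGB), and transpose(0, 1) followed by transpose(1, 2) relabels the dimensions to (width, height, channel) without moving any data — which is why the copy loop in doHdrPlus indexes it as outputHdr(j, i, n).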

@ -0,0 +1,152 @@
#include "InputSource.h"
#include <algorithm>
#include <unordered_map>
#include "LibRaw2DngConverter.h"
RawImage::RawImage(const std::string &path)
: Path(path), RawProcessor(std::make_shared<LibRaw>()) {
// TODO: Check LibRaw parameters.
// RawProcessor->imgdata.params.X = Y;
std::cerr << "Opening " << path << std::endl;
if (int err = RawProcessor->open_file(path.c_str())) {
std::cerr << "Cannot open file " << path
<< " error: " << libraw_strerror(err) << std::endl;
#if 0
throw std::runtime_error("Error opening " + path);
#endif
}
if (int err = RawProcessor->unpack()) {
std::cerr << "Cannot unpack file " << path
<< " error: " << libraw_strerror(err) << std::endl;
#if 0
throw std::runtime_error("Error opening " + path);
#endif
}
if (int ret = RawProcessor->raw2image()) {
std::cerr << "Cannot do raw2image on " << path
<< " error: " << libraw_strerror(ret) << std::endl;
#if 0
throw std::runtime_error("Error opening " + path);
#endif
}
}
RawImage::RawImage(const uint8_t* data, size_t length)
: RawProcessor(std::make_shared<LibRaw>())
{
std::cerr << "Opening raw from memory" << std::endl;
if (int err = RawProcessor->open_buffer((void *)data, length)) {
std::cerr << "Cannot open raw from memory" << " error: " << libraw_strerror(err) << std::endl;
#if 0
throw std::runtime_error("Error opening raw");
#endif
}
if (int err = RawProcessor->unpack()) {
std::cerr << "Cannot unpack raw from memory " << " error: " << libraw_strerror(err) << std::endl;
#if 0
throw std::runtime_error("Error opening " + path);
#endif
}
if (int ret = RawProcessor->raw2image()) {
std::cerr << "Cannot do raw2image" << " error: " << libraw_strerror(ret) << std::endl;
#if 0
throw std::runtime_error("Error opening " + path);
#endif
}
}
WhiteBalance RawImage::GetWhiteBalance() const {
const auto coeffs = RawProcessor->imgdata.color.cam_mul;
// Scale multipliers to green channel
const float r = coeffs[0] / coeffs[1];
const float g0 = 1.f; // same as coeffs[1] / coeffs[1];
const float g1 = 1.f;
const float b = coeffs[2] / coeffs[1];
return WhiteBalance{r, g0, g1, b};
}
void RawImage::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
const auto image_data = (uint16_t *)RawProcessor->imgdata.rawdata.raw_image;
const auto raw_width = RawProcessor->imgdata.rawdata.sizes.raw_width;
const auto raw_height = RawProcessor->imgdata.rawdata.sizes.raw_height;
const auto top = RawProcessor->imgdata.rawdata.sizes.top_margin;
const auto left = RawProcessor->imgdata.rawdata.sizes.left_margin;
Halide::Runtime::Buffer<uint16_t> raw_buffer(image_data, raw_width,
raw_height);
buffer.copy_from(raw_buffer.translated({-left, -top}));
}
void RawImage::WriteDng(const std::string &output_path,
const Halide::Runtime::Buffer<uint16_t> &buffer) const {
LibRaw2DngConverter converter(*this);
converter.SetBuffer(buffer);
converter.Write(output_path);
}
std::array<float, 4> RawImage::GetBlackLevel() const {
// See https://www.libraw.org/node/2471
const auto raw_color = RawProcessor->imgdata.color;
const auto base_black_level = static_cast<float>(raw_color.black);
std::array<float, 4> black_level = {
base_black_level + static_cast<float>(raw_color.cblack[0]),
base_black_level + static_cast<float>(raw_color.cblack[1]),
base_black_level + static_cast<float>(raw_color.cblack[2]),
base_black_level + static_cast<float>(raw_color.cblack[3])};
if (raw_color.cblack[4] == 2 && raw_color.cblack[5] == 2) {
for (int x = 0; x < raw_color.cblack[4]; ++x) {
for (int y = 0; y < raw_color.cblack[5]; ++y) {
const auto index = y * 2 + x;
black_level[index] = raw_color.cblack[6 + index];
}
}
}
return black_level;
}
int RawImage::GetScalarBlackLevel() const {
const auto black_level = GetBlackLevel();
return static_cast<int>(
*std::min_element(black_level.begin(), black_level.end()));
}
std::string RawImage::GetCfaPatternString() const {
static const std::unordered_map<char, char> CDESC_TO_CFA = {
{'R', 0}, {'G', 1}, {'B', 2}, {'r', 0}, {'g', 1}, {'b', 2}};
const auto &cdesc = RawProcessor->imgdata.idata.cdesc;
return {CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 0)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 1)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 0)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 1)])};
}
CfaPattern RawImage::GetCfaPattern() const {
const auto cfa_pattern = GetCfaPatternString();
if (cfa_pattern == std::string{0, 1, 1, 2}) {
return CfaPattern::CFA_RGGB;
} else if (cfa_pattern == std::string{1, 0, 2, 1}) {
return CfaPattern::CFA_GRBG;
} else if (cfa_pattern == std::string{2, 1, 1, 0}) {
return CfaPattern::CFA_BGGR;
} else if (cfa_pattern == std::string{1, 2, 0, 1}) {
return CfaPattern::CFA_GBRG;
}
throw std::invalid_argument("Unsupported CFA pattern: " + cfa_pattern);
return CfaPattern::CFA_UNKNOWN;
}
Halide::Runtime::Buffer<float> RawImage::GetColorCorrectionMatrix() const {
const auto raw_color = RawProcessor->imgdata.color;
Halide::Runtime::Buffer<float> ccm(3, 3);
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
ccm(i, j) = raw_color.rgb_cam[j][i];
}
}
return ccm;
}
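GetCfaPatternString() encodes the 2×2 mosaic as raw plane indices (0 = R, 1 = G, 2 = B) rather than letters, so an RGGB sensor yields the byte string {0, 1, 1, 2}; GetCfaPattern() then matches that string against the four supported layouts and throws on anything else.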

@ -0,0 +1,47 @@
#pragma once
#include <array>
#include <memory>
#include <string>
#include <libraw/libraw.h>
#include "finish.h"
#include <HalideBuffer.h>
class RawImage {
public:
explicit RawImage(const std::string &path);
explicit RawImage(const uint8_t* data, size_t length);
~RawImage() = default;
int GetWidth() const { return RawProcessor->imgdata.rawdata.sizes.width; }
int GetHeight() const { return RawProcessor->imgdata.rawdata.sizes.height; }
int GetScalarBlackLevel() const;
std::array<float, 4> GetBlackLevel() const;
int GetWhiteLevel() const { return RawProcessor->imgdata.color.maximum; }
WhiteBalance GetWhiteBalance() const;
std::string GetCfaPatternString() const;
CfaPattern GetCfaPattern() const;
Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const;
void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;
// Writes current RawImage as DNG. If buffer was provided, then use it instead
// of internal buffer.
void WriteDng(const std::string &path,
const Halide::Runtime::Buffer<uint16_t> &buffer = {}) const;
std::shared_ptr<LibRaw> GetRawProcessor() const { return RawProcessor; }
private:
std::string Path;
std::shared_ptr<LibRaw> RawProcessor;
};

@ -0,0 +1,95 @@
#include "LibRaw2DngConverter.h"
#include <unordered_map>
#include <libraw/libraw.h>
#include "InputSource.h"
LibRaw2DngConverter::LibRaw2DngConverter(const RawImage &raw)
: OutputStream(), Raw(raw),
Tiff(SetTiffFields(
TiffPtr(TIFFStreamOpen("", &OutputStream), TIFFClose))) {}
LibRaw2DngConverter::TiffPtr
LibRaw2DngConverter::SetTiffFields(LibRaw2DngConverter::TiffPtr tiff_ptr) {
const auto RawProcessor = Raw.GetRawProcessor();
const auto raw_color = RawProcessor->imgdata.color;
const uint16_t bayer_pattern_dimensions[] = {2, 2};
const auto tiff = tiff_ptr.get();
TIFFSetField(tiff, TIFFTAG_DNGVERSION, "\01\04\00\00");
TIFFSetField(tiff, TIFFTAG_DNGBACKWARDVERSION, "\01\04\00\00");
TIFFSetField(tiff, TIFFTAG_SUBFILETYPE, 0);
TIFFSetField(tiff, TIFFTAG_COMPRESSION, COMPRESSION_NONE);
TIFFSetField(tiff, TIFFTAG_BITSPERSAMPLE, 16);
TIFFSetField(tiff, TIFFTAG_ROWSPERSTRIP, 1);
TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
TIFFSetField(tiff, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_CFA);
TIFFSetField(tiff, TIFFTAG_SAMPLESPERPIXEL, 1);
TIFFSetField(tiff, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
TIFFSetField(tiff, TIFFTAG_SAMPLEFORMAT, SAMPLEFORMAT_UINT);
TIFFSetField(tiff, TIFFTAG_CFAREPEATPATTERNDIM, &bayer_pattern_dimensions);
const std::string cfa = Raw.GetCfaPatternString();
TIFFSetField(tiff, TIFFTAG_CFAPATTERN, cfa.c_str());
TIFFSetField(tiff, TIFFTAG_MAKE, "hdr-plus");
TIFFSetField(tiff, TIFFTAG_UNIQUECAMERAMODEL, "hdr-plus");
const std::array<float, 9> color_matrix = {
raw_color.cam_xyz[0][0], raw_color.cam_xyz[0][1], raw_color.cam_xyz[0][2],
raw_color.cam_xyz[1][0], raw_color.cam_xyz[1][1], raw_color.cam_xyz[1][2],
raw_color.cam_xyz[2][0], raw_color.cam_xyz[2][1], raw_color.cam_xyz[2][2],
};
TIFFSetField(tiff, TIFFTAG_COLORMATRIX1, 9, &color_matrix);
TIFFSetField(tiff, TIFFTAG_CALIBRATIONILLUMINANT1, 21); // D65
const std::array<float, 3> as_shot_neutral = {
1.f / (raw_color.cam_mul[0] / raw_color.cam_mul[1]), 1.f,
1.f / (raw_color.cam_mul[2] / raw_color.cam_mul[1])};
TIFFSetField(tiff, TIFFTAG_ASSHOTNEUTRAL, 3, &as_shot_neutral);
TIFFSetField(tiff, TIFFTAG_CFALAYOUT, 1); // Rectangular (or square) layout
TIFFSetField(
tiff, TIFFTAG_CFAPLANECOLOR, 3,
"\00\01\02"); // RGB
// https://www.awaresystems.be/imaging/tiff/tifftags/cfaplanecolor.html
const std::array<float, 4> black_level = Raw.GetBlackLevel();
TIFFSetField(tiff, TIFFTAG_BLACKLEVEL, 4, &black_level);
static const uint32_t white_level = raw_color.maximum;
TIFFSetField(tiff, TIFFTAG_WHITELEVEL, 1, &white_level);
if (RawProcessor->imgdata.sizes.flip > 0) {
// Seems that LibRaw uses LibTIFF notation.
TIFFSetField(tiff, TIFFTAG_ORIENTATION, RawProcessor->imgdata.sizes.flip);
} else {
TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
}
return tiff_ptr;
}
void LibRaw2DngConverter::SetBuffer(
const Halide::Runtime::Buffer<uint16_t> &buffer) const {
const auto width = buffer.width();
const auto height = buffer.height();
const auto tiff = Tiff.get();
TIFFSetField(tiff, TIFFTAG_IMAGEWIDTH, width);
TIFFSetField(tiff, TIFFTAG_IMAGELENGTH, height);
uint16_t *row_pointer = buffer.data();
for (int row = 0; row < height; row++) {
TIFFWriteScanline(tiff, row_pointer, row, 0);
row_pointer += width;
}
}
void LibRaw2DngConverter::Write(const std::string &path) const {
TIFFCheckpointDirectory(Tiff.get());
TIFFFlush(Tiff.get());
std::ofstream output(path, std::ofstream::binary);
output << OutputStream.str();
}
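LibRaw2DngConverter emits a minimal DNG by writing TIFF tags (CFA pattern, color matrix, black/white levels, as-shot neutral) into an in-memory stream and flushing it to disk in Write(). A minimal end-to-end sketch via RawImage::WriteDng, assuming a RAW file on disk (paths are illustrative):

#include <HalideBuffer.h>
#include "InputSource.h"

// Illustrative round-trip: load a RAW frame and re-emit it as DNG.
void SaveAsDng(const std::string& rawPath, const std::string& dngPath)
{
    RawImage raw(rawPath);
    Halide::Runtime::Buffer<uint16_t> buf(raw.GetWidth(), raw.GetHeight());
    raw.CopyToBuffer(buf); // active area, sensor margins stripped
    raw.WriteDng(dngPath, buf);
}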

@ -0,0 +1,26 @@
#pragma once
#include <memory>
#include <sstream>
#include <tiffio.h>
#include <tiffio.hxx>
#include <HalideBuffer.h>
class RawImage;
class LibRaw2DngConverter {
using TiffPtr = std::shared_ptr<TIFF>;
TiffPtr SetTiffFields(TiffPtr tiff_ptr);
public:
explicit LibRaw2DngConverter(const RawImage &raw);
void SetBuffer(const Halide::Runtime::Buffer<uint16_t> &buffer) const;
void Write(const std::string &path) const;
private:
std::ostringstream OutputStream;
const RawImage &Raw;
std::shared_ptr<TIFF> Tiff;
};

@ -0,0 +1,36 @@
#ifndef HDRPLUS_FINISH_H_
#define HDRPLUS_FINISH_H_
#include <hdrplus_pipeline.h>
template <class T = float> struct TypedWhiteBalance {
template <class TT>
explicit TypedWhiteBalance(const TypedWhiteBalance<TT> &other)
: r(other.r), g0(other.g0), g1(other.g1), b(other.b) {}
TypedWhiteBalance(T r, T g0, T g1, T b) : r(r), g0(g0), g1(g1), b(b) {}
T r;
T g0;
T g1;
T b;
};
using WhiteBalance = TypedWhiteBalance<float>;
typedef uint16_t BlackPoint;
typedef uint16_t WhitePoint;
typedef float Compression;
typedef float Gain;
enum class CfaPattern : int {
CFA_UNKNOWN = 0,
CFA_RGGB = 1,
CFA_GRBG = 2,
CFA_BGGR = 3,
CFA_GBRG = 4
};
#endif
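Compression and Gain are plain float knobs passed straight through to the Halide pipeline; doHdrPlus above hard-codes c = 3.8 and g = 1.1, presumably the tone-mapping compression strength and overall output gain.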

@ -5,6 +5,8 @@ import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.UriMatcher;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.net.Uri;
@ -18,7 +20,13 @@ import com.xypower.common.MicroPhotoContext;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
public class BridgeProvider extends ContentProvider {
@ -34,6 +42,8 @@ public class BridgeProvider extends ContentProvider {
private final static String PATH_TAKE_PHOTO = "/takePhoto";
private final static String PATH_TAKE_VIDEO = "/takeVideo";
private final static String PATH_HDRPLUS = "/hdrplus";
private final static String PATH_RECOG_PIC = "/recogPic";
public BridgeProvider() {
@ -106,6 +116,7 @@ public class BridgeProvider extends ContentProvider {
matcher.addURI(AUTHORITY, PATH_GEN_CERT_REQ, 4);
matcher.addURI(AUTHORITY, PATH_TAKE_PHOTO, 5);
matcher.addURI(AUTHORITY, PATH_TAKE_VIDEO, 6);
matcher.addURI(AUTHORITY, PATH_HDRPLUS, 7);
int res = 0;
int matched = matcher.match(uri);
@ -128,6 +139,9 @@ public class BridgeProvider extends ContentProvider {
case 6:
res = takeVideo(uri, values);
break;
case 7:
res = hdrPlus(uri, values);
break;
default:
break;
}
@ -416,4 +430,73 @@ public class BridgeProvider extends ContentProvider {
return 1;
}
private int hdrPlus(Uri uri, ContentValues values) {
int rotation = values.containsKey("rotation") ? values.getAsInteger("rotation").intValue() : -1;
int frontCamera = values.containsKey("front") ? values.getAsInteger("front").intValue() : 0;
String outputPath = values.containsKey("output") ? values.getAsString("output") : null;
int numberOfCaptures = values.containsKey("captures") ? values.getAsInteger("captures").intValue() : 0;
List<String> paths = new ArrayList<>();
for (int idx = 0; idx < numberOfCaptures; idx++) {
String key = "path" + Integer.toString(idx + 1);
String path = values.containsKey(key) ? values.getAsString(key) : null;
if (!TextUtils.isEmpty(path)) {
paths.add(path);
}
}
ApplicationInfo applicationInfo = null;
Context context = getContext();
try {
applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
} catch (Exception ex) {
ex.printStackTrace();
}
if (applicationInfo == null) {
return 0;
}
Log.d(TAG, "nativeLibraryDir= " + applicationInfo.nativeLibraryDir);
String exeFilePath = applicationInfo.nativeLibraryDir + '/' + "libhdrp.so";
File hdrpFile = new File(exeFilePath);
if (!hdrpFile.exists()) {
return 0;
}
String cmd = exeFilePath + " " + Integer.toString(rotation) + " ";
cmd += Integer.toString(frontCamera) + " ";
cmd += outputPath + " " + TextUtils.join(" ", paths);
String[] params = new String[]{""};
File workDir = context.getFilesDir();
int exitCode = 0;
try {
Process process = Runtime.getRuntime().exec(cmd, params, workDir.getAbsoluteFile());
// Intrinsics.checkNotNullExpressionValue(process, "process");
InputStream inputStream = process.getInputStream();
BufferedReader reader = new BufferedReader((Reader)(new InputStreamReader(inputStream)));
// StringBuilder stringBuilder = new StringBuilder();
while(true) {
String line = reader.readLine();
if (line == null) {
exitCode = process.exitValue();
reader.close();
process.destroy();
break;
}
// line is non-null here; a null line terminates the loop above
// this.outputCallback.invoke(var5);
Log.d("HDRPlus", line);
// stringBuilder.append(line);
// stringBuilder.append("\r\n");
}
} catch (Exception ex) {
ex.printStackTrace();
}
return 1;
}
}
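The provider-side contract mirrors execHdrplus in MicroPhotoService: callers hit the /hdrplus path with ContentValues keys rotation, front, output, captures, and path1 … pathN (one key per input frame), and the provider spawns libhdrp.so with those values as positional arguments, logging the child process output under the "HDRPlus" tag.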

@ -115,7 +115,9 @@ public class MainActivity extends AppCompatActivity {
}
}
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
setContentView(binding.getRoot());
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
@ -163,6 +165,10 @@ public class MainActivity extends AppCompatActivity {
Intent intent = getIntent();
final int noDelay = intent.getIntExtra("noDelay", 0);
int rebootFlag = intent.getIntExtra("reboot", 0);
String reason = intent.getStringExtra("reason");
if (!TextUtils.isEmpty(reason)) {
Log.w(TAG, "App Started with reason: " + reason);
}
if (rebootFlag == 1) {
Log.i(TAG, "After Reboot");
}
@ -386,7 +392,7 @@ public class MainActivity extends AppCompatActivity {
@Override
public void onClick(View v) {
Context context = v.getContext().getApplicationContext();
MicroPhotoService.restartApp(context, context.getPackageName());
MicroPhotoService.restartApp(context, context.getPackageName(), "Manual Restart From MainActivity");
}
});

@ -9,10 +9,13 @@ import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.ComponentCallbacks2;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageDecoder;
@ -45,6 +48,7 @@ import androidx.core.app.NotificationCompat;
import androidx.core.content.FileProvider;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import android.provider.MediaStore;
import android.telephony.SignalStrength;
import android.telephony.SubscriptionManager;
import android.telephony.TelephonyManager;
@ -63,9 +67,17 @@ import com.xypower.mpapp.utils.DeviceUtil;
import com.xypower.mpapp.v2.Camera2VideoActivity;
import com.xypower.mpapp.video.RawActivity;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
import java.io.Reader;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.URI;
import java.nio.channels.FileLock;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
@ -146,6 +158,9 @@ public class MicroPhotoService extends Service {
public static boolean isRunning = false;
FileOutputStream mAppRunningFile;
FileLock mAppLock;
private Runnable delayedSleep = new Runnable() {
@Override
public void run() {
@ -156,6 +171,43 @@ public class MicroPhotoService extends Service {
public MicroPhotoService() {
}
@Override
public void onTrimMemory(int level) {
Log.w(TAG, "onTrimMemory level=" + level);
if (level >= ComponentCallbacks2.TRIM_MEMORY_RUNNING_CRITICAL) {
// Clear the caches. Note all pending requests will be removed too.
final Context context = getApplicationContext();
try {
infoLog("Restart MpApp as for TrimMemory");
mHander.postDelayed(new Runnable() {
@Override
public void run() {
restartApp(context, MicroPhotoContext.PACKAGE_NAME_MPAPP, "TrimMemory");
}
}, 1000);
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
@Override
public void onLowMemory() {
final Context context = getApplicationContext();
try {
Intent intent = new Intent(this, MainActivity.class);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
PendingIntent pi = PendingIntent.getActivity(this,0, intent,0);
AlarmManager alarmManager=(AlarmManager)getSystemService(ALARM_SERVICE);
alarmManager.set(AlarmManager.RTC_WAKEUP,System.currentTimeMillis() + 5000, pi);
infoLog("Restart MpApp after 5s as for LowMemory");
} catch (Exception ex) {
ex.printStackTrace();
}
}
@Override
public IBinder onBind(Intent intent) {
// TODO: Return the communication channel to the service.
@ -165,6 +217,30 @@ public class MicroPhotoService extends Service {
public void onCreate() {
super.onCreate();
try {
final String appPath = MicroPhotoContext.buildMpAppDir(this);
File lockerFile = new File(appPath);
lockerFile = new File(lockerFile, "data/alive/running");
mAppRunningFile = new FileOutputStream(lockerFile);
for (int idx = 0; idx < 3; idx++) {
try {
mAppLock = mAppRunningFile.getChannel().tryLock();
if (mAppLock != null && mAppLock.isValid()) {
break;
}
try {
Thread.sleep(16);
} catch (Exception ex) {
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
mHander = new ServiceHandler();
mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
@ -235,6 +311,18 @@ public class MicroPhotoService extends Service {
@Override
public void onDestroy() {
try {
if (mAppLock != null) {
mAppLock.close();
}
if (mAppRunningFile != null) {
mAppRunningFile.close();
}
} catch (Exception ex) {
ex.printStackTrace();
}
mStateService = STATE_SERVICE.NOT_CONNECTED;
Log.w(TAG, "MicroPhotoService::onDestroy called");
@ -346,27 +434,6 @@ public class MicroPhotoService extends Service {
Log.i(TAG, "PhotoTimer Fired: CH=" + channel + " PR=" + preset);
mService.notifyToTakePhoto(mService.mNativeHandle, channel, preset, ts, photoOrVideo);
}
File cameraAdbCfg = new File(MicroPhotoContext.buildMpAppDir(mService.getApplication()), "data/cameraAdb.cfg");
if (cameraAdbCfg.exists()) {
final String appPath = MicroPhotoContext.buildMpAppDir(context);
mService.mHander.postDelayed(new Runnable() {
@Override
public void run() {
final CameraAdb cameraAdb = new CameraAdb(context, appPath);
cameraAdb.setCallback(new Runnable() {
@Override
public void run() {
List<String> targetPaths = cameraAdb.getTargetPaths();
for (String targetPath : targetPaths) {
mService.sendExternalPhoto(mService.mNativeHandle, targetPath);
}
}
});
cameraAdb.takePhoto();
}
}, 10000 * cnt);
}
}
// Register Next Photo Timer
@ -397,7 +464,7 @@ public class MicroPhotoService extends Service {
int restart = intent.getIntExtra("restart", 0);
Log.i(TAG, "UPD CFG Fired ACTION=" + action + " restart=" + restart);
if (restart != 0) {
MicroPhotoService.restartApp(context, context.getPackageName());
MicroPhotoService.restartApp(context, context.getPackageName(), "Config Updated");
} else if (mService.mNativeHandle != 0) {
mService.reloadConfigs(mService.mNativeHandle);
}
@ -500,13 +567,16 @@ public class MicroPhotoService extends Service {
String path = intent.getStringExtra("path");
String md5 = intent.getStringExtra("md5");
} else if (TextUtils.equals(ACTION_GPS_TIMEOUT, action)) {
mService.mPreviousGpsTimer = null;
Log.i(TAG, action);
try {
mService.mLocationManager.removeUpdates(mService.mLocationListener);
Log.i(TAG, "After removeUpdates");
} catch (Exception ex) {
ex.printStackTrace();
}
mService.enableGps(false);
Log.i(TAG, "After disable GPS");
mService.mPreviousGpsTimer = null;
} else if (TextUtils.equals(ACTION_RESTART, action)) {
String reason = intent.getStringExtra("reason");
@ -516,7 +586,7 @@ public class MicroPhotoService extends Service {
} catch (Exception ex) {
ex.printStackTrace();
}
MicroPhotoService.restartApp(context.getApplicationContext(), MicroPhotoContext.PACKAGE_NAME_MPAPP);
MicroPhotoService.restartApp(context.getApplicationContext(), MicroPhotoContext.PACKAGE_NAME_MPAPP, "Restart Cmd");
}
}
}
@ -838,9 +908,18 @@ public class MicroPhotoService extends Service {
}
String tfCardPath = MicroPhotoContext.getSecondaryStoragePath(context);
String nativeLibraryDir = null;
ApplicationInfo applicationInfo = null;
try {
applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
nativeLibraryDir = applicationInfo.nativeLibraryDir;
} catch (Exception ex) {
ex.printStackTrace();
}
service.mNativeHandle = init(appPath, server, port, cmdid, protocol, networkProtocol,
encryptData, 0, service.getSignalLevel(), versionCode,
BuildConfig.BUILD_TIMESTAMP, simcard, tfCardPath);
BuildConfig.BUILD_TIMESTAMP, simcard, tfCardPath, nativeLibraryDir);
if (service.mNativeHandle != 0) {
isRunning = true;
@ -870,6 +949,7 @@ public class MicroPhotoService extends Service {
}
};
Log.d(TAG, "Start Service from MicroPhotoService");
Thread th = new Thread(runnable);
th.start();
}
@ -1225,14 +1305,14 @@ public class MicroPhotoService extends Service {
return true;
}
public void reboot(final int rebootType, final long timeout) {
public void reboot(final int rebootType, final long timeout, final String reason) {
Runnable runnable = new Runnable() {
@Override
public void run() {
if (rebootType == 0) {
Context context = MicroPhotoService.this.getApplicationContext();
restartApp(context, context.getPackageName());
restartApp(context, context.getPackageName(), reason);
} else {
Log.w(TAG, "Recv REBOOT command");
@ -1243,11 +1323,14 @@ public class MicroPhotoService extends Service {
mHander.postDelayed(runnable, timeout > 0 ? timeout : 1000);
}
public static void restartApp(Context context, String packageName) {
public static void restartApp(Context context, String packageName, String reason) {
Intent intent = new Intent(context, MainActivity.class);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
context.startActivity(intent);
@ -1301,6 +1384,60 @@ public class MicroPhotoService extends Service {
SysApi.enableGps(getApplicationContext(), enabled);
}
private int execHdrplus(int rotation, int frontCamera, String outputPath, String pathsWithSpace) {
ApplicationInfo applicationInfo = null;
Context context = getApplicationContext();
try {
applicationInfo = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_SHARED_LIBRARY_FILES);
} catch (Exception ex) {
ex.printStackTrace();
}
if (applicationInfo == null) {
return -1;
}
String exeFilePath = applicationInfo.nativeLibraryDir + '/' + "libhdrp.so";
File hdrpFile = new File(exeFilePath);
if (!hdrpFile.exists()) {
return -1;
}
String cmd = exeFilePath + " " + Integer.toString(rotation) + " ";
cmd += Integer.toString(frontCamera) + " ";
cmd += outputPath + " " + pathsWithSpace;
String[] params = new String[]{""};
File workDir = context.getFilesDir();
int exitCode = 0;
try {
Process process = Runtime.getRuntime().exec(cmd, params, workDir.getAbsoluteFile());
// Intrinsics.checkNotNullExpressionValue(process, "process");
InputStream inputStream = process.getInputStream();
BufferedReader reader = new BufferedReader((Reader)(new InputStreamReader(inputStream)));
// StringBuilder stringBuilder = new StringBuilder();
while(true) {
String line = reader.readLine();
if (line == null) {
exitCode = process.exitValue();
reader.close();
process.destroy();
break;
}
// line is non-null here; a null line terminates the loop above
// this.outputCallback.invoke(var5);
Log.d("HDRPlus", line);
// stringBuilder.append(line);
// stringBuilder.append("\r\n");
}
} catch (Exception ex) {
ex.printStackTrace();
}
return exitCode;
}
/*
TelephonyManager telephonyManager = (TelephonyManager)this.getSystemService(Context.TELEPHONY_SERVICE);
// for example value of first element
@ -1311,7 +1448,7 @@ cellSignalStrengthGsm.getDbm();
protected native long init(String appPath, String ip, int port, String cmdid, int protocol,
int networkProtocl, int encryptData, long netHandle, int signalLevel,
int versionCode, long buildTime, String simcard, String tfCardPath);
int versionCode, long buildTime, String simcard, String tfCardPath, String nativeLibraryDir);
protected native long getHeartbeatDuration(long handler);
protected native long[] getPhotoTimeData(long handler, long startTime);
protected native long[] getPhotoTimeData2(long handler);
@ -1327,7 +1464,7 @@ cellSignalStrengthGsm.getDbm();
protected native void burstCaptureFinished(long handler, boolean result, int numberOfCaptures, String pathsJoinedByTab, boolean frontCamera, int rotation, long photoId);
public static native long takePhoto(int channel, int preset, boolean photoOrVideo, String configFilePath, String path);
public static native void releaseDeviceHandle(long deviceHandle);
public static native boolean sendExternalPhoto(long deviceHandle, String path);
public static native boolean sendExternalPhoto(long deviceHandle, String path, long photoInfo);
public static native void infoLog(String log);
public static native void setOtgState(boolean enabled);
@ -1417,6 +1554,58 @@ cellSignalStrengthGsm.getDbm();
}
};
public void callSystemCamera(final int cameraId, final long photoId) {
Context context = getApplicationContext();
/*
Intent intent = null;
if (cameraId == 1) {
intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra("android.intent.extras.CAMERA_FACING", android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
intent.putExtra("android.intent.extras.LENS_FACING_FRONT", 1);
intent.putExtra("android.intent.extra.USE_FRONT_CAMERA", true);
} else{
intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
}
String appPath = MicroPhotoContext.buildMpAppDir(context);
File targetPath = new File(new File(appPath), "tmp/" + Long.toString(photoId) + ".jpg");
// Uri uri = FileProvider.getUriForFile(this, getPackageName() + ".fileprovider", targetPath);
intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(targetPath));
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
if (intent.resolveActivity(getPackageManager()) != null) {
startActivity(intent);
}
*/
final CameraAdb cameraAdb = new CameraAdb(context, MicroPhotoContext.buildMpAppDir(context));
cameraAdb.setCallback(new Runnable() {
@Override
public void run() {
List<String> targetPaths = cameraAdb.getTargetPaths();
if (targetPaths.isEmpty()) {
recordingFinished(mNativeHandle, true, false, null, photoId);
} else {
for (String targetPath : targetPaths) {
recordingFinished(mNativeHandle, true, true, targetPath, photoId);
}
}
}
});
cameraAdb.takePhoto(cameraId);
}
////////////////////////GPS////////////////////
private void setDefaultDataSubId(int subId) {

@ -8,6 +8,7 @@ import android.util.Log;
import com.dev.devapi.api.SysApi;
import com.xypower.common.FilesUtils;
import com.xypower.common.MicroPhotoContext;
import com.xypower.mpapp.BuildConfig;
import com.xypower.mpapp.MicroPhotoService;
import com.xypower.mpapp.v2.Camera2VideoActivity;
@ -78,19 +79,19 @@ public class CameraAdb {
mAdbKeyPair = AdbKeyPair.read(priKeyFile, pubKeyFile);
}
public void takePhoto() {
public void takePhoto(final int cameraId) {
new Thread(new Runnable() {
@Override
public void run() {
takePhotoImpl();
takePhotoImpl(cameraId);
}
}).start();
}
private void takePhotoImpl() {
private void takePhotoImpl(final int cameraId) {
long requestTime = System.currentTimeMillis() / 1000;
takePhoto(false);
takePhoto(cameraId == 1);
long takingTime = System.currentTimeMillis() / 1000;
sleep(1500);
String path = movePhoto(false, requestTime, takingTime);
@ -100,21 +101,6 @@ public class CameraAdb {
sleep(100);
SysApi.forceStopApp(mContext, "com.mediatek.camera");
sleep(1000);
requestTime = System.currentTimeMillis() / 1000;
takePhoto(true);
takingTime = System.currentTimeMillis() / 1000;
sleep(200);
SysApi.forceStopApp(mContext, "com.mediatek.camera");
sleep(250);
path = movePhoto(true, requestTime, takingTime);
if (!TextUtils.isEmpty(path)) {
mTargetPaths.add(path);
}
if (mRunnable != null) {
mRunnable.run();
}
@ -140,8 +126,12 @@ public class CameraAdb {
File targetFile = new File(new File(photoPath), photoFile);
try {
File srcFile = opFile.get();
res = srcFile.renameTo(targetFile);
if (BuildConfig.DEBUG) {
FilesUtils.copyFile(srcFile, targetFile);
res = true;
} else {
res = srcFile.renameTo(targetFile);
}
if (res) {
targetPath = targetFile.getAbsolutePath();
break;
@ -168,7 +158,7 @@ public class CameraAdb {
}
}
public void takePhoto(final boolean frontCamera) {
protected void takePhoto(final boolean frontCamera) {
Dadb adb = Dadb.create(mDeviceIp, 5555, mAdbKeyPair);
if (adb == null) {

@ -352,85 +352,6 @@
app:layout_constraintStart_toStartOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnCameraInfo" />
<Button
android:id="@+id/simchange"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:text="sim卡获取"
android:visibility="gone"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btnStartServ" />
<Button
android:id="@+id/simchange2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="4dp"
android:layout_marginTop="8dp"
android:text="切换sim2"
android:visibility="gone"
app:layout_constraintStart_toEndOf="@+id/simchange"
app:layout_constraintTop_toBottomOf="@+id/btnStartServ" />
<Button
android:id="@+id/gps"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="gps" />
<Button
android:id="@+id/netgps"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="network_gps" />
<Button
android:id="@+id/tcpudp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="网络通信" />
<Button
android:id="@+id/tcpudpsend"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="心跳发送" />
<Button
android:id="@+id/video"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="录制视频" />
<Button
android:id="@+id/video2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:visibility="gone"
android:text="录制视频结束"
tools:layout_editor_absoluteX="426dp"
tools:layout_editor_absoluteY="380dp" />
<androidx.constraintlayout.helper.widget.Flow
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:layout_marginTop="32dp"
app:constraint_referenced_ids="gps,netgps,tcpudp,tcpudpsend,video,video2"
app:flow_horizontalGap="20dp"
app:flow_wrapMode="chain"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toStartOf="@+id/logs"
app:layout_constraintTop_toBottomOf="@+id/btnSendHb" />
<androidx.constraintlayout.widget.Barrier
android:id="@+id/leftBarrier"
android:layout_width="wrap_content"
@ -459,13 +380,4 @@
app:layout_constraintStart_toEndOf="@+id/leftBarrier"
app:layout_constraintTop_toTopOf="parent" />
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="411dp"
android:layout_height="441dp"
android:layout_marginStart="2dp"
android:visibility="gone"
app:layout_constraintStart_toStartOf="parent"
tools:layout_editor_absoluteY="288dp" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -343,53 +343,6 @@
app:layout_constraintStart_toStartOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnCameraInfo" />
<Button
android:id="@+id/gps"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="gps" />
<Button
android:id="@+id/netgps"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="network_gps" />
<Button
android:id="@+id/tcpudp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="网络通信" />
<Button
android:id="@+id/tcpudpsend"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="心跳发送" />
<Button
android:id="@+id/video"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="录制视频" />
<Button
android:id="@+id/video2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="录制视频结束" />
<androidx.constraintlayout.helper.widget.Flow
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginTop="48dp"
app:constraint_referenced_ids="gps,netgps,tcpudp,tcpudpsend,video,video2"
app:flow_horizontalGap="20dp"
app:flow_wrapMode="chain"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/btnTakePhoto4" />
<androidx.constraintlayout.widget.Barrier
android:id="@+id/leftBarrier"
android:layout_width="wrap_content"
@ -416,13 +369,6 @@
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="parent" />
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="411dp"
android:layout_height="441dp"
android:layout_marginStart="2dp"
android:visibility="gone"
app:layout_constraintStart_toStartOf="parent"
tools:layout_editor_absoluteY="288dp" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -362,7 +362,7 @@ public class MicroPhotoContext {
}
}
public static void restartMpApp(Context context) {
public static void restartMpApp(Context context, String reason) {
/*
Context context = MicroPhotoService.this.getApplicationContext();
Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
@ -376,10 +376,10 @@ public class MicroPhotoContext {
*/
restartApp(context, PACKAGE_NAME_MPAPP);
restartApp(context, PACKAGE_NAME_MPAPP, reason);
}
public static void restartApp(Context context, String packageName) {
public static void restartApp(Context context, String packageName, String reason) {
/*
Context context = MicroPhotoService.this.getApplicationContext();
Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
@ -398,6 +398,9 @@ public class MicroPhotoContext {
Intent intent = new Intent(ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setPackage(PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
@ -407,6 +410,9 @@ public class MicroPhotoContext {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
if (intent != null) {
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
context.startActivity(intent);
}
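Both restart paths now carry an optional human-readable reason, either on the ACTION_RESTART_MP broadcast or on the launch intent, so the restarted app can log why it was brought up. A hypothetical receiver-side sketch (MpApp's actual receiver is not in this diff):

    import android.content.BroadcastReceiver;
    import android.content.Context;
    import android.content.Intent;
    import android.util.Log;

    // Hypothetical receiver: log the restart reason before acting on it.
    public class RestartReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            String reason = intent.getStringExtra("reason");
            int noDelay = intent.getIntExtra("noDelay", 0);
            Log.i("RestartReceiver", "restart requested, reason="
                    + (reason != null ? reason : "<none>") + ", noDelay=" + noDelay);
        }
    }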

@ -26,6 +26,7 @@ COMPILE_MIN_SDK_VERSION=28
opencvsdk=D:/Workspace/deps/opencv-mobile-4.9.0-android
coreroot=D:/Workspace/Github/xymp/xymp/Core
hdrplusroot=D:/Workspace/deps/hdrplus_libs
halideroot=D:/Workspace/deps/Halide/18.0.0
ncnnroot=D:/Workspace/deps/ncnn-20240410-android-vulkan
# ncnnroot=D:/Workspace/deps/ncnn-20230517-android-vulkan
libzipsdkroot=D:/Workspace/deps/libzip-android-sdk

@ -1 +1 @@
{"bsManufacturer":"\u4e0a\u6d77\u6b23\u5f71\u7535\u529b\u79d1\u6280\u80a1\u4efd\u6709\u9650\u516c\u53f8","encryption":0,"equipName":"\u56fe\u50cf\u5728\u7ebf\u76d1\u6d4b","heartbeat":10,"imgQuality":0,"model":"MSRDT-1-WP","network":0,"networkProtocol":0,"packetBase":1,"packetSize":32768,"port":6891,"postDataPaused":0,"productionDate":1717200000,"protocol":65298,"quality":80,"reportFault":0,"server":"61.169.135.146","timeForKeepingLogs":1296000,"timeForKeepingPhotos":1296000,"upgradePacketBase":1,"workStatusTimes":3}
{"bsManufacturer":"\u4e0a\u6d77\u6b23\u5f71\u7535\u529b\u79d1\u6280\u80a1\u4efd\u6709\u9650\u516c\u53f8","channels":3,"encryption":0,"equipName":"\u56fe\u50cf\u5728\u7ebf\u76d1\u6d4b","heartbeat":10,"imgQuality":80,"model":"MSRDT-1-WP","network":0,"networkProtocol":0,"outputDbgInfo":0,"packetBase":1,"packetSize":32768,"port":6891,"postDataPaused":0,"productionDate":1717200000,"protocol":65298,"quality":80,"reportFault":0,"server":"61.169.135.146","timeForKeepingLogs":1296000,"timeForKeepingPhotos":1296000,"upgradePacketBase":1,"workStatusTimes":3}

@ -1 +1 @@
{"autoExposure":1,"autoFocus":1,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"zoom":0,"zoomRatio":1}
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +1 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"requestTemplate":1,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"compensation":0,"exposureTime":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"quality":80,"recognization":2,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +1 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"recognization":2,"requestTemplate":1,"resolutionCX":3840,"resolutionCY":2160,"sceneMode":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}
{"autoExposure":1,"autoFocus":1,"awbMode":1,"compensation":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%% \u7248\u672c:%%V%%\r\n\u4fe1\u53f7:%%SL%% \u7535\u6c60\u7535\u538b:%%BV%% \u7535\u6c60\u7535\u91cf:%%BP%% \u5145\u7535\u7535\u538b:%%CV%%"},"recognization":2,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +1 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":1,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -900,7 +900,7 @@ public class AppMaster {
String packageName = jsonObject.optString("packageName", null);
if (packageName != null) {
MicroPhotoContext.restartApp(mService.getApplicationContext(), packageName);
MicroPhotoContext.restartApp(mService.getApplicationContext(), packageName, "Config Updated");
}
}
} catch (Exception ex) {
@ -952,7 +952,7 @@ public class AppMaster {
} catch (Exception ex) {
}
MicroPhotoContext.restartMpApp(context);
MicroPhotoContext.restartMpApp(context, "CMA Updated");
}
});
@ -978,7 +978,7 @@ public class AppMaster {
MicroPhotoContext.saveMpAppConfig(context, appConfig);
MicroPhotoContext.restartMpApp(mService.getApplicationContext());
MicroPhotoContext.restartMpApp(mService.getApplicationContext(), "HB Duration Updated");
return true;
}

@ -130,7 +130,7 @@ public class MainActivity extends AppCompatActivity {
}
break;
case R.id.action_reboot_mp:{
MicroPhotoContext.restartMpApp(getApplicationContext());
MicroPhotoContext.restartMpApp(getApplicationContext(), "Manual Restart from MpMst");
}
break;
case R.id.action_reboot_mpmst:{

@ -45,11 +45,14 @@ import org.json.JSONObject;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.lang.reflect.Method;
import java.nio.channels.FileLock;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
@ -141,6 +144,20 @@ public class MpMasterService extends Service {
public MpMasterService() {
}
@Override
public void onLowMemory() {
final Context context = getApplicationContext();
try {
Intent intent = new Intent(this, MainActivity.class);
PendingIntent pi = PendingIntent.getActivity(this, 0, intent, 0);
AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
alarmManager.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + 5000, pi);
logger.info("Restart MpApp after 5s due to low memory");
} catch (Exception ex) {
ex.printStackTrace();
}
}
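onLowMemory defers the relaunch by five seconds through AlarmManager rather than restarting inline, which lets the system reclaim memory first. One caveat: a mutability flag of 0 makes PendingIntent.getActivity throw on Android 12+ targets, where FLAG_IMMUTABLE or FLAG_MUTABLE is mandatory. A version-safe sketch (not the committed code):

    import android.app.PendingIntent;
    import android.content.Context;
    import android.content.Intent;
    import android.os.Build;

    // Sketch: add FLAG_IMMUTABLE (available since API 23, required when
    // targeting API 31+) so the PendingIntent is accepted everywhere.
    static PendingIntent activityIntent(Context ctx, Intent intent) {
        int flags = 0;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            flags |= PendingIntent.FLAG_IMMUTABLE;
        }
        return PendingIntent.getActivity(ctx, 0, intent, flags);
    }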
@Override
public IBinder onBind(Intent intent) {
// TODO: Return the communication channel to the service.
@ -373,10 +390,55 @@ public class MpMasterService extends Service {
final Context context = getApplicationContext();
long ts = System.currentTimeMillis();
FileOutputStream runningFile = null;
FileLock fileLock = null;
try {
boolean isMpAppRunning = true;
String mpappDir = MicroPhotoContext.buildMpAppDir(getApplicationContext());
File file = new File(mpappDir);
file = new File(file, "data/alive/running");
if (file.exists()) {
runningFile = new FileOutputStream(file);
fileLock = runningFile.getChannel().tryLock();
if (fileLock != null && fileLock.isValid()) {
isMpAppRunning = false;
}
try {
if (fileLock != null) {
fileLock.close();
fileLock = null;
}
} catch (Exception ex) {
ex.printStackTrace();
}
FilesUtils.closeFriendly(runningFile);
} else {
isMpAppRunning = false;
}
if (!isMpAppRunning) {
// Restart MpApp
MicroPhotoContext.restartMpApp(context, "MpMST Keep Alive Detection: NO Lock");
logger.warning("Restart MpAPP as There is NO Lock");
return;
}
} catch (Exception ex) {
ex.printStackTrace();
} finally {
try {
if (fileLock != null) {
fileLock.close();
}
} catch (Exception ex) {
ex.printStackTrace();
}
FilesUtils.closeFriendly(runningFile);
}
if (mPreviousMpHbTime <= ts && ts - mPreviousMpHbTime > mMpHeartbeatDuration * 2) {
// MpApp is not running
if (ts - mTimeToStartMpApp >= 30000) {
MicroPhotoContext.restartMpApp(context);
MicroPhotoContext.restartMpApp(context, "MpMST Keep Alive Detection");
mTimeToStartMpApp = ts;
logger.warning("Restart MpAPP as it is NOT Running Prev MPAPP HB=" +
Long.toString((ts - mPreviousMpHbTime) / 1000) + " MPAPP HBDuration=" + Long.toString(mMpHeartbeatDuration));
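The new liveness probe assumes MpApp holds an exclusive lock on data/alive/running for its entire lifetime: if MpMst's tryLock succeeds, or the file is missing, MpApp cannot be running and is restarted at once, without waiting for the heartbeat timeout below. The holder side would look roughly like this (a sketch; MpApp's code is not part of this diff):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.nio.channels.FileLock;

    // Sketch of the lock holder (MpApp side): acquire at startup and keep
    // the stream open so MpMst's tryLock() fails while the process lives.
    public final class AliveLock {
        private FileOutputStream stream; // held for the process lifetime
        private FileLock lock;

        public boolean acquire(File runningFile) {
            try {
                runningFile.getParentFile().mkdirs();
                stream = new FileOutputStream(runningFile);
                lock = stream.getChannel().lock(); // blocks until exclusive
                return lock.isValid();
            } catch (Exception ex) {
                ex.printStackTrace();
                return false;
            }
        }
    }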
@ -397,19 +459,23 @@ public class MpMasterService extends Service {
((ts - modifiedTimeOfPhoto) > mTimeOfMpAppAlive * 4) ||
((ts - modifiedTimeOfUpload) > mTimeOfMpAppAlive * 4)) {
String msg = "Restart MpAPP as it is NOT Running hb=" + Long.toString(ts - modifiedTimeOfHb) +
" taking=" + Long.toString(ts - modifiedTimeOfPhoto) + " sending=" + Long.toString(ts - modifiedTimeOfUpload) +
" Will restart MpApp in " + Long.toString(mDelayedRestartMpTime / 1000) + " seconds";
logger.warning(msg);
if (ts - mTimeToStartMpApp >= 30000) {
String msg = "Restart MpAPP as it is NOT Running hb=" + Long.toString(ts - modifiedTimeOfHb) +
" taking=" + Long.toString(ts - modifiedTimeOfPhoto) + " sending=" + Long.toString(ts - modifiedTimeOfUpload) +
" Will restart MpApp in " + Long.toString(mDelayedRestartMpTime / 1000) + " seconds";
logger.warning(msg);
AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
Intent alarmIntent = new Intent();
alarmIntent.setAction(ACTION_MP_RESTART);
alarmIntent.putExtra("reason", msg);
AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE);
Intent alarmIntent = new Intent();
alarmIntent.setAction(ACTION_MP_RESTART);
alarmIntent.putExtra("reason", msg);
PendingIntent pendingIntent = PendingIntent.getBroadcast(this, 0, alarmIntent, PendingIntent.FLAG_UPDATE_CURRENT);
PendingIntent pendingIntent = PendingIntent.getBroadcast(this, 0, alarmIntent, PendingIntent.FLAG_UPDATE_CURRENT);
alarmManager.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, ts + mDelayedRestartMpTime, pendingIntent);
alarmManager.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, ts + mDelayedRestartMpTime, pendingIntent);
mTimeToStartMpApp = ts;
}
}
} catch (Exception ex) {
ex.printStackTrace();
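The rewritten branch arms an ACTION_MP_RESTART alarm carrying the reason instead of restarting immediately, and the ts - mTimeToStartMpApp guard keeps it from being re-armed more than once per 30 seconds. Note that Android 12+ gates setExactAndAllowWhileIdle behind the SCHEDULE_EXACT_ALARM permission; a defensive sketch with an inexact fallback:

    import android.app.AlarmManager;
    import android.app.PendingIntent;
    import android.content.Context;
    import android.os.Build;

    // Sketch: use the exact alarm only when permitted (API 31+ requires
    // SCHEDULE_EXACT_ALARM); otherwise fall back to an inexact alarm.
    static void armRestartAlarm(Context ctx, PendingIntent pi, long delayMs) {
        AlarmManager am = (AlarmManager) ctx.getSystemService(Context.ALARM_SERVICE);
        long triggerAt = System.currentTimeMillis() + delayMs;
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S || am.canScheduleExactAlarms()) {
            am.setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, triggerAt, pi);
        } else {
            am.set(AlarmManager.RTC_WAKEUP, triggerAt, pi);
        }
    }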
@ -562,7 +628,8 @@ public class MpMasterService extends Service {
if (intent.hasExtra("HeartbeatDuration")) {
mService.mMpHeartbeatDuration = intent.getIntExtra("HeartbeatDuration", 600000);
int hbDuration = intent.getIntExtra("HeartbeatDuration", 600000);
mService.mMpHeartbeatDuration = hbDuration > 0 ? hbDuration : 600000;
}
mService.mPreviousMpHbTime = intent.getLongExtra("HeartbeatTime", System.currentTimeMillis());
mService.logger.info("Heartbeat Timer Fired By MpAPP ACTION=" + action + " MpHB=" + Long.toString(mService.mMpHeartbeatDuration));
@ -578,7 +645,7 @@ public class MpMasterService extends Service {
int restart = intent.getIntExtra("restart", 0);
mService.logger.info("Update Config Fired ACTION=" + action + " restart=" + restart);
if (restart != 0) {
MicroPhotoContext.restartApp(context, context.getPackageName());
MicroPhotoContext.restartApp(context, context.getPackageName(), "Config Updated");
} else {
mService.loadConfig();
mService.registerHeartbeatTimer();
@ -1023,7 +1090,7 @@ public class MpMasterService extends Service {
th.start();
}
public void reboot(final int rebootType) {
public void reboot(final int rebootType, String reason) {
Runnable runnable = new Runnable() {
@Override
@ -1031,7 +1098,7 @@ public class MpMasterService extends Service {
if (rebootType == 0) {
logger.warning("Recv REBOOT MpMst APP cmd");
Context context = MpMasterService.this.getApplicationContext();
MicroPhotoContext.restartApp(context, context.getPackageName());
MicroPhotoContext.restartApp(context, context.getPackageName(), reason);
} else {
logger.warning("Recv RESET cmd");
@ -1167,7 +1234,7 @@ public class MpMasterService extends Service {
}
copyAssetsDir(context, "mpapp", destPath);
MicroPhotoContext.restartMpApp(context);
MicroPhotoContext.restartMpApp(context, "FIRST Config Init");
}
};
@ -1187,6 +1254,12 @@ public class MpMasterService extends Service {
}
}
try {
destPathFile.mkdirs();
} catch (Exception ex) {
ex.printStackTrace();
}
copyAssetsDir(context, "mpmst", destPath);
return true;
}

@ -31,7 +31,7 @@ public class UpdateReceiver extends BroadcastReceiver {
MpMasterService.resetVersions(context);
if (packageName.equals("package:" + targetPackageName)) {
// SysApi.enableApp(context, targetPackageName);
restartAPP(context, targetPackageName);
restartAPP(context, targetPackageName, "App Upgraded");
}
} else if (action.equals(Intent.ACTION_PACKAGE_ADDED)) {// Install broadcast
Log.e(TAG, "onReceive:Installed and Start the App:" + targetPackageName);
@ -39,7 +39,7 @@ public class UpdateReceiver extends BroadcastReceiver {
if (packageName.equals("package:" + targetPackageName)) {
/*SystemUtil.reBootDevice();*/
// SysApi.enableApp(context, targetPackageName);
restartAPP(context, targetPackageName);
restartAPP(context, targetPackageName, "App Installed");
}
} else if (action.equals(Intent.ACTION_PACKAGE_REMOVED)) { // Uninstall
// Logger.e(TAG, "onReceive:uninstall" + packageName);
@ -52,7 +52,7 @@ public class UpdateReceiver extends BroadcastReceiver {
MpMasterService.resetVersions(context);
if (packageName.equals("package:" + targetPackageName)) {
// SysApi.enableApp(context, targetPackageName);
tryToRestartApp(context, targetPackageName);
tryToRestartApp(context, targetPackageName, "App Upgraded");
}
} else if (action.equals(Intent.ACTION_PACKAGE_ADDED)) {// Install broadcast
Log.e(TAG, "onReceive:Installed and Start the App:" + targetPackageName);
@ -60,36 +60,39 @@ public class UpdateReceiver extends BroadcastReceiver {
if (packageName.equals("package:" + targetPackageName)) {
/*SystemUtil.reBootDevice();*/
// SysApi.enableApp(context, targetPackageName);
tryToRestartApp(context, targetPackageName);
tryToRestartApp(context, targetPackageName, "App Installed");
}
} else if (action.equals(Intent.ACTION_PACKAGE_REMOVED)) { // Uninstall
// Logger.e(TAG, "onReceive:uninstall" + packageName);
}
}
private void tryToRestartApp(final Context context, final String targetPackageName) {
private void tryToRestartApp(final Context context, final String targetPackageName, String reason) {
Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (TextUtils.equals(targetPackageName, APP_PACKAGE_MPAPP)) {
startMpApp(context);
startMpApp(context, reason);
} else {
restartAPP(context, targetPackageName);
restartAPP(context, targetPackageName, reason);
}
}
}, 10000);
}
public static void restartAPP(Context context, String packageName) {
public static void restartAPP(Context context, String packageName, String reason) {
Intent intent = context.getPackageManager()
.getLaunchIntentForPackage(packageName);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
context.startActivity(intent);
// ActManager.getAppManager().finishAllActivity();
}
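restartAPP assumes getLaunchIntentForPackage never returns null, but for a package without a launcher activity it does, and the addFlags call would then throw. A null-guarded sketch (not the committed code):

    import android.content.Context;
    import android.content.Intent;
    import android.text.TextUtils;
    import android.util.Log;

    // Sketch: guard against packages that expose no launch intent.
    public static void restartAPP(Context context, String packageName, String reason) {
        Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
        if (intent == null) {
            Log.w("UpdateReceiver", "No launch intent for " + packageName);
            return;
        }
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        if (!TextUtils.isEmpty(reason)) {
            intent.putExtra("reason", reason);
        }
        context.startActivity(intent);
    }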
public void startMpApp(final Context context) {
private void startMpApp(final Context context, String reason) {
try {
if (MicroPhotoContext.isAppAlive(context, MicroPhotoContext.PACKAGE_NAME_MPAPP)) {
@ -107,7 +110,7 @@ public class UpdateReceiver extends BroadcastReceiver {
if ((ts - modifiedTimeOfDb) > 12 * 1000) {
// greater than 12 seconds
// logger.warning("Start MpAPP as it is NOT running");
MicroPhotoContext.restartMpApp(context);
MicroPhotoContext.restartMpApp(context, reason);
}
} catch (Exception ex) {

@ -777,7 +777,7 @@ public class SimUtil {
}
if (rebootMpApp != 0) {
MicroPhotoContext.restartMpApp(context);
MicroPhotoContext.restartMpApp(context, "Config Updated From SMS");
} else {
Intent intent = new Intent();
intent.setAction(MicroPhotoContext.ACTION_UPDATE_CONFIGS_MP);
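When rebootMpApp is 0, the SMS handler only asks MpApp to reload its configuration instead of restarting it. The hunk is truncated after setAction; judging by the broadcast pattern in MicroPhotoContext.restartApp earlier in this commit, the continuation plausibly targets the MpApp package and sends the broadcast (a sketch, since the lines are cut off):

    // Sketch of the likely continuation (truncated in this hunk), mirroring
    // the broadcast pattern in MicroPhotoContext.restartApp:
    intent.setPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
    context.sendBroadcast(intent);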
