jxjajs 3 months ago
commit e1721d52d7

@ -5,7 +5,7 @@ plugins {
// 10,00,000 major-minor-build
def AppMajorVersion = 1
def AppMinorVersion = 3
def AppBuildNumber = 39
def AppBuildNumber = 62
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
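For example, with this bump the version name becomes 1.3.62 and the version code evaluates to 1 * 100000 + 3 * 1000 + 62 = 103062 (the previous build 39 encoded as 103039).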

Binary files not shown.

@ -86,6 +86,7 @@
<intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent>
<package android:name="com.xypower.mplive" />
</queries>
<application

@ -21,7 +21,7 @@ if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
#add_definitions(-DUSING_PLZ)
add_definitions(-DUSING_PLZ)
endif()
# OUTPUT_DBG_INFO:
@ -31,7 +31,7 @@ add_definitions(-DOUTPUT_DBG_INFO)
add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO
# Database.cpp
add_definitions(-DOUTPUT_DB_DBG_INFO)
# add_definitions(-DOUTPUT_DB_DBG_INFO)
IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG)
@ -146,6 +146,16 @@ include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
SET(ZLMEDIAKIT_LIBS "")
SET(STREAMING_SRCS "")
add_definitions(-DDISABLE_RTTI)
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLMediaKit )
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp )
SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
@ -175,9 +185,6 @@ SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
@ -190,17 +197,10 @@ include_directories(${YAMC_INC_DIR})
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
include_directories(${ASIO_ROOT}/include)
add_library( # Sets the name of the library.
sqlite3
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${SQLITE_SRC_DIR}/sqlite3.c
)
INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c )
# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
file(GLOB BREAKPAD_SOURCES_COMMON
native-lib.cpp
@ -340,10 +340,8 @@ include_directories(${TERM_CORE_ROOT})
add_library( # Sets the name of the library.
jsoncpp
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${JSONCPP_SOURCES}
)
@ -399,6 +397,8 @@ add_library( # Sets the name of the library.
netcamera/httpclient.cpp
${STREAMING_SRCS}
#serial/WeatherComm.cpp
# camera2/OpenCVFont.cpp
@ -467,20 +467,15 @@ find_library( # Sets the name of the path variable.
target_link_libraries( # Specifies the target library.
${PROJECT_NAME}
jsoncpp
freetype
# breakpad
# Links the target library to the log library
# included in the NDK.
avcodec avfilter avformat avutil swresample swscale x264
${log-lib}
android camera2ndk mediandk z curl
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED}
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} ${ZLMEDIAKIT_LIBS}
)
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")

@ -14,7 +14,6 @@
#include <climits>
#include "GPIOControl.h"
#include <LogThread.h>
#ifdef _DEBUG
#include <AndroidHelper.h>
@ -62,7 +61,7 @@ size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
SetCamerastatus(param.cmd, true);
}
if (oldRef == 0)
if (oldRef == 0 || param.cmd != CMD_SET_3V3_PWR_EN)
{
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
@ -210,6 +209,10 @@ size_t GpioControl::TurnOn(const std::vector<int>& cmds)
m_locker.lock();
for (it = cmds.cbegin(); it != cmds.cend(); ++it)
{
if (*it == 0)
{
continue;
}
param.cmd = *it;
turnOnImpl(param);
}

@ -13,6 +13,7 @@
#include <utility>
#include <SemaphoreEx.h>
#include <LogThread.h>
#ifndef USING_N938
@ -50,21 +51,25 @@
#define CMD_SET_3V3_PWR_EN 132
#endif
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_INIT_STATUS 401
#else // defined(USING_PLZ)
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#define CMD_SET_SPI_POWER 129
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_12V_EN_STATE 0 // TO BE ADDED
#define CMD_SET_SYSTEM_RESET 202
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
// #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
// #define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 117
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 112
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
//#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 117
//#define CMD_GET_BAT_BUS_VOLTAGE_STATE 112
#define CMD_SET_SPI_MODE 0 // TO BE ADDED
#define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED
#define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED
@ -98,6 +103,9 @@
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_INIT_STATUS 401
#endif // USING_PLZ
#else // defined(USING_N938)
@ -501,14 +509,50 @@ public:
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(8, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
m_cmds[7] = cmd8;
TurnOn();
}
virtual ~PowerControl()
{
GpioControl::TurnOff(m_cmds, m_delayCloseTime);
#ifdef OUTPUT_DBG_INFO
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s", status.c_str());
#endif
}
std::string GetStatus()
{
std::string result;
for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it)
{
if (*it == 0)
{
continue;
}
result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " ";
}
return result;
}
protected:
void TurnOn()
{
#ifdef OUTPUT_DBG_INFO
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str());
#endif
GpioControl::TurnOn(m_cmds);
}
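A brief usage sketch of the RAII wrapper above (assuming the single-command PowerControl(cmd, closeDelayTime) overload that the derived classes below rely on; the command and delay values are illustrative only):
{
PowerControl pwr(CMD_SET_OTG_STATE, /*closeDelayTime*/ 3);
// All commands in m_cmds are now on; GetStatus() can be logged for debugging.
// ... use the powered peripheral here ...
} // ~PowerControl() -> GpioControl::TurnOff(m_cmds, m_delayCloseTime)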
@ -545,7 +589,7 @@ public:
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PLZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_OTG_STATE, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{
@ -560,7 +604,7 @@ public:
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_5V_PWR_ENABLE, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PLZ
@ -577,9 +621,10 @@ public:
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime)
#else // USING_N938
#ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, closeDelayTime)
// Micro Photo
PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime)
#endif // USING_PLZ
#endif // USING_N938
{

@ -23,6 +23,10 @@
#include "client/linux/handler/minidump_descriptor.h"
#endif
#ifdef USING_MQTT
#include <mosquitto.h>
#endif
#include <android/native_window.h>
#include <android/native_window_jni.h>
@ -227,9 +231,22 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
curl_global_init(CURL_GLOBAL_ALL);
#ifdef USING_MQTT
mosquitto_lib_init();
#endif
return result;
}
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
{
#ifdef USING_MQTT
mosquitto_lib_cleanup();
#endif
curl_global_cleanup();
}
bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread)
{
didAttachThread = false;
@ -1362,7 +1379,6 @@ Java_com_xypower_mpapp_MicroPhotoService_updateEhernet(
return JNI_TRUE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_updateActiveNetwork(
JNIEnv* env, jobject pThis, jlong handle, jlong networkHandle, jboolean available) {
@ -1382,3 +1398,26 @@ Java_com_xypower_mpapp_MicroPhotoService_updateActiveNetwork(
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_requestPowerControl(
JNIEnv* env, jclass cls, jint type) {
if (type == 1) // Net
{
NetCameraPowerCtrl* powerControl = new NetCameraPowerCtrl(2);
return reinterpret_cast<jlong>(powerControl);
}
return 0L;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_releasePowerControl(
JNIEnv* env, jclass cls, jlong powerControlHandle) {
PowerControl* powerControl = reinterpret_cast<PowerControl*>(powerControlHandle);
delete powerControl;
return JNI_TRUE;
}

@ -10,6 +10,8 @@
#include "PositionHelper.h"
#include "DngCreator.h"
#include "media/RTSPRecorder.h"
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
@ -1156,16 +1158,11 @@ bool CPhoneDevice::Reboot(int resetType, bool manually, const std::string& reaso
return false;
}
}
std::thread t([manually, timeout]()
{
XYLOG(XYLOG_SEVERITY_WARNING, "Recv REBOOT command Manually=%d", manually ? 1 : 0);
std::this_thread::sleep_for(std::chrono::milliseconds(timeout));
if (manually)
{
GpioControl::reboot();
// GpioControl::reboot();
RestartApp(resetType, timeout, reason);
}
});
t.detach();
}
else
{
@ -1346,15 +1343,12 @@ void CPhoneDevice::handleRebootTimer(union sigval v)
}
#endif
CPhoneDevice* pDevice = (CPhoneDevice*)(v.sival_ptr);
const IDevice::PHOTO_INFO& photoInfo = pDevice->mPhotoInfo;
// Reboot APP
XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app");
pDevice->RestartApp(REBOOT_TYPE_APP, 30000, "Camera Can't Close");
XYLOG(XYLOG_SEVERITY_ERROR, "Camera Close Thread is DEAD, will RESTART app CH=%u PR=%X", photoInfo.channel, photoInfo.preset);
pDevice->RestartApp(REBOOT_TYPE_APP, 30000, "Camera Cant Close");
}
// void CPhoneDevice::handleRebootTimerImpl()
// {
// }
IDevice::timer_uid_t CPhoneDevice::RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times/* = 0*/)
{
struct sigevent evp = { 0 };
@ -1533,7 +1527,9 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(1);
uint32_t netWaitTime = (localPhotoInfo.cameraType == CAM_TYPE_PLZ) ? 20 : 4;
std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(netWaitTime);
net_handle_t netHandle = GetEthnetHandle();
if (netHandle == 0)
@ -1554,16 +1550,30 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
if (netHandle == 0)
{
// timeout
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
std::string pwrStatus = powerCtrlPtr->GetStatus();
pwrStatus += ethernetPowerCtrl->GetStatus();
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet Not Existing CH=%u PR=%X PHOTOID=%u PWR:%s",
(uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId, pwrStatus.c_str());
TakePhotoCb(0, localPhotoInfo, "", 0);
return false;
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
unsigned int ip = 0;
unsigned int netMask = 0;
unsigned int gateway = 0;
char buf[32] = { 0 };
if (GetNetInfo("eth0", ip, netMask, gateway))
{
// const
sockaddr_in addrIn = { AF_INET, 0, ip};
inet_ntop(AF_INET, &addrIn.sin_addr, buf, sizeof(buf)); // where recvAddr is of type SOCKADDR_IN
}
SetStaticIp();
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available Handle=%ld IP=%s CH=%u PR=%X PHOTOID=%u", (uint64_t)netHandle, buf, (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
}
// SetStaticIp();
std::this_thread::sleep_for(std::chrono::milliseconds(256));
NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 };
@ -1620,7 +1630,7 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
std::vector<uint8_t> img;
bool netCaptureResult = false;
for (int idx = 0; idx < 3; idx++)
for (int idx = 0; idx < 64; idx++)
{
netHandle = GetEthnetHandle();
netPhotoInfo.netHandle = netHandle;
@ -1634,7 +1644,7 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
img.clear();
netCaptureResult = requestCapture(localPhotoInfo.channel, localPhotoInfo.preset, netPhotoInfo, img);
if (netCaptureResult)
if (netCaptureResult && !img.empty())
{
XYLOG(XYLOG_SEVERITY_INFO, "NetCapture Succeeded PHOTOID=%u Img Size=%u", localPhotoInfo.photoId, (uint32_t)img.size());
break;
@ -1686,6 +1696,147 @@ bool CPhoneDevice::TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, c
return true;
}
bool CPhoneDevice::TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
{
// AutoEnv autoEnv(pThis->m_vm);
time_t ts = time(NULL);
uint32_t waitTime = localPhotoInfo.selfTestingTime;
if(!GpioControl::GetSelftestStatus(waitTime))
{
m_isSelfTesting.store(true);
waitTime = (waitTime != 0) ? (waitTime * 1024) : 10240;
std::this_thread::sleep_for(std::chrono::milliseconds(waitTime));
m_isSelfTesting.store(false);
}
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power ON");
// std::shared_ptr<PowerControl> ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(1);
std::shared_ptr<PowerControl> ethernetPowerCtrl;
net_handle_t netHandle = GetEthnetHandle();
if (netHandle == 0)
{
// Wait about 10s
for (int idx = 0; idx < 84; idx++)
{
std::this_thread::sleep_for(std::chrono::milliseconds(128));
netHandle = GetEthnetHandle();
if (netHandle != 0)
{
break;
}
}
}
if (netHandle == 0)
{
// timeout
XYLOG(XYLOG_SEVERITY_ERROR, "Ethernet not existing CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
#ifdef NDEBUG
TakePhotoCb(0, localPhotoInfo, "", 0);
return false;
#endif
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Ethernet is Available CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset, localPhotoInfo.photoId);
}
// SetStaticIp();
std::this_thread::sleep_for(std::chrono::milliseconds(256));
NET_PHOTO_INFO netPhotoInfo = { netHandle, 0 };
if (localPhotoInfo.vendor == 1)
{
// Hai Kang
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/ISAPI/Streaming/channels/1/picture?");
}
else if (localPhotoInfo.vendor == 2)
{
// Hang Yu
strcpy(netPhotoInfo.url, "/cgi-bin/snapshot.cgi");
}
else if (localPhotoInfo.vendor == 3)
{
// Yu Shi
netPhotoInfo.authType = HTTP_AUTH_TYPE_DIGEST;
int streamSid = 0; // should put into config
// rtsp://192.168.0.13:554/media/video1
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/media/video%u", (uint32_t)localPhotoInfo.cameraId);
// strcpy(netPhotoInfo.url, "rtsp://192.168.50.224/live/0");
}
else if (localPhotoInfo.vendor == 5)
{
// Hang Yu - New
netPhotoInfo.authType = HTTP_AUTH_TYPE_BASIC;
// http://192.168.1.46/Snapshot/%u/RemoteImageCapture?ImageFormat=2&HorizontalPixel=1920&VerticalPixel=1080
// http://192.168.1.101/Snapshot/1/2/RemoteImageCaptureV2?ImageFormat=jpg
// http://192.168.1.101/Snapshot/1/1/RemoteImageCaptureV2?ImageFormat=jpg
snprintf(netPhotoInfo.url, sizeof(netPhotoInfo.url), "/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", (uint32_t)localPhotoInfo.cameraId);
}
else
{
XYLOG(XYLOG_SEVERITY_ERROR, "Vendor(%u) not Supported CH=%u PR=%X PHOTOID=%u", (uint32_t)localPhotoInfo.vendor, (uint32_t)localPhotoInfo.channel, (unsigned int)localPhotoInfo.preset, localPhotoInfo.photoId);
TakePhotoCb(0, localPhotoInfo, "", 0);
return false;
}
struct in_addr addr;
addr.s_addr = localPhotoInfo.ip;
strcpy(netPhotoInfo.ip, inet_ntoa(addr));
strcpy(netPhotoInfo.outputPath, path.c_str());
if (!localPhotoInfo.userName.empty())
{
size_t len = std::min<size_t>(sizeof(netPhotoInfo.userName) - 1, localPhotoInfo.userName.size());
strncpy(netPhotoInfo.userName, localPhotoInfo.userName.c_str(), len);
}
if (!localPhotoInfo.password.empty())
{
size_t len = std::min<size_t>(sizeof(netPhotoInfo.password) - 1, localPhotoInfo.password.size());
strncpy(netPhotoInfo.password, localPhotoInfo.password.c_str(), len);
}
// strcpy(netPhotoInfo.interface, "eth0");
localPhotoInfo.photoTime = time(NULL);
std::string tmpFile = m_appPath + (APP_PATH_TMP DIR_SEP_STR) + std::to_string(localPhotoInfo.photoId) + ".mp4";
// RTSPToMP4 dumper(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
// dumper.start();
dumpRtspToMp4(netPhotoInfo.url, tmpFile.c_str(), localPhotoInfo.duration * 1000);
ethernetPowerCtrl.reset();
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Power OFF");
std::string fullPath = endsWith(mPath, ".mp4") ? mPath : (mPath + CTerminal::BuildPhotoFileName(mPhotoInfo));
if (existsFile(tmpFile))
{
std::rename(tmpFile.c_str(), fullPath.c_str());
TakePhotoCb(3, localPhotoInfo, "", localPhotoInfo.photoTime);
}
else
{
TakePhotoCb(0, localPhotoInfo, "", 0);
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
}
// Notify to take next photo
// TakePhotoCb(1, localPhotoInfo, "", takingTime);
// XYLOG(XYLOG_SEVERITY_ERROR, "Failed to TP on NET Camera CH=%u PR=%X PHOTOID=%u URL=http://%s%s", (uint32_t)localPhotoInfo.channel, (uint32_t)localPhotoInfo.preset,
// localPhotoInfo.photoId, netPhotoInfo.ip, netPhotoInfo.url);
// TakePhotoCb(0, localPhotoInfo, "", 0);
return true;
}
bool CPhoneDevice::StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr)
{
return true;
}
bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path)
{
if (photoInfo.width == 0 || photoInfo.height == 0)
@ -1973,6 +2124,89 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
t.detach();
}
else if ((mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF) && (mPhotoInfo.cameraType == CAM_TYPE_NET || mPhotoInfo.cameraType == CAM_TYPE_PLZ))
{
XYLOG(XYLOG_SEVERITY_INFO, "Start TP(Streaming) CH=%u PR=%X PHOTOID=%u", (uint32_t)mPhotoInfo.channel, (uint32_t)mPhotoInfo.preset, mPhotoInfo.photoId);
// Start Thread
CPhoneDevice* pThis = this;
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
std::thread t([localPhotoInfo, path, pThis, osds, powerCtrlPtr]() mutable
{
pThis->StartPushStreaming(localPhotoInfo, path, osds, powerCtrlPtr);
});
t.detach();
}
else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_PLZ))
{
uint64_t wid_serial = RequestWakelock(0);
CPhoneDevice* pThis = this;
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
IDevice::SerialsPhotoParam param = { "", 0, 0 };
GetPhotoSerialsParamCb(param);
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
std::thread t([localPhotoInfo, param, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
{
uint32_t waitTime = localPhotoInfo.selfTestingTime;
if(!GpioControl::GetSelftestStatus(waitTime))
{
pThis->m_isSelfTesting.store(true);
time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
pThis->m_isSelfTesting.store(false);
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
}
if (localPhotoInfo.preset != 0 && localPhotoInfo.preset != 0xFF)
{
XYLOG(XYLOG_SEVERITY_INFO,"Recv CameraCtrl Command, action= MOVE_PRESETNO, preset = %u", localPhotoInfo.preset);
CameraPhotoCmd(time(NULL), localPhotoInfo.channel, MOVE_PRESETNO, 0, localPhotoInfo.preset, param.serfile, param.baud, param.addr);
std::this_thread::sleep_for(std::chrono::seconds(10));
}
pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
pThis->ReleaseWakelock(wid_serial);
});
t.detach();
}
else if (mPhotoInfo.mediaType == 1 && (mPhotoInfo.cameraType == CAM_TYPE_NET))
{
uint64_t wid_serial = RequestWakelock(0);
CPhoneDevice* pThis = this;
IDevice::PHOTO_INFO localPhotoInfo = mPhotoInfo;
vector<IDevice::OSD_INFO> osds;
osds.swap(mOsds);
std::thread t([localPhotoInfo, pThis, path, osds, wid_serial, powerCtrlPtr]() mutable
{
uint32_t waitTime = localPhotoInfo.selfTestingTime;
if(!GpioControl::GetSelftestStatus(waitTime))
{
pThis->m_isSelfTesting.store(true);
time_t remaintime = GpioControl::GetSelfTestRemain(waitTime);
XYLOG(XYLOG_SEVERITY_INFO, "Camera is SeltTesting,remaining selfTestingtime=%u", remaintime);
remaintime = (remaintime != 0) ? (remaintime * 1024) : 10240;
std::this_thread::sleep_for(std::chrono::milliseconds(remaintime));
pThis->m_isSelfTesting.store(false);
XYLOG(XYLOG_SEVERITY_INFO, "Camera SeltTesting is over");
}
pThis->TakeVideoWithNetCamera(localPhotoInfo, path, osds, powerCtrlPtr);
pThis->ReleaseWakelock(wid_serial);
});
t.detach();
}
else if (mPhotoInfo.usingSysCamera == 1)
{
JNIEnv* env = NULL;
@ -3360,8 +3594,17 @@ bool CPhoneDevice::OnImageReady(cv::Mat mat)
#endif
#endif // OUTPUT_DBG_INFO
bool imgExisted = std::filesystem::exists(std::filesystem::path(fullPath));
if (imgExisted)
{
size_t imgFileSize = getFileSize(fullPath);
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
{
imgExisted = false;
}
}
if (!std::filesystem::exists(std::filesystem::path(fullPath)) || getFileSize(fullPath) == 0)
if (!imgExisted)
{
bool res = cv::imwrite(fullPath.c_str(), mat, params);
if (!res)
@ -3620,7 +3863,16 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<ID
bool res = false;
std::string fullPath = endsWith(path, ".jpg") ? path : (path + CTerminal::BuildPhotoFileName(photoInfo));
if (!std::filesystem::exists(std::filesystem::path(fullPath)) || getFileSize(fullPath) == 0)
bool imgExisted = std::filesystem::exists(std::filesystem::path(fullPath));
if (imgExisted)
{
size_t imgFileSize = getFileSize(fullPath);
if (imgFileSize == 0 || imgFileSize == (size_t)-1)
{
imgExisted = false;
}
}
if (!imgExisted)
{
#ifdef _DEBUG
char log[256] = { 0 };
@ -3643,7 +3895,7 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<ID
if (existsFile(tmpPath))
{
imgFileSize = getFileSize(tmpPath);
if (imgFileSize == 0)
if (imgFileSize == 0 || imgFileSize == -1)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Empty File Written: %s errno=%d", tmpPath.c_str() + m_appPath.size(), errcode);
remove(tmpPath.c_str());
@ -3671,8 +3923,8 @@ bool CPhoneDevice::PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<ID
res = (rename(tmpPath.c_str(), fullPath.c_str()) == 0);
if (res)
{
imgFileSize = getFileSize(tmpPath);
if (imgFileSize == 0)
imgFileSize = getFileSize(fullPath);
if (imgFileSize == 0 || imgFileSize == -1)
{
XYLOG(XYLOG_SEVERITY_ERROR, "Empty File after rename %s", fullPath.c_str() + m_appPath.size());
res = false;
@ -3887,6 +4139,7 @@ net_handle_t CPhoneDevice::GetEthnetHandle() const
void CPhoneDevice::SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway)
{
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool didAttachThread = false;
@ -3964,6 +4217,7 @@ int CPhoneDevice::GetSerialPhoto(int devno, D_IMAGE_DEF *photo)
return GetImage(devno, (IMAGE_DEF*)photo);
}
void CPhoneDevice::InitSerialComm(D_SENSOR_PARAM *sensorParam, char *filedir,const char *logpath)
{
Gm_InitSerialComm((SENSOR_PARAM *)sensorParam, filedir, logpath);
@ -4517,6 +4771,7 @@ bool CPhoneDevice::OpenSensors(int sensortype)
#ifndef USING_N938
#ifndef USING_PLZ
#else
GpioControl::TurnOn(CMD_SET_5V_PWR_ENABLE);
GpioControl::TurnOn(CMD_SET_PTZ_PWR_ENABLE);
#endif
#else
@ -4589,6 +4844,7 @@ bool CPhoneDevice::CloseSensors(int sensortype, uint32_t delayedCloseTime)
// GpioControl::TurnOff(CMD_SET_3V3_PWR_ENABLE);
#ifndef USING_PLZ
#else
GpioControl::TurnOffImmediately(CMD_SET_5V_PWR_ENABLE);
GpioControl::TurnOffImmediately(CMD_SET_PTZ_PWR_ENABLE);
#endif
#endif

@ -273,6 +273,8 @@ protected:
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
bool TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool StartPushStreaming(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{

@ -117,111 +117,12 @@ void Gm_CloseSensorsPower()
/* Power off */
//switch(port)
/* Final adjustment depends on the actual hardware layout; currently this is controlled by the MicroPhoto board */
/* set12VEnable(false);
setCam3V3Enable(false);
setRS485Enable(false);
#if 0
setInt(CMD_SET_WTH_POWER, 0);
setInt(CMD_SET_PULL_POWER, 0);
setInt(CMD_SET_ANGLE_POWER, 0);
setInt(CMD_SET_OTHER_POWER, 0);
setInt(CMD_SET_PIC1_POWER, 0);
sleep(3);
igpio = getInt(CMD_SET_WTH_POWER);
igpio = getInt(CMD_SET_PULL_POWER);
igpio = getInt(CMD_SET_ANGLE_POWER);
igpio = getInt(CMD_SET_OTHER_POWER);
igpio = getInt(CMD_SET_PIC1_POWER);
#endif
#if 1
setInt(CMD_SET_SPI_POWER, 1);
setInt(CMD_SET_485_EN0, 1);
setInt(CMD_SET_485_EN1, 1);
setInt(CMD_SET_485_EN2, 1);
setInt(CMD_SET_485_EN3, 1);
setInt(CMD_SET_485_EN4, 1);
#else
setInt(CMD_SET_SPI_POWER, 0);
setInt(CMD_SET_485_EN0, 0);
setInt(CMD_SET_485_EN1, 0);
setInt(CMD_SET_485_EN2, 0);
setInt(CMD_SET_485_EN3, 0);
setInt(CMD_SET_485_EN4, 0);
sleep(3);
igpio = getInt(CMD_SET_SPI_POWER);
igpio = getInt(CMD_SET_485_EN0);
igpio = getInt(CMD_SET_485_EN1);
igpio = getInt(CMD_SET_485_EN2);
igpio = getInt(CMD_SET_485_EN3);
igpio = getInt(CMD_SET_485_EN4);
#endif
*/
}
// Turn on sensor power
void Gm_OpenSensorsPower()
{
//char iIoNo;
/* int igpio;
char szbuf[128];
//if(0 == port)
// return;
//sprintf(szbuf, "Open Sensors port %d Power!", port);
//set12VEnable(true);
setCam3V3Enable(true);
setRS485Enable(true);
#if 0
setInt(CMD_SET_WTH_POWER, 0);
setInt(CMD_SET_PULL_POWER, 0);
setInt(CMD_SET_ANGLE_POWER, 0);
setInt(CMD_SET_OTHER_POWER, 0);
setInt(CMD_SET_PIC1_POWER, 0);
#else
setInt(CMD_SET_WTH_POWER, 1);
setInt(CMD_SET_PULL_POWER, 1);
setInt(CMD_SET_ANGLE_POWER, 1);
setInt(CMD_SET_OTHER_POWER, 1);
setInt(CMD_SET_PIC1_POWER, 1);
//sleep(3);
igpio = getInt(CMD_SET_WTH_POWER);
igpio = getInt(CMD_SET_PULL_POWER);
igpio = getInt(CMD_SET_ANGLE_POWER);
igpio = getInt(CMD_SET_OTHER_POWER);
igpio = getInt(CMD_SET_PIC1_POWER);
#endif
#if 1
setInt(CMD_SET_SPI_POWER, 1);
setInt(CMD_SET_485_EN0, 1);
setInt(CMD_SET_485_EN1, 1);
setInt(CMD_SET_485_EN2, 1);
setInt(CMD_SET_485_EN3, 1);
setInt(CMD_SET_485_EN4, 1);
//sleep(3);
igpio = getInt(CMD_SET_SPI_POWER);
igpio = getInt(CMD_SET_485_EN0);
igpio = getInt(CMD_SET_485_EN1);
igpio = getInt(CMD_SET_485_EN2);
igpio = getInt(CMD_SET_485_EN3);
igpio = getInt(CMD_SET_485_EN4);
#else
setInt(CMD_SET_485_EN0, 0);
setInt(CMD_SET_485_EN1, 0);
setInt(CMD_SET_485_EN2, 0);
setInt(CMD_SET_485_EN3, 0);
setInt(CMD_SET_485_EN4, 0);
#endif
// Power on
//switch(port)
*/
}
// Query sensor power status

@ -166,6 +166,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
mCaptureTriggered = false;
mFocusTriggered = false;
mCaptureDispatched = false;
maxFrameDuration = 0;
afSupported = false;
@ -225,6 +226,7 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
NdkCamera::~NdkCamera()
{
XYLOG(XYLOG_SEVERITY_DEBUG, "NdkCamera::~NdkCamera %s", mCameraId.c_str());
close();
}
@ -999,6 +1001,8 @@ void NdkCamera::close()
}
*/
mPreviewResults.reset();
mCaptureResults.clear();
mCaptureFrames.clear();
if ((ACameraManager *)camera_manager != NULL)
@ -1008,7 +1012,8 @@ void NdkCamera::close()
if (capture_session)
{
// res = ACameraCaptureSession_stopRepeating(capture_session);
res = ACameraCaptureSession_stopRepeating(capture_session);
std::this_thread::sleep_for(std::chrono::milliseconds(512));
ACameraCaptureSession_close(capture_session);
capture_session = 0;
}
@ -1050,11 +1055,24 @@ void NdkCamera::close()
if (mPreviewImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mPreviewImageReader");
#endif
AImage* image = NULL;
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mPreviewImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
AImageReader_setImageListener(mPreviewImageReader, NULL);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::AImageReader_delete %s", mCameraId.c_str());
AImageReader_delete(mPreviewImageReader);
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mPreviewImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mPreviewImageReader");
#endif
}
if (mOutputTarget != NULL)
@ -1071,9 +1089,13 @@ void NdkCamera::close()
if (mImageReader != NULL)
{
#ifdef _DEBUG
ALOGD("Will Free mImageReader");
#endif
AImage* image = NULL;
int32_t status;
while ((status = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK) {
media_status_t mstatus;
while ((mstatus = AImageReader_acquireNextImage(mImageReader, &image)) == AMEDIA_OK)
{
AImage_delete(image);
image = NULL;
}
@ -1084,6 +1106,9 @@ void NdkCamera::close()
//XYLOG(XYLOG_SEVERITY_DEBUG, "CameraStatus::End AImageReader_delete %s", mCameraId.c_str());
mImageReader = 0;
#ifdef _DEBUG
ALOGD("After Free mImageReader");
#endif
}
if (mOutputTarget2 != NULL)
{
@ -1300,16 +1325,24 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
std::shared_ptr<ACameraMetadata> result;
bool captureCompleted = false;
bool captureDispatchable = false;
m_locker.lock();
if (!mCaptureResults.empty())
{
captureCompleted = true;
result = mCaptureResults[0];
}
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted)
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onImageAvailable");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
onOneCapture(mCharacteristics, result, mFinalLdr, ts - m_startTime, mOneFrame);
}
}
@ -1340,17 +1373,23 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
}
bool captureCompleted = false;
bool captureDispatchable = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
m_locker.lock();
captureCompleted = mCaptureResults.size() >= expectedTimes && mCaptureFrames.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted)
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
}
}
}
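Both this hunk and the onCaptureCompleted hunks below apply the same dispatch-once idiom: completion is detected under the lock, but only the caller that flips mCaptureDispatched actually dispatches. A condensed sketch (member names as in NdkCamera):
bool dispatchable = false;
m_locker.lock();
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true; // claimed by exactly one caller
dispatchable = true;
}
m_locker.unlock();
if (dispatchable)
{
// safe to run onOneCapture()/FireBurstCapture() without double dispatch
}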
@ -1765,6 +1804,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
ACameraMetadata* pCopy = ACameraMetadata_copy(result);
bool captureCompleted = false;
bool captureDispatchable = false;
size_t expectedTimes = mCaptureRequests.size() - 1;
std::shared_ptr<ACameraMetadata> captureResult(pCopy, ACameraMetadata_free);
@ -1773,10 +1813,17 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = !mOneFrame.empty();
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted)
if (captureCompleted && captureDispatchable)
{
XYLOG(XYLOG_SEVERITY_INFO,"onOneCapture from onCaptureCompleted");
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
onOneCapture(mCharacteristics, captureResult, mFinalLdr, ts - m_startTime, mOneFrame);
}
}
@ -1785,9 +1832,14 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
m_locker.lock();
mCaptureResults.push_back(captureResult);
captureCompleted = mCaptureFrames.size() >= expectedTimes && mCaptureResults.size() >= expectedTimes;
if (captureCompleted && !mCaptureDispatched)
{
mCaptureDispatched = true;
captureDispatchable = true;
}
m_locker.unlock();
if (captureCompleted)
if (captureCompleted && captureDispatchable)
{
FireBurstCapture();
}
@ -1798,6 +1850,8 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
void NdkCamera::FireBurstCapture()
{
camera_status_t status = ACameraCaptureSession_stopRepeating(capture_session);
unsigned long long ts = GetMicroTimeStamp();
size_t expectedTimes = mCaptureRequests.size() - 1;
@ -1885,7 +1939,7 @@ void NdkCamera::onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest*
{
bool isPreview = (request == mCaptureRequests[PREVIEW_REQUEST_IDX]->request);
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d PhotoTaken=%d Preview=%d", session, request, failure->reason, m_photoTaken ? 1 : 0, isPreview ? 1 : 0);
XYLOG(XYLOG_SEVERITY_WARNING, "onCaptureFailed session=%p request=%p reason=%d CameraId=%s PhotoTaken=%d Preview=%d", session, request, failure->reason, mCameraId.c_str(), m_photoTaken ? 1 : 0, isPreview ? 1 : 0);
if (isPreview)
{

@ -236,6 +236,7 @@ protected:
bool mCaptureTriggered;
bool mFocusTriggered;
bool mCaptureDispatched;
CAPTURE_RESULT mResult;
unsigned long long m_startTime;
@ -267,6 +268,8 @@ protected:
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
uint32_t mLdr;
@ -278,8 +281,6 @@ protected:
cv::Mat mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;
// ACameraOutputTarget* image_reader_target;

@ -0,0 +1,231 @@
//
// Created by Matthew on 2025/3/1.
//
#include "RTSPRecorder.h"
#include <chrono>
#include <thread>
#include <android/log.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
}
#define LOG_TAG "libcurl"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
av_register_all();
avformat_network_init();
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
av_packet_unref(&packet);
}
stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
av_register_all();
avformat_network_init();
// Open input RTSP stream
if (avformat_open_input(&inputFormatContext, rtspUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
av_packet_unref(&packet);
}
stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}
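A minimal call sketch for the dumper above, mirroring the call in CPhoneDevice::TakeVideoWithNetCamera (the RTSP URL and output path are placeholders):
// Copies roughly 15 seconds of the stream into an MP4 container without re-encoding.
dumpRtspToMp4("rtsp://192.168.0.13:554/media/video1", "/sdcard/mpapp/tmp/12345.mp4", 15 * 1000);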

@ -0,0 +1,19 @@
//
// Created by Matthew on 2025/3/1.
//
#ifndef MICROPHOTO_RTSPRECORDER_H
#define MICROPHOTO_RTSPRECORDER_H
#include <string>
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration);
class RTSPRecorder {
};
#endif //MICROPHOTO_RTSPRECORDER_H

@ -0,0 +1,186 @@
//
// Created by Matthew on 2025/2/28.
//
#include "RTSPToMP4.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <limits>
int32_t getMaxInputSize(AMediaExtractor* extractor, size_t trackIndex)
{
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, trackIndex);
int32_t maxInputSize = 0;
if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &maxInputSize)) {
// LOGI("Max input size for track %zu: %d", trackIndex, maxInputSize);
} else {
// LOGE("Failed to get max input size for track %zu", trackIndex);
}
AMediaFormat_delete(format);
return maxInputSize;
}
RTSPToMP4::RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs/* = 0*/)
: fd(-1), codec(nullptr), extractor(nullptr), muxer(nullptr), videoTrackIndex(-1), durationInMs(durationInMs), running(false) {
initExtractor(rtspUrl);
initCodec("video/avc");
initMuxer(outputPath);
}
RTSPToMP4::~RTSPToMP4() {
if (codec) AMediaCodec_delete(codec);
if (extractor) AMediaExtractor_delete(extractor);
if (muxer) AMediaMuxer_delete(muxer);
if (fd != -1)
{
fdatasync(fd);
close(fd);
fd = -1;
}
}
void RTSPToMP4::initCodec(const char* mime) {
codec = AMediaCodec_createDecoderByType(mime);
AMediaFormat* format = AMediaFormat_new();
AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
// Set other format parameters as needed
// ...
AMediaCodec_configure(codec, format, nullptr, nullptr, 0);
AMediaFormat_delete(format);
}
void RTSPToMP4::initExtractor(const char* rtspUrl) {
extractor = AMediaExtractor_new();
media_status_t status = AMediaExtractor_setDataSource(extractor, rtspUrl);
if (status != AMEDIA_OK) {
// Handle error
// ...
}
}
void RTSPToMP4::initMuxer(const char* outputPath) {
fd = open(outputPath, O_CREAT | O_WRONLY, 0644);
muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
int numTracks = AMediaExtractor_getTrackCount(extractor);
if (numTracks <= 0) {
// LOGE("No tracks found in RTSP stream");
AMediaExtractor_delete(extractor);
return;
}
for (int i = 0; i < numTracks; ++i) {
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, i);
const char* mime;
if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime) && strncmp(mime, "video/", 6) == 0) {
videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
AMediaExtractor_selectTrack(extractor, i);
}
AMediaFormat_delete(format);
}
if (videoTrackIndex == -1) {
// LOGE("No video track found in RTSP stream");
AMediaExtractor_delete(extractor);
AMediaMuxer_delete(muxer);
return;
}
int32_t maxInputSize = getMaxInputSize(extractor, videoTrackIndex);
if (maxInputSize <= 0) {
// LOGE("Invalid max input size");
// releaseMediaExtractor(extractor);
sampleData.resize(1920 * 1080 * 4, 0);
return;
}
sampleData.resize(maxInputSize, 0);
}
void RTSPToMP4::startDecodingAndMuxing() {
AMediaCodec_start(codec);
size_t bufferSize = sampleData.size();
uint8_t* buffer = &sampleData[0];
int64_t sampleTime = 0;
int64_t startTime = 0;
bool firstSampleData = true;
int64_t durationTime = (durationInMs == 0) ? std::numeric_limits<int64_t>::max() : (int64_t)durationInMs * 1000;
while (running) {
// Extract data from RTSP stream
ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
if (sampleSize < 0) {
break; // End of stream
}
sampleTime = AMediaExtractor_getSampleTime(extractor);
if (firstSampleData)
{
startTime = sampleTime;
firstSampleData = false;
}
sampleTime -= startTime;
// Feed data to codec
ssize_t inputBufferIndex = AMediaCodec_dequeueInputBuffer(codec, 10000 /* us */);
if (inputBufferIndex >= 0) {
size_t inputBufferSize = 0;
uint8_t* inputBuffer = AMediaCodec_getInputBuffer(codec, (size_t)inputBufferIndex, &inputBufferSize);
memcpy(inputBuffer, buffer, sampleSize);
AMediaCodec_queueInputBuffer(codec, inputBufferIndex, 0, sampleSize, sampleTime, 0);
}
// Retrieve decoded frames and write to muxer
AMediaCodecBufferInfo bufferInfo;
ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &bufferInfo, 0);
if (outputBufferIndex >= 0) {
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.presentationTimeUs = sampleTime;
bufferInfo.flags = AMediaExtractor_getSampleFlags(extractor);
size_t outputBufferSize = 0;
uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(codec, outputBufferIndex, &outputBufferSize);
AMediaMuxer_writeSampleData(muxer, videoTrackIndex, outputBuffer, &bufferInfo);
AMediaCodec_releaseOutputBuffer(codec, outputBufferIndex, false);
}
AMediaExtractor_advance(extractor);
if (sampleTime > durationTime)
{
break;
}
}
AMediaCodec_stop(codec);
AMediaMuxer_stop(muxer);
if (fd != -1)
{
fdatasync(fd);
close(fd);
fd = -1;
}
}
void RTSPToMP4::start() {
// Add video track to muxer
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, 0);
videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
running = true;
AMediaMuxer_start(muxer);
startDecodingAndMuxing();
}
void RTSPToMP4::stop() {
running = false;
}
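For reference, the commented-out call site in TakeVideoWithNetCamera would drive this class roughly as below (it is currently bypassed in favor of dumpRtspToMp4; URL and path are illustrative):
RTSPToMP4 dumper("rtsp://192.168.0.13:554/media/video1", "/sdcard/mpapp/tmp/clip.mp4", 15 * 1000);
dumper.start(); // blocks in startDecodingAndMuxing() until the duration elapses or the stream ends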

@ -0,0 +1,38 @@
//
// Created by Matthew on 2025/2/28.
//
#ifndef MICROPHOTO_RTSPTOMP4_H
#define MICROPHOTO_RTSPTOMP4_H
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaExtractor.h>
#include <media/NdkMediaMuxer.h>
#include <vector>
class RTSPToMP4 {
public:
RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs = 0);
~RTSPToMP4();
void start();
void stop();
private:
void initCodec(const char* mime);
void initExtractor(const char* rtspUrl);
void initMuxer(const char* outputPath);
void startDecodingAndMuxing();
int fd;
AMediaCodec* codec;
AMediaExtractor* extractor;
AMediaMuxer* muxer;
int videoTrackIndex;
uint64_t durationInMs;
bool running;
std::vector<uint8_t> sampleData;
};
#endif //MICROPHOTO_RTSPTOMP4_H

@ -0,0 +1,20 @@
//
// Created by Matthew on 2025/3/4.
//
#include "VendorCtrl.h"
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel) :
m_ip(ip), m_userName(userName), m_password(password), m_channel(channel)
{
}
// Out-of-line definition for the pure virtual destructor declared in VendorCtrl.h,
// required so that derived destructors (e.g. YuShiCtrl) can link.
VendorCtrl::~VendorCtrl()
{
}
std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
{
Json::StreamWriterBuilder builder;
#ifndef NDEBUG
builder["indentation"] = "\t"; // assume default for comments is None
builder["emitUTF8"] = true;
#else
builder["indentation"] = "";
#endif
return Json::writeString(builder, data);
}

@ -0,0 +1,34 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_VENDORCTRL_H
#define MICROPHOTO_VENDORCTRL_H
#include <string>
#include <json/json.h>
class VendorCtrl {
public:
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel);
virtual ~VendorCtrl() = 0;
virtual bool SetOsd() = 0;
virtual void EnableOsd(bool enable) = 0;
virtual std::string GetStreamingUrl(uint8_t channel) = 0;
virtual bool UpdateTime(time_t ts) = 0;
virtual bool TakePhoto(std::vector<uint8_t>& img) = 0;
protected:
std::string CvtJSONToString(const Json::Value& data);
protected:
std::string m_ip;
std::string m_userName;
std::string m_password;
uint8_t m_channel;
};
#endif //MICROPHOTO_VENDORCTRL_H

@ -0,0 +1,47 @@
//
// Created by Matthew on 2025/3/4.
//
#include "YuShiCtrl.h"
#include "httpclient.h"
YuShiCtrl::~YuShiCtrl()
{
}
bool YuShiCtrl::SetOsd()
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
return false; // not implemented yet
}
void YuShiCtrl::EnableOsd(bool enable)
{
}
std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
return "";
}
bool YuShiCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
Json::Value jsonData(Json::objectValue);
jsonData["TimeZone"] = "GMT+08:00";
jsonData["DeviceTime"] = ts;
jsonData["DateFormat"] = 0; // YYYY-MM-DD
jsonData["HourFormat"] = 1; // 24H
return false;
}
bool YuShiCtrl::TakePhoto(std::vector<uint8_t>& img)
{
return false;
}
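Because YuShiCtrl pulls in the base constructor via using VendorCtrl::VendorCtrl, constructing one mirrors the base signature; a small sketch with placeholder address and credentials:
YuShiCtrl ctrl("192.168.0.13", "admin", "password", /*channel*/ 1);
std::string url = ctrl.GetStreamingUrl(1); // stub for now, returns an empty string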

@ -0,0 +1,27 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_YUSHICTRL_H
#define MICROPHOTO_YUSHICTRL_H
#include "VendorCtrl.h"
class YuShiCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~YuShiCtrl();
virtual bool SetOsd();
virtual void EnableOsd(bool enable);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(std::vector<uint8_t>& img);
private:
};
#endif //MICROPHOTO_YUSHICTRL_H

@ -10,6 +10,7 @@ static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid)
std::vector<uint8_t>* data = (std::vector<uint8_t>*)lpVoid;
if( NULL == data || NULL == buffer )
{
XYLOG(XYLOG_SEVERITY_ERROR,"OnWriteData callback -1");
return -1;
}
uint8_t* begin = (uint8_t *)buffer;
@ -27,6 +28,7 @@ static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype pur
{
int errcode = errno;
printf("android_setsocknetwork errno=%d", errcode);
XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
}
return res == 0 ? CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR;
}

@ -112,37 +112,6 @@ int set_port_attr (int fd, int baudrate, int databit, const char *stopbit, char
return (tcsetattr (fd, TCSANOW, &opt));
}
static void setInt(int cmd, int value)
{
int fd = open("/dev/mtkgpioctrl", O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
if( fd > 0 )
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
close(fd);
}
return;
}
static void setRS485Enable(bool z) {
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
}
static void set485WriteMode() {
setInt(CMD_SET_485_STATE, 1);
}
static void set485ReadMode() {
setInt(CMD_SET_485_STATE, 0);
}
static void set12VEnable(bool z) {
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
/*********************************************************************************
* *
**********************************************************************************/

@ -8,10 +8,6 @@
#include <string>
#include "GPIOControl.h"
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args)
// Serial port parameters
@ -34,14 +30,6 @@ typedef struct
unsigned char m_au8RecvBuf[128];/* */
} SIO_PARAM_SERIAL_DEF;
typedef struct
{
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
void PortDataProcess( void );
int serial_port_comm();

@ -46,6 +46,9 @@ public class BridgeProvider extends ContentProvider {
private final static String PATH_RECOG_PIC = "/recogPic";
private final static String PATH_REQUEST_PWR_CTRL = "/requestPwrCtrl";
private final static String PATH_RELEASE_PWR_CTRL = "/releasePwrCtrl";
public BridgeProvider() {
Log.i(TAG, "BridgeProvider");
}
@ -85,6 +88,9 @@ public class BridgeProvider extends ContentProvider {
matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1);
matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2);
matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3);
matcher.addURI(AUTHORITY, PATH_REQUEST_PWR_CTRL, 4);
matcher.addURI(AUTHORITY, PATH_RELEASE_PWR_CTRL, 5);
Cursor cursor = null;
int matched = matcher.match(uri);
@ -98,6 +104,12 @@ public class BridgeProvider extends ContentProvider {
case 3:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
case 4:
cursor = requestPowerControl(uri, selection, selectionArgs);
break;
case 5:
cursor = releasePowerControl(uri, selection, selectionArgs);
break;
default:
break;
}
@ -169,6 +181,48 @@ public class BridgeProvider extends ContentProvider {
return matrixCursor;
}
private Cursor requestPowerControl(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);
int type = 0;
if (!TextUtils.isEmpty(decodedSelection)) {
Uri u = Uri.parse("http://a.com/?" + decodedSelection);
String val = u.getQueryParameter("type");
try {
type = Integer.parseInt(val);
} catch (Exception ex) {
ex.printStackTrace();
}
}
long nativeHandle = MicroPhotoService.requestPowerControl(type);
String[] columns = { "pwrCtrl" };
MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
matrixCursor.addRow(new Object[] { Long.valueOf(nativeHandle) });
return matrixCursor;
}
private Cursor releasePowerControl(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);
long nativeHandle = 0;
if (!TextUtils.isEmpty(decodedSelection)) {
Uri u = Uri.parse("http://a.com/?" + decodedSelection);
String val = u.getQueryParameter("handle");
try {
nativeHandle = Long.parseLong(val);
} catch (Exception ex) {
ex.printStackTrace();
}
}
boolean res = MicroPhotoService.releasePowerControl(nativeHandle);
String[] columns = { "result" };
MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
matrixCursor.addRow(new Object[] { Integer.valueOf(res ? 1 : 0) });
return matrixCursor;
}
private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);

@ -174,12 +174,13 @@ public class MainActivity extends AppCompatActivity {
}
if (MicroPhotoContext.hasMpAppConfig(appContext)) {
Runnable runnable = new Runnable() {
final Runnable runnable = new Runnable() {
@Override
public void run() {
if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) {
if (binding.btnStartServ.isEnabled()) {
Log.i(TAG, "Perform AutoStart");
binding.btnStartServ.performClick();
}
}
@ -189,10 +190,11 @@ public class MainActivity extends AppCompatActivity {
long timeout = 500;
if (SystemClock.elapsedRealtime() < 180000) {
                        // Within 3 minutes after boot
timeout = 30000; // in 30 seconds
timeout = 10000; // in 10 seconds
}
Handler handler = new Handler();
handler.postDelayed(runnable, timeout);
Log.i(TAG, "Set AutoStart after " + Long.toString(timeout) + "ms");
}
}
@ -232,6 +234,7 @@ public class MainActivity extends AppCompatActivity {
startMicroPhotoService(appContext, curAppConfig, mMessenger);
Log.i(TAG, "Service auto-started");
binding.btnStartServ.setEnabled(false);
binding.btnStopServ.setEnabled(true);
}

@ -91,6 +91,7 @@ public class MicroPhotoService extends Service {
// Used to load the 'microphoto' library on application startup.
static {
loadLibrary("microphoto");
}
@ -316,6 +317,7 @@ public class MicroPhotoService extends Service {
intentFilter.addAction(ACTION_UPDATE_CONFIGS);
intentFilter.addAction(ACTION_IMP_PUBKRY);
intentFilter.addAction(ACTION_TAKE_PHOTO_MANUALLY);
intentFilter.addAction(ACTION_HEARTBEAT_MANUALLY);
intentFilter.addAction(ACTION_GPS_TIMEOUT);
intentFilter.addAction(ACTION_RESTART);
getApplicationContext().registerReceiver(mAlarmReceiver, intentFilter, Context.RECEIVER_EXPORTED | Context.RECEIVER_VISIBLE_TO_INSTANT_APPS);
@ -455,8 +457,7 @@ public class MicroPhotoService extends Service {
ex.printStackTrace();
}
}
private void restartSelf(Context context, String reason) {
private void restartSelfImpl(Context context, String reason) {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(context.getPackageName());
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
@ -549,7 +550,7 @@ public class MicroPhotoService extends Service {
int restart = intent.getIntExtra("restart", 0);
Log.i(TAG, "UPD CFG Fired ACTION=" + action + " restart=" + restart);
if (restart != 0) {
restartSelf(context, "Cfg Updated");
restartSelfImpl(context, "Cfg Updated");
} else if (mService.mNativeHandle != 0) {
mService.reloadConfigs(mService.mNativeHandle);
}
@ -670,7 +671,7 @@ public class MicroPhotoService extends Service {
} catch (Exception ex) {
ex.printStackTrace();
}
restartSelf(context, reason);
restartSelfImpl(context, reason);
}
}
}
@ -1679,6 +1680,9 @@ cellSignalStrengthGsm.getDbm();
public static native boolean exportPublicKeyFile(int index, String outputPath);
public static native boolean exportPrivateFile(int index, String outputPath);
public static native long requestPowerControl(int type);
public static native boolean releasePowerControl(long powerControlHandle);
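A minimal usage sketch for these natives, assuming (not stated in the diff) that a returned handle of 0 means the request failed and that each successful request is paired with a release:

        int type = 1; // hypothetical power-control type code
        long pwrHandle = MicroPhotoService.requestPowerControl(type);
        if (pwrHandle != 0) {
            try {
                // ... use the peripheral that needed power ...
            } finally {
                MicroPhotoService.releasePowerControl(pwrHandle);
            }
        }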
////////////////////////GPS////////////////////
// private static final String GPS_LOCATION_NAME = android.location.LocationManager.GPS_PROVIDER;
private LocationManager mLocationManager;

@ -6,8 +6,8 @@
<item>65282-江苏</item>
<item>65283-湖南</item>
<item>65284-浙江</item>
<item>65285-河南</item>
<item>65286-郑州</item>
<item>65290-河南</item>
<item>65298-宁夏</item>
</string-array>
</resources>

@ -6,7 +6,6 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
@ -45,6 +44,16 @@ public class CameraUtils {
Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
builder.append(orientation == null ? "" : orientation.toString());
int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean hasRaw = false;
for (int capability : capabilities) {
if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
hasRaw = true;
break;
}
}
builder.append(" raw=" + (hasRaw ? "1" : "0"));
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);

@ -418,19 +418,6 @@ public class MicroPhotoContext {
*/
try {
if (TextUtils.equals(packageName, PACKAGE_NAME_MPAPP)) {
Intent intent = new Intent(ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setPackage(PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
} else {
SysApi.forceStopApp(context, packageName);
}
Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
if (intent != null) {
intent.putExtra("noDelay", 1);

@ -4,7 +4,7 @@ plugins {
def AppMajorVersion = 1
def AppMinorVersion = 1
def AppBuildNumber = 1
def AppBuildNumber = 8
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber

@ -19,6 +19,7 @@ import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;
import com.dev.devapi.api.SysApi;
import com.xypower.common.MicroPhotoContext;
import java.text.SimpleDateFormat;
@ -182,7 +183,17 @@ public class MainActivity extends AppCompatActivity {
Context context = getApplicationContext();
MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(context);
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("CMDID" + (TextUtils.isEmpty(appConfig.cmdid) ? "" : appConfig.cmdid));
if (TextUtils.isEmpty(appConfig.cmdid)) {
String sn = SysApi.getSerialNo(getApplicationContext());
if (TextUtils.isEmpty(sn)) {
stringBuilder.append("CMDID");
} else {
stringBuilder.append("CMDID" + sn + " (SN)");
}
} else {
stringBuilder.append("CMDID" + appConfig.cmdid);
}
MicroPhotoContext.MasterConfig masterConfig = MicroPhotoContext.getMasterConfig(context);
stringBuilder.append("\r\n");

@ -19,6 +19,7 @@ import android.os.Environment;
import android.os.Handler;
import android.os.IBinder;
import android.os.PowerManager;
import android.os.SystemClock;
import android.telephony.SubscriptionInfo;
import android.telephony.SubscriptionManager;
import android.telephony.TelephonyManager;
@ -81,6 +82,7 @@ public class MpMasterService extends Service {
private static final String ACTION_HEARTBEAT = "com.xypower.mpmaster.ACT_HB";
private static final String ACTION_TAKE_PHOTO = "com.xypower.mpapp.ACT_TP";
private static final String ACTION_MP_HEARTBEAT_MANUALLY = "com.xypower.mpapp.ACT_HB_M";
public static final String ACTION_MP_RESTART = "com.xypower.mpapp.ACT_RESTART";
public static final String ACTION_IMP_PUBKRY = "com.xypower.mpapp.ACT_IMP_PUBKEY";
@ -391,20 +393,34 @@ public class MpMasterService extends Service {
public boolean shouldSyncTime() { return mSyncTime; }
public void detectMpAppAlive() {
final MpMasterService thisObj = this;
Thread th = new Thread(new Runnable() {
@Override
public void run() {
thisObj.detectMpAppAliveImpl();
}
});
th.start();
}
private void detectMpAppAliveImpl() {
try {
final Context context = getApplicationContext();
long ts = System.currentTimeMillis();
try {
boolean isMpAppRunning = detectMpAppRunning();
if (!isMpAppRunning) {
try {
Thread.sleep(1000);
} catch (Exception ex) {
ex.printStackTrace();
int detectionCnt = 4;
if (SystemClock.elapsedRealtime() < 180000) {
                // Within 3 minutes after device reboot
                detectionCnt = 16;
            }
            // Retry the check up to detectionCnt times, 1 second apart
boolean isMpAppRunning = false;
for (int idx = 0; idx < detectionCnt; idx++) {
isMpAppRunning = detectMpAppRunning();
if (isMpAppRunning) {
break;
}
sleep(1000);
}
if (!isMpAppRunning) {
@ -1060,7 +1076,7 @@ public class MpMasterService extends Service {
public static String getBatteryVoltage() {
int val = 0;
for (int idx = 0; idx < 3; idx++) {
val = MpMasterService.getInt(115);
val = MpMasterService.getInt(117);
if (val > 0) {
return Integer.toString(val / 1000) + "." + Integer.toString((val % 1000) / 100);
}
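A worked example of the formatting above, assuming getInt(117) reports millivolts:

        int val = 3785; // e.g. getInt(117) returning 3785 mV
        String volts = Integer.toString(val / 1000) + "." + Integer.toString((val % 1000) / 100);
        // volts == "3.7" -- integer division keeps exactly one decimal digit and truncates the rest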
@ -1192,6 +1208,20 @@ public class MpMasterService extends Service {
} else {
SysApi.selectSimCard4Data(context, num);
}
if (num == 1) {
                // If it's back to card 1, have MpAPP send a heartbeat manually after 10s
mHander.postDelayed(new Runnable() {
@Override
public void run() {
Intent intent = new Intent();
intent.setAction(ACTION_MP_HEARTBEAT_MANUALLY);
intent.setPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
sendBroadcast(intent);
}
}, 10000);
}
}
private void setDefaultDataSubId(int subId) {
@ -1293,11 +1323,8 @@ public class MpMasterService extends Service {
@Override
public void run() {
try {
Thread.sleep(5000);
} catch (Exception ex) {
sleep(5000);
}
File tmpDestPath = new File(MicroPhotoContext.buildMasterAppDir(context));
tmpDestPath = new File(tmpDestPath, "mpdata");
if (tmpDestPath.exists()) {
@ -1415,6 +1442,13 @@ public class MpMasterService extends Service {
}
}
private static void sleep(long ms) {
try {
Thread.sleep(ms);
} catch (Exception ex) {
}
}
public native static int getInt(int cmd);
public native static int setInt(int cmd, int val);
public native static int[] getStats(long ts);

@ -81,7 +81,6 @@ public class SimUtil {
}
        int slot = smsInfo.getSlot(); // SIM slot that received the SMS
        String sender = smsInfo.getSender(); // phone number of the SMS sender
        String sendmessage = "ERROR"; // SMS reply to send back
        List<Integer> abslist = new ArrayList<>(); // received SMS content split into a list
        boolean ifmessageCorrect = false; // whether the received SMS content is well-formed
if (StringUtils.isEmpty(content)) {
@ -122,14 +121,13 @@ public class SimUtil {
String s = split1[1];
String[] split2 = StringUtils.splitString2(s);
                int spilt2len = (split2 == null) ? 0 : split2.length; // guard against a null split result
if (split2 != null && spilt2len > 1 && spilt2len % 2 == 1) {
if (split2 != null && spilt2len > 0) {
String num = split2[0];
Integer integer = StringUtils.convert2Int(num);
if (integer != null) {
                        if (integer == 0) { // 0: delete all O&M (maintenance) entries
ifmessageCorrect = true;
} else {
if (spilt2len == integer * 2 + 1) {
int times = 0;
for (int i = 0; i < spilt2len; i++) {
if (i == 0) {
@ -137,6 +135,7 @@ public class SimUtil {
}
String ts = split2[i];
Integer time = StringUtils.convert2Int(ts);
if (time != null) {
if (i % 2 == 1) {
if (time > 23) {
ifmessageCorrect = false;
@ -181,7 +180,9 @@ public class SimUtil {
String[] split1 = StringUtils.splitString1(content);
if (split1 != null && split1.length > 1) {
String s = split1[1];
Integer integer = StringUtils.convert2Int(s);
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 1) {
Integer integer = StringUtils.convert2Int(strings[0]);
if (integer != null) {
if (integer == 0 || integer == 1) {
ifmessageCorrect = true;
@ -189,6 +190,7 @@ public class SimUtil {
}
}
}
}
restartType = 2;
sendmessage = getSendString(content, ifmessageCorrect);
} else if (content.contains(SmsTypeEnum.GET_OPERATE.value())) {
@ -230,9 +232,15 @@ public class SimUtil {
String[] split1 = StringUtils.splitString1(content);
if (split1 != null && split1.length > 1) {
ifmessageCorrect = true;
String cmdid = split1[1];
String s = split1[1];
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 1) {
String cmdid = strings[0];
if (cmdid != null) {
UpdateSysConfigUtil.setCmdid(context, cmdid);
}
}
}
sendmessage = getSendString(content, ifmessageCorrect);
} else if (content.contains(SmsTypeEnum.GET_CMDID.value())) {
String cmdid = UpdateSysConfigUtil.getCmdid(context);
@ -242,32 +250,32 @@ public class SimUtil {
restartType = 1;
String[] split1 = StringUtils.splitString1(content);
if (split1 != null && split1.length > 1) {
String server = null;
Integer port = null;
Integer utcp = null;
Integer encrypto = null;
String s = split1[1];
String[] split2 = StringUtils.splitString2(s);
if (split2 != null && (split2.length == 2 || split2.length == 4)) {
String server;
Integer integer;
server = split2[0];
String port = split2[1];
integer = StringUtils.convert2Int(port);
Integer utcp = -1;
Integer encrypto = -1;
if (integer != null) {
if (split2 != null && split2.length > 1) {
ifmessageCorrect = true;
if (split2.length == 4) {
String s1 = split2[2];
utcp = StringUtils.convert2Int(s1);
utcp = getUtcp(utcp);
String s2 = split2[3];
encrypto = StringUtils.convert2Int(s2);
encrypto = getEncrypto(encrypto);
if (utcp == -1 || encrypto == -1) {
ifmessageCorrect = false;
if (split2.length > 0) {
server = split2[0];
}
if (split2.length > 1) {
String s1 = split2[1];
port = StringUtils.convert2Int(s1);
}
if (split2.length > 2) {
String s2 = split2[2];
utcp = StringUtils.convert2Int(s2);
utcp = getUtcp(utcp);
}
if (split2.length > 3) {
String s3 = split2[3];
encrypto = StringUtils.convert2Int(s3);
}
if (ifmessageCorrect) {
UpdateSysConfigUtil.setIP(context, server, integer, utcp, encrypto);
UpdateSysConfigUtil.setIP(context, server, port, utcp, encrypto);
}
}
}
@ -279,13 +287,13 @@ public class SimUtil {
restartType = 1;
String[] split1 = StringUtils.splitString1(content);
sendmessage = getSendString(content, ifmessageCorrect);
if (split1 != null && split1.length == 2) {
if (split1 != null && split1.length >= 2) {
ifmessageCorrect = true;
JSONObject osdmap = new JSONObject();
String s = split1[1];
String[] split2 = StringUtils.splitString2(s);
                int spilt2len = (split2 == null) ? 0 : split2.length; // guard against a null split result
if (split2 != null && spilt2len > 1 && spilt2len % 2 == 1) {
if (split2 != null && spilt2len > 1) {
String num = split2[0];
Integer integer = StringUtils.convert2Int(num);
if (integer != null) {
@ -417,7 +425,7 @@ public class SimUtil {
if (split1 != null && split1.length > 1) {
String s = split1[1];
String[] split2 = StringUtils.splitString2(s);
if (split2 != null && split2.length == 2) {
if (split2 != null && split2.length > 1) {
Integer channel = StringUtils.convert2Int(split2[0]);
if (channel != null) {
ifmessageCorrect = true;
@ -430,17 +438,21 @@ public class SimUtil {
} else if (content.contains(SmsTypeEnum.GET_PHOTO_SCHEDULE_LIST.value())) {
String[] split1 = StringUtils.splitString1(content);
if (split1 != null && split1.length == 2) {
Integer channel = StringUtils.convert2Int(split1[1]);
String photoSchedules = UpdateSysConfigUtil.getPhotoSchedules(channel);
String s = split1[1];
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 1) {
Integer integer = StringUtils.convert2Int(strings[0]);
String photoSchedules = UpdateSysConfigUtil.getPhotoSchedules(integer);
sendmessage = SmsTypeEnum.GET_PHOTO_SCHEDULE_LIST.value() + "=" + photoSchedules;
}
}
} else if (content.contains(SmsTypeEnum.SET_RESOLUTION.value())) {
restartType = 1;
String[] split1 = StringUtils.splitString1(content);
if (split1 != null && split1.length > 1) {
String s = split1[1];
String[] split2 = StringUtils.splitString1(s);
if (split2 != null && split2.length == 5) {
String[] split2 = StringUtils.splitString2(s);
if (split2 != null && split2.length >= 5) {
Integer channel = StringUtils.convert2Int(split2[0]);
Integer resolutionCX = StringUtils.convert2Int(split2[1]);
Integer resolutionCY = StringUtils.convert2Int(split2[2]);
@ -465,12 +477,15 @@ public class SimUtil {
sendmessage = SmsTypeEnum.GET_RESOLUTION.value() + "=" + resolutionCX + "," + resolutionCY + "," + videoCX + "," + videoCY;
} else if (content.contains(SmsTypeEnum.TAKE_PHOTO.value())) {
String[] split = StringUtils.splitString1(content);
if (split != null && split.length == 3) {
if (split != null && split.length > 1) {
String s = split[1];
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 3) {
ifmessageCorrect = true;
Integer channel = StringUtils.convert2Int(split[0]);
Integer preset = StringUtils.convert2Int(split[1]);
Integer type = StringUtils.convert2Int(split[2]);
if (channel != null) {
Integer channel = StringUtils.convert2Int(strings[0]);
Integer preset = StringUtils.convert2Int(strings[1]);
Integer type = StringUtils.convert2Int(strings[2]);
if (channel != null && preset != null && type != null) {
boolean photoOrVideo;
if (type == 0) {
photoOrVideo = true;
@ -479,6 +494,7 @@ public class SimUtil {
}
UpdateSysConfigUtil.takePhotoOrVideo(context, channel, preset, photoOrVideo);
}
}
sendmessage = getSendString(content, ifmessageCorrect);
}
} else if (content.contains(SmsTypeEnum.SET_HEART.value())) {
@ -487,9 +503,12 @@ public class SimUtil {
if (split1 != null && split1.length == 2) {
ifmessageCorrect = true;
String s = split1[1];
Integer integer = StringUtils.convert2Int(s);
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 1) {
Integer integer = StringUtils.convert2Int(strings[0]);
UpdateSysConfigUtil.setHB(context, integer);
}
}
sendmessage = getSendString(content, ifmessageCorrect);
} else if (content.contains(SmsTypeEnum.GET_HEART.value())) {
int hb = UpdateSysConfigUtil.getHB(context);
@ -505,9 +524,12 @@ public class SimUtil {
if (split1 != null && split1.length == 2) {
ifmessageCorrect = true;
String s = split1[1];
Integer integer = StringUtils.convert2Int(s);
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length >= 1) {
Integer integer = StringUtils.convert2Int(strings[0]);
UpdateSysConfigUtil.setTB(context, integer);
}
}
sendmessage = getSendString(content, ifmessageCorrect);
} else if (content.contains(SmsTypeEnum.GET_TP.value())) {
int tb = UpdateSysConfigUtil.getTB(context);
@ -518,9 +540,12 @@ public class SimUtil {
if (split1 != null && split1.length == 2) {
ifmessageCorrect = true;
String s = split1[1];
Integer integer = StringUtils.convert2Int(s);
String[] strings = StringUtils.splitString2(s);
if (strings != null && strings.length > 0) {
Integer integer = StringUtils.convert2Int(strings[0]);
UpdateSysConfigUtil.setPackage(context, integer);
}
}
sendmessage = getSendString(content, ifmessageCorrect);
} else if (content.contains(SmsTypeEnum.GET_PACKAGE.value())) {
int aPackage = UpdateSysConfigUtil.getPackage(context);
@ -557,20 +582,34 @@ public class SimUtil {
return apputcp;
}
private static int getEncrypto(Integer encrypto) {
int appencrypto = -1;
        // In the SMS spec: 1=ciphertext, 2=plaintext, 3=no encryption; in the app: 0=no encryption, 1=plaintext, 2=encrypted, so the value must be converted
if (encrypto != null && (encrypto == 1 || encrypto == 2 || encrypto == 3)) {
if (encrypto == 1) {
appencrypto = 2;
} else if (encrypto == 2) {
appencrypto = 1;
} else if (encrypto == 3) {
appencrypto = 0;
}
}
return appencrypto;
}
public static int getSmsUtcp(Integer apputcp) {
int smsutcp = -1;
        // In the SMS spec: 0=UDP, 1=TCP; in the app: 0=TCP, 1=UDP, so the value must be converted
if (apputcp != null && (apputcp == 0 || apputcp == 1)) {
if (apputcp == 0) {
smsutcp = 1;
} else if (apputcp == 1) {
smsutcp = 0;
}
}
return smsutcp;
}
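A worked example of the conversion above: the SMS spec encodes 0=UDP, 1=TCP while the app config uses 0=TCP, 1=UDP, and anything unrecognised maps to -1.

        int smsTcp = SimUtil.getSmsUtcp(0);     // app value 0 (TCP) -> SMS value 1
        int smsUdp = SimUtil.getSmsUtcp(1);     // app value 1 (UDP) -> SMS value 0
        int smsNone = SimUtil.getSmsUtcp(null); // unknown / not set -> -1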
// private static int getEncrypto(Integer encrypto) {
// int appencrypto = -1;
// // In the SMS spec: 1=ciphertext, 2=plaintext, 3=no encryption; in the app: 0=no encryption, 1=plaintext, 2=encrypted, so the value must be converted
// if (encrypto != null && (encrypto == 1 || encrypto == 2 || encrypto == 3)) {
// if (encrypto == 1) {
// appencrypto = 2;
// } else if (encrypto == 2) {
// appencrypto = 1;
// } else if (encrypto == 3) {
// appencrypto = 0;
// }
// }
// return appencrypto;
// }
private static String getSendString(String content, boolean ifmessageCorrect) {
String sendmessage;
@ -774,19 +813,14 @@ public class SimUtil {
                    } else if (configType == 3) { // JSON array
JSONArray objects = new JSONArray(configValue);
JSONUtils.updateConfigFile(filePath, fileName, configName, configType, objects);
                    } else if (configType == 4) { // JSON object
JSONObject objects = new JSONObject(configValue);
JSONUtils.updateConfigFile(filePath, fileName, configName, configType, objects);
}
} else {
ifmessageCorrect = false;
}
}
// if (rebootMpApp != 0) {
// MicroPhotoContext.restartMpApp(context, "Config Updated From SMS");
// } else {
// Intent intent = new Intent();
// intent.setAction(MicroPhotoContext.ACTION_UPDATE_CONFIGS_MP);
// intent.setPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
// context.sendBroadcast(intent);
// }
} else {
ifmessageCorrect = false;
}
@ -901,6 +935,8 @@ public class SimUtil {
HashMap<String, String> hashMap = ValueTypeUtil.checkFilePathAndName(fileType);
filePath = hashMap.get(UpdateSysConfigUtil.FILEPATH);
fileName = hashMap.get(UpdateSysConfigUtil.FILENAME);
            if (!TextUtils.isEmpty(filePath) && !TextUtils.isEmpty(fileName)) {
File file = new File(filePath + fileName);
if (file.exists()) {

@ -21,7 +21,7 @@ import java.util.Date;
public class SmsReceiver extends BroadcastReceiver {
public static final String SMS_BUNDLE = "pdus";
public static long mLastSmsTime = System.currentTimeMillis();
public static long mLastSmsTime = 0;
private static final SimpleDateFormat mSimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
@Override
@ -34,10 +34,9 @@ public class SmsReceiver extends BroadcastReceiver {
// Log.i("SMS", "RECV " + mSimpleDateFormat.format(dt) + " " + smsMessage.getMessageBody());
if (smsMessage.getTimestampMillis() < mLastSmsTime) {
            if (mLastSmsTime != 0 && smsMessage.getTimestampMillis() < mLastSmsTime) {
Date dt = new Date(smsMessage.getTimestampMillis());
Log.i("SMS", "MSG @" + mSimpleDateFormat.format(dt) + " Dropped: " + smsMessage.getMessageBody());
Log.e("SMS", "MSG @" + mSimpleDateFormat.format(dt) + " Dropped: " + smsMessage.getMessageBody()+" mLastSmsTime:"+mLastSmsTime+" "+mSimpleDateFormat.format(mLastSmsTime));
SimUtil.setSmsMessageRead(context, smsMessage);
continue;
}

@ -166,14 +166,18 @@ public class UpdateSysConfigUtil {
}
    // Update the app's server address settings (server/port/protocol/encryption)
public static void setIP(Context context, String server, int port, int utcp, int encrypto) {
public static void setIP(Context context, String server, Integer port, Integer utcp, Integer encrypto) {
MicroPhotoContext.AppConfig mpAppConfig = MicroPhotoContext.getMpAppConfig(context);
        if (server != null) {
mpAppConfig.server = server;
}
        if (port != null && port != -1) {
mpAppConfig.port = port;
if (utcp != -1) {
}
        if (utcp != null && utcp != -1) {
mpAppConfig.networkProtocol = utcp;
}
if (encrypto != -1) {
        if (encrypto != null && encrypto != -1) {
mpAppConfig.encryption = encrypto;
}
MicroPhotoContext.saveMpAppConfig(context, mpAppConfig);
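A usage sketch for the relaxed signature above (context is assumed to be an Android Context in scope; the literal values are made up): null or -1 arguments leave the corresponding saved fields untouched, so an SMS carrying only one field updates only that field.

        UpdateSysConfigUtil.setIP(context, "192.168.1.10", null, null, null); // update server only
        UpdateSysConfigUtil.setIP(context, null, 7788, null, null);           // update port only
        UpdateSysConfigUtil.setIP(context, null, null, 1, null);              // update protocol only (app value)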
@ -186,8 +190,9 @@ public class UpdateSysConfigUtil {
String server = mpAppConfig.server;
int port = mpAppConfig.port;
int networkProtocol = mpAppConfig.networkProtocol;
int smsUtcp = SimUtil.getSmsUtcp(networkProtocol);
int encryption = mpAppConfig.encryption;
return server + "," + port + "," + networkProtocol + "," + encryption;
return server + "," + port + "," + smsUtcp + "," + encryption;
}
    // Update the app's heartbeat interval
@ -338,23 +343,23 @@ public class UpdateSysConfigUtil {
    // Restart the app
public static void restartApp(Context context) {
Intent intent = new Intent(MicroPhotoContext.ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
intent.setPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
try {
Thread.sleep(200);
} catch (Exception ex) {
ex.printStackTrace();
}
// Intent intent = new Intent(MicroPhotoContext.ACTION_RESTART_MP);
// intent.putExtra("noDelay", 1);
// intent.setPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
//
// context.sendBroadcast(intent);
// try {
// Thread.sleep(200);
// } catch (Exception ex) {
// ex.printStackTrace();
// }
PackageManager packageManager = context.getPackageManager();
intent = packageManager.getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
        Intent intent = packageManager.getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPAPP);
        if (intent == null) {
            // MpAPP is not installed; nothing to restart
            return;
        }
intent.putExtra("noDelay", 1);
// intent.putExtra("reboot", 1);
intent.addFlags(/*Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK | */Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK );
context.startActivity(intent);
}
@ -372,8 +377,8 @@ public class UpdateSysConfigUtil {
List<Long> schedules = new ArrayList<>();
long ts = System.currentTimeMillis() / 1000;
long val = 0;
val |= (channel << 16);
val |= (preset << 8);
val |= (channel << 12);
val |= (preset << 4);
val |= photoOrVideo ? 0L : 1L;
schedules.add(Long.valueOf(val));
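A worked example of the new packing: the channel now occupies bits 12 and up, the preset bits 4-11, and bit 0 distinguishes photo (0) from video (1).

        long channel = 2, preset = 1;
        boolean photoOrVideo = true;   // true = photo, false = video
        long val = 0;
        val |= (channel << 12);        // 0x2000
        val |= (preset << 4);          // 0x0010
        val |= photoOrVideo ? 0L : 1L; // photo -> bit 0 clear
        // val == 0x2010 (8208); the same channel/preset as a video request would give 0x2011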

@ -1,10 +1,13 @@
package com.xypower.mpmaster.sms;
import android.os.Environment;
import com.xypower.common.MicroPhotoContext;
import org.json.JSONArray;
import java.io.File;
import java.util.HashMap;
public class ValueTypeUtil {
@ -37,18 +40,34 @@ public class ValueTypeUtil {
*
* */
public static HashMap checkFilePathAndName( int fileType) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
HashMap<String, String> hashMap = new HashMap<>();
String filePath = null;
String fileName = null;
switch (fileType) {
case 1:
filePath =MicroPhotoContext.PACKAGE_NAME_MPAPP + "/data/";
                filePath = path + MicroPhotoContext.PACKAGE_NAME_MPAPP + "/data/";
fileName = "App.json";
break;
case 2:
filePath =MicroPhotoContext.PACKAGE_NAME_MPMASTER + "/data/";
fileName = "Master.json";
break;
case 81:
case 82:
case 83:
case 84:
case 85:
case 86:
case 87:
case 88:
case 89:
                filePath = path + MicroPhotoContext.PACKAGE_NAME_MPAPP + "/data/schedules/";
fileName = Integer.toString(fileType - 80);
break;
case 91:
case 92:
case 93:
@ -58,7 +77,7 @@ public class ValueTypeUtil {
case 97:
case 98:
case 99:
filePath =MicroPhotoContext.PACKAGE_NAME_MPAPP + "/data/channels/";
                filePath = path + MicroPhotoContext.PACKAGE_NAME_MPAPP + "/data/channels/";
fileName = Integer.toString(fileType - 90) + ".json";
break;
default:
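A worked example of the mapping above, assuming PACKAGE_NAME_MPAPP is "com.xypower.mpapp" as used elsewhere in this commit (the external-storage root varies by device):

        HashMap<String, String> entry = ValueTypeUtil.checkFilePathAndName(95);
        String filePath = entry.get(UpdateSysConfigUtil.FILEPATH); // <external-storage>/com.xypower.mpapp/data/channels/
        String fileName = entry.get(UpdateSysConfigUtil.FILENAME); // "5.json"
        // fileType 83 would instead resolve to .../com.xypower.mpapp/data/schedules/ with fileName "3"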
