Compare commits

..

No commits in common. 'main' and 'TempBranch' have entirely different histories.

@ -4,8 +4,8 @@ plugins {
// 10,00,000 major-minor-build
def AppMajorVersion = 1
def AppMinorVersion = 3
def AppBuildNumber = 196
def AppMinorVersion = 1
def AppBuildNumber = 4
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
@ -82,7 +82,6 @@ android {
enable isReleaseTask
reset()
include "armeabi-v7a", "arm64-v8a"
// include "arm64-v8a"
universalApk false
}
}
@ -91,14 +90,12 @@ android {
variant.outputs.all { output ->
if (outputFileName.endsWith('.apk')) {
def buildTypeFlag = "dbg"
def prevFileName = "mpapp"
if(variant.buildType.name.equals('release')) {
buildTypeFlag = "rel"
}
def abi = output.getFilter(com.android.build.OutputFile.ABI)
if (abi == null) abi = "all"
if (abi.contains("v7a")) prevFileName = "N938"
def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk"
def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk"
outputFileName = fileName
}
}
@ -126,10 +123,10 @@ android {
dependencies {
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.legacy:legacy-support-v13:1.0.0'
// implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.0.0'
// implementation "androidx.core:core:1.10.0" // 使
implementation 'androidx.fragment:fragment:1.3.6'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
implementation 'com.google.android.material:material:1.8.0'
implementation project(path: ':common')

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -1,8 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:sharedUserId="com.xypower.mp"
tools:ignore="Deprecated">
xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
@ -12,10 +10,9 @@
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.MANAGE_NETWORK_POLICY"
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.READ_PRIVILEGED_PHONE_STATE"
tools:ignore="ProtectedPermissions" />
@ -58,7 +55,6 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
<uses-permission android:name="android.permission.USB_PERMISSION" />
<uses-permission
android:name="android.permission.DEVICE_POWER"
tools:ignore="ProtectedPermissions" />
@ -67,23 +63,14 @@
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.START_ACTIVITIES_FROM_BACKGROUND"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
tools:ignore="ProtectedPermissions" /> <!-- WiFi AP startTethering -->
<uses-permission
android:name="android.permission.TETHER_PRIVILEGED"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.CONNECTIVITY_INTERNAL"
tools:ignore="ProtectedPermissions" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="com.mediatek.camera.feature.mfnr" />
<uses-permission android:name="android.hardware.usb.accessory" />
<uses-feature android:name="android.hardware.usb.host" />
<uses-feature
android:name="android.hardware.telephony"
android:required="false" />
<queries>
<provider
@ -99,10 +86,6 @@
<intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent>
<intent>
<action android:name="android.intent.action.TIME_CHANGED" />
</intent>
<package android:name="com.xypower.mplive" />
</queries>
<application
@ -116,10 +99,7 @@
android:supportsRtl="true"
android:theme="@style/Theme.MicroPhoto"
tools:targetApi="28">
<activity
android:name=".LogActivity"
android:exported="false"
android:screenOrientation="landscape" />
<activity
android:name=".video.RawActivity"
android:exported="false"
@ -174,10 +154,11 @@
<category android:name="android.intent.category.default" />
</intent-filter>
</service>
<service android:name=".FloatingWindow" />
<receiver
android:name=".MicroPhotoService$AlarmReceiver"
android:exported="true" >
</receiver>
android:exported="true" />
<receiver
android:name=".BootBroadcastReceiver"
android:enabled="true"
@ -191,7 +172,17 @@
</intent-filter>
</receiver>
<receiver android:name=".NetworkChangedReceiver" />
<receiver
android:name=".ScreenActionReceiver"
android:exported="true">
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED" />
<action android:name="android.intent.action.SCREEN_ON" />
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.USER_UNLOCKED" />
</intent-filter>
</receiver>
<receiver
android:name="com.xypower.common.UpdateReceiver"
android:enabled="true"
@ -204,17 +195,11 @@
<data android:scheme="package" />
</intent-filter>
</receiver>
<receiver
android:name=".HeartBeatResponseReceiver"
android:enabled="true"
android:exported="true">
<intent-filter >
<action android:name="com.systemui.ACTION_HEARTBEAT_RESPONSE" />
</intent-filter>
</receiver>
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:screenOrientation="landscape">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

@ -1,227 +0,0 @@
#!/system/bin/sh
# ==============================================
# Configuration parameters - modify as needed
# ==============================================
ETH_IP="192.168.68.91" # Ethernet IP address
ETH_NETMASK="24" # Subnet mask (CIDR format)
ETH_NETWORK="192.168.68.0" # Network address
ETH_BROADCAST="192.168.68.255" # Broadcast address
ETH_GATEWAY="192.168.68.1" # Default gateway
ROUTE_TABLE="20" # Routing table number
MAX_INIT_WAIT=150 # Maximum seconds to wait for ethernet interface
MAX_UP_WAIT=10 # Maximum seconds to wait for interface to come UP
MAX_ROUTE_WAIT=5 # Maximum seconds to wait for routing rules
# For debugging only - comment out in production
# set -x
ANDROID_VERSION=$(getprop ro.build.version.release 2>/dev/null | cut -d '.' -f1)
# Record script start time
SCRIPT_START=$(date +%s)
# Cleanup function - handles unexpected interruptions
cleanup() {
echo "Script interrupted, cleaning up..." >&2
# Add additional cleanup code here if needed
exit 1
}
trap cleanup INT TERM
# Get script directory for finding tools like ethtool
SCRIPT_PATH="$0"
# Ensure path is absolute
case "$SCRIPT_PATH" in
/*) ;; # Already absolute path
*) SCRIPT_PATH="$PWD/$SCRIPT_PATH" ;;
esac
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
echo "Script directory detected as: $SCRIPT_DIR"
# Only configure rp_filter for eth0 interface
echo 0 > /proc/sys/net/ipv4/conf/eth0/rp_filter 2>/dev/null || true
# Wait for eth0 interface to appear
WAITED=0
while [ $WAITED -lt $MAX_INIT_WAIT ]; do
if [ -d "/sys/class/net/eth0" ]; then
echo "eth0 found after $WAITED seconds"
break
fi
echo "Wait eth0... ($WAITED/$MAX_INIT_WAIT)"
sleep 0.1
WAITED=$((WAITED+1))
done
# Check if eth0 exists
if ! [ -d "/sys/class/net/eth0" ]; then
echo "Error: eth0 not exists" >&2
exit 1
fi
# Check physical connection status
if [ -f "/sys/class/net/eth0/carrier" ]; then
CARRIER=$(cat /sys/class/net/eth0/carrier)
echo "Physical connection status: $CARRIER (1=connected, 0=disconnected)"
if [ "$CARRIER" != "1" ]; then
echo "Warning: Ethernet physical connection may have issues, please check the cable" >&2
fi
fi
# Clear previous configuration
/system/bin/ip link set eth0 down
/system/bin/ip addr flush dev eth0
/system/bin/ip route flush dev eth0
/system/bin/ip route flush table $ROUTE_TABLE
/system/bin/ip rule del to $ETH_NETWORK/$ETH_NETMASK 2>/dev/null || true
# Configure physical layer with ethtool (while interface is DOWN)
if [ -x "$SCRIPT_DIR/ethtool" ]; then
echo "Using ethtool from script directory: $SCRIPT_DIR/ethtool"
"$SCRIPT_DIR/ethtool" -s eth0 speed 10 duplex full autoneg off
# Try alternative path next
elif [ -x "/data/data/com.xypower.mpapp/files/ethtool" ]; then
echo "Configuring eth0 to 10Mbps full duplex..."
/data/data/com.xypower.mpapp/files/ethtool -s eth0 speed 10 duplex full autoneg off
else
echo "Warning: ethtool not found, falling back to sysfs configuration" >&2
# Try sysfs configuration as fallback
if [ -f "/sys/class/net/eth0/speed" ]; then
echo "off" > /sys/class/net/eth0/autoneg 2>/dev/null || true
echo "10" > /sys/class/net/eth0/speed 2>/dev/null || true
echo "full" > /sys/class/net/eth0/duplex 2>/dev/null || true
fi
fi
# ====================================================
# MTK Android 9 IP configuration with loss prevention
# ====================================================
# Configure IP address first while interface is DOWN
echo "Setting IP address while interface is DOWN..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
PRE_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration before UP: $PRE_UP_IP (1=configured, 0=missing)"
# Enable interface and wait for UP
echo "Bringing up interface..."
/system/bin/ip link set eth0 up
if [ "$ANDROID_VERSION" = "9" ]; then
sleep 3
else
# Use standard configuration for other devices
sleep 1
fi
# Check if IP was lost after interface UP (common issue on MTK devices)
POST_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration after UP: $POST_UP_IP (1=retained, 0=lost)"
# IP address lost detection and recovery
if [ "$PRE_UP_IP" = "1" ] && [ "$POST_UP_IP" = "0" ]; then
echo "Warning: IP address was lost after bringing interface up - MTK issue detected"
echo "Reapplying IP configuration..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
# Check if reapplied configuration worked
FIXED_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP reapplication result: $FIXED_IP (1=success, 0=still missing)"
# If standard method fails, try MTK-specific approaches
if [ "$FIXED_IP" = "0" ]; then
echo "Standard IP configuration failed, trying MTK-specific methods"
# Try ifconfig if available (works better on some MTK devices)
if command -v ifconfig >/dev/null 2>&1; then
echo "Using ifconfig method..."
ifconfig eth0 $ETH_IP netmask 255.255.255.0 up
sleep 1
fi
# Try Android's netd service if available
if [ -x "/system/bin/ndc" ]; then
echo "Using MTK netd service..."
/system/bin/ndc network interface setcfg eth0 $ETH_IP 255.255.255.0 up
sleep 1
fi
fi
fi
# Use loop to wait for interface UP instead of fixed sleep
WAITED=0
while [ $WAITED -lt $MAX_UP_WAIT ]; do
# Check both link status and IP configuration
IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "1" ]; then
echo "Interface is UP with correct IP after $WAITED seconds"
break
fi
echo "Waiting for interface UP with IP... ($WAITED/$MAX_UP_WAIT)"
# If interface is UP but IP is missing, reapply IP
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "0" ]; then
echo "Interface UP but IP missing, reapplying IP..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
fi
sleep 0.5
WAITED=$((WAITED+1))
done
# Final status check
FINAL_IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
FINAL_IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$FINAL_IF_STATUS" != "1" ] || [ "$FINAL_IP_STATUS" != "1" ]; then
echo "Warning: Failed to achieve stable interface state with IP" >&2
echo "Final interface status: $FINAL_IF_STATUS (1=UP, 0=DOWN)"
echo "Final IP status: $FINAL_IP_STATUS (1=configured, 0=missing)"
/system/bin/ip addr show eth0
else
echo "Successfully configured eth0 with IP $ETH_IP"
fi
# First add to main routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link
# Then add to specified routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link table $ROUTE_TABLE
ADD_ROUTE_STATUS=$?
if [ $ADD_ROUTE_STATUS -eq 0 ]; then
echo "Add route successfully"
else
echo "Failed to add route: $ADD_ROUTE_STATUS" >&2
fi
# Only clear ARP and neighbor cache for eth0
/system/bin/ip neigh flush dev eth0
# Add routing rules - only flush cache once after rule is added
/system/bin/ip rule add from all to $ETH_NETWORK/$ETH_NETMASK lookup $ROUTE_TABLE prio 1000
/system/bin/ip route flush cache dev eth0
# Only enable forwarding for eth0 interface
echo 1 > /proc/sys/net/ipv4/conf/eth0/forwarding 2>/dev/null || true
# Wait for routing rules to take effect - using loop check instead of fixed wait
WAITED=0
while [ $WAITED -lt $MAX_ROUTE_WAIT ]; do
if /system/bin/ip rule | grep -q "$ETH_NETWORK/$ETH_NETMASK"; then
echo "Routing rules are now effective after $WAITED seconds"
break
fi
echo "Waiting for routing rules to take effect... ($WAITED/$MAX_ROUTE_WAIT)"
sleep 0.5
WAITED=$((WAITED+1))
done
# Display execution time
SCRIPT_END=$(date +%s)
TOTAL_TIME=$((SCRIPT_END - SCRIPT_START))
echo "Total script execution time: $TOTAL_TIME seconds"
exit 0

Binary file not shown.

Binary file not shown.

@ -14,27 +14,6 @@ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ffunction-sections -fdata-sections -Wformat
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
# SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections")
add_definitions(-DUSING_ETHERNET)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
# add_definitions(-DUSING_PTZ)
endif()
# OUTPUT_DBG_INFO:
add_definitions(-DOUTPUT_DBG_INFO)
# OUTPUT_SOCKET_DBG_INFO Depends ON OUTPUT_DBG_INFO
# TerminalService.cpp
# add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO
# Database.cpp
# add_definitions(-DOUTPUT_DB_DBG_INFO)
add_definitions(-DUSING_FFMPEG)
IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG)
ELSE()
@ -52,19 +31,26 @@ add_definitions(-DSQLITE_THREADSAFE=1)
add_definitions(-DLIBRAW_NO_MEMPOOL_CHECK=1)
# add_definitions(-DHDRPLUS_NO_DETAILED_OUTPUT=1)
add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c
# add_definitions(-DUSING_NRSEC)
# add_definitions(-DUSING_NRSEC_VPN)
add_definitions(-DUSING_NRSEC)
add_definitions(-DUSING_NRSEC_VPN)
# add_definitions(-DUSING_CERT)
# add_definitions(-DUSING_DOWSE)
# OUTPUT_CAMERA_DBG_INFO: CARERA
# add_definitions(-DOUTPUT_CAMERA_DBG_INFO)
add_definitions(-DALIGN_HB_TIMER_TO_PHOTO)
add_definitions(-DENABLE_3V3_ALWAYS)
add_definitions(-DCURL_STATICLIB)
add_definitions(-DUSING_HDRPLUS)
add_definitions(-DUSING_EXEC_HDRP=0)
#set(USING_EXEC_HDRP 1)
add_definitions(-DUSING_EXEC_HDRP=1)
set(USING_EXEC_HDRP 1)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
endif()
# include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED )
@ -95,8 +81,6 @@ set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
find_package(ncnn REQUIRED)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include)
@ -150,19 +134,9 @@ include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
SET(ZLMEDIAKIT_LIBS "")
SET(STREAMING_SRCS "")
add_definitions(-DDISABLE_RTTI)
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLMediaKit )
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp )
#SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
#SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp
@ -180,7 +154,6 @@ SET(HDRPLUS2_SOURCES
hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(HDRPLUS2_SOURCES )
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
@ -190,6 +163,11 @@ SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
@ -197,12 +175,66 @@ SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
# SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp)
include_directories(${YAMC_INC_DIR})
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
include_directories(${ASIO_ROOT}/include)
# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c )
# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
add_library( # Sets the name of the library.
sqlite3
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${SQLITE_SRC_DIR}/sqlite3.c
)
INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
file(GLOB BREAKPAD_SOURCES_COMMON
native-lib.cpp
${BREAKPAD_ROOT}/client/linux/crash_generation/crash_generation_client.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/thread_info.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/ucontext_reader.cc
${BREAKPAD_ROOT}/client/linux/handler/exception_handler.cc
${BREAKPAD_ROOT}/client/linux/handler/minidump_descriptor.cc
${BREAKPAD_ROOT}/client/linux/log/log.cc
${BREAKPAD_ROOT}/client/linux/microdump_writer/microdump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_ptrace_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/minidump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/pe_file.cc
${BREAKPAD_ROOT}/client/minidump_file_writer.cc
${BREAKPAD_ROOT}/common/convert_UTF.cc
${BREAKPAD_ROOT}/common/md5.cc
${BREAKPAD_ROOT}/common/string_conversion.cc
${BREAKPAD_ROOT}/common/linux/elfutils.cc
${BREAKPAD_ROOT}/common/linux/file_id.cc
${BREAKPAD_ROOT}/common/linux/guid_creator.cc
${BREAKPAD_ROOT}/common/linux/linux_libc_support.cc
${BREAKPAD_ROOT}/common/linux/memory_mapped_file.cc
${BREAKPAD_ROOT}/common/linux/safe_readlink.cc
)
file(GLOB BREAKPAD_ASM_SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S)
set_property(SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S PROPERTY LANGUAGE C)
# set_source_files_properties(${BREAKPAD_ASM_SOURCE} PROPERTIES LANGUAGE C)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
breakpad
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${BREAKPAD_SOURCES_COMMON}
${BREAKPAD_ASM_SOURCE}
)
INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR})
@ -296,8 +328,10 @@ include_directories(${TERM_CORE_ROOT})
add_library( # Sets the name of the library.
jsoncpp
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${JSONCPP_SOURCES}
)
@ -338,8 +372,8 @@ add_library( # Sets the name of the library.
# Provides a relative path to your source file(s).
GPIOControl.cpp
MicroPhoto.cpp
TerminalDevice.cpp
PhoneDevice.cpp
PtzController.cpp
# PhoneDevice2.cpp
Camera.cpp
Camera2Reader.cpp
@ -352,14 +386,6 @@ add_library( # Sets the name of the library.
ncnn/yolov5ncnn.cpp
netcamera/httpclient.cpp
netcamera/VendorCtrl.cpp
netcamera/YuShiCtrl.cpp
netcamera/HangYuCtrl.cpp
netcamera/HikonCtrl.cpp
${STREAMING_SRCS}
#serial/WeatherComm.cpp
# camera2/OpenCVFont.cpp
@ -378,11 +404,9 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/SpecData_I1_JS.cpp
${TERM_CORE_ROOT}/SpecData_I1_HN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN_TY.cpp
${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp
${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp
${TERM_CORE_ROOT}/SpecData_I1_NX.cpp
${TERM_CORE_ROOT}/SpecData_I1_SX_ZY.cpp
${TERM_CORE_ROOT}/SpecData_XY.cpp
${TERM_CORE_ROOT}/SpecData_ZJ.cpp
${TERM_CORE_ROOT}/SpecData_NW.cpp
@ -400,18 +424,14 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Client/Terminal_AH.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_TY.cpp
${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp
${TERM_CORE_ROOT}/Client/Terminal_JS.cpp
${TERM_CORE_ROOT}/Client/Terminal_NX.cpp
${TERM_CORE_ROOT}/Client/Terminal_SX_ZY.cpp
${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp
${TERM_CORE_ROOT}/Client/Terminal_NW.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
${TERM_CORE_ROOT}/Client/Database.cpp
# ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
)
@ -434,15 +454,20 @@ find_library( # Sets the name of the path variable.
target_link_libraries( # Specifies the target library.
${PROJECT_NAME}
jsoncpp
freetype
breakpad
# breakpad
# Links the target library to the log library
# included in the NDK.
avcodec avfilter avformat avutil swresample swscale x264
${log-lib}
android camera2ndk mediandk z curl
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} ${ZLMEDIAKIT_LIBS}
android camera2ndk mediandk z
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED}
)
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")

@ -1,3 +1,4 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*

@ -371,7 +371,7 @@ namespace cv {
delete userData;
#if defined(USING_HB)
hb_buffer_destroy(hb_buffer);
#endif // 0
#endif 0
}
// https://freetype.org/freetype2/docs/tutorial/example2.cpp
@ -630,7 +630,7 @@ namespace cv {
#if defined(USING_HB)
hb_buffer_destroy(hb_buffer);
#endif // 0
#endif 0
}
Size FreeType2Impl::getTextSize(

@ -2509,8 +2509,8 @@ void DngCreator::writeInputStream(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset);
"rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream;
@ -2578,8 +2578,8 @@ void DngCreator::writeInputBuffer(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset);
"rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream;

@ -11,7 +11,6 @@
#include <sys/mman.h>
#include <unistd.h>
#include <climits>
#include "GPIOControl.h"
@ -21,143 +20,88 @@
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define MAX_STRING_LEN 32
std::mutex GpioControl::m_locker;
std::mutex GpioControl::m_gpioLocker;
std::vector<GpioControl::ITEM> GpioControl::m_items;
bool GpioControl::m_cameraPowerStatus = false;
#define ENABLE_GPIO_TRACING
#ifdef ENABLE_GPIO_TRACING
class GpioDebugLogger
typedef struct
{
public:
GpioDebugLogger(int cmd, int value)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_val." + std::to_string(value);
CreateEmptyFile(m_path + ".enter");
}
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
GpioDebugLogger(int cmd)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_get";
CreateEmptyFile(m_path + ".enter");
}
~GpioDebugLogger()
{
uint64_t ts = (GetMicroTimeStamp() - m_startTime);
if (ts > 1000)
{
CreateEmptyFile(m_path + ".leave." + std::to_string(ts));
}
else
{
std::string path = m_path + ".enter";
std::remove(path.c_str());
}
}
private:
std::string m_path;
uint64_t m_startTime;
};
#endif
std::mutex GpioControl::m_locker;
std::vector<std::pair<int, uint32_t>> GpioControl::m_references;
size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
void GpioControl::setInt(int cmd, int value)
{
size_t oldRef = 0;
size_t references = 1;
std::vector<ITEM>::iterator it;
int res = 0;
int fd = -1;
time_t now = time(NULL);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
int res = 0;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
// check res???
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == param.cmd)
{
oldRef = it->references;
it->references++;
// it->closeTime = 0;
references = it->references;
if(it->openTime == 0)
it->openTime = now;
SetCamerastatus(it->cmd, true);
break;
}
}
if (it == m_items.end())
{
oldRef = 0;
ITEM item = {param.cmd, references, now};
m_items.push_back(item);
SetCamerastatus(param.cmd, true);
}
uint32_t references = (value != 0) ? 1 : 0;
std::vector<std::pair<int, uint32_t> >::iterator it;
if (oldRef == 0/* || param.cmd != CMD_SET_3V3_PWR_EN*/)
if (value)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(param.cmd, param.value);
#endif
m_gpioLocker.lock();
m_locker.lock();
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
// check res???
for (it = m_references.begin(); it != m_references.end(); ++it)
{
if (it->first == cmd)
{
it->second++;
references = it->second;
break;
}
}
if (it == m_references.end())
{
m_references.push_back(std::pair<int, uint32_t >(cmd, references));
}
}
m_gpioLocker.unlock();
#ifdef _DEBUG
ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references);
#endif
std::this_thread::sleep_for(std::chrono::milliseconds(100));
m_locker.unlock();
}
return references;
}
void GpioControl::setInt(int cmd, int value)
{
IOT_PARAM param = { cmd, value, 0 };
// param.cmd = cmd;
// param.value = value;
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd, value);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if (fd > 0)
else
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, value, param.result);
#endif
m_locker.lock();
for (it = m_references.begin(); it != m_references.end(); ++it)
{
if (it->first == cmd)
{
if (it->second > 0)
{
it->second--;
}
references = it->second;
break;
}
}
if (references == 0)
{
fd = open(GPIO_NODE_MP, O_RDONLY);
if (fd > 0) {
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
}
}
m_locker.unlock();
}
m_gpioLocker.unlock();
}
int GpioControl::getInt(int cmd)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -166,37 +110,32 @@ int GpioControl::getInt(int cmd)
param.cmd = cmd;
ioctl(fd, IOT_PARAM_READ, &param);
#ifdef _DEBUG
ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
ALOGI("getInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
#endif
close(fd);
m_gpioLocker.unlock();
return param.value;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setLong(int cmd, long value)
{
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value2 = value;
// LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
}
m_gpioLocker.unlock();
}
long GpioControl::getLong(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -206,37 +145,32 @@ long GpioControl::getLong(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
m_gpioLocker.unlock();
return param.value2;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setString(int cmd, const std::string& value)
{
IOT_PARAM param;
int fd = open(GPIO_NODE_MP, O_RDONLY);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
param.cmd = cmd;
memset(param.str, 0, MAX_STRING_LEN);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
memcpy(param.str, value.c_str(), len);
// LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
}
m_gpioLocker.unlock();
return;
}
std::string GpioControl::getString(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -246,273 +180,8 @@ std::string GpioControl::getString(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
m_gpioLocker.unlock();
return std::string(param.str);
}
m_gpioLocker.unlock();
return "";
}
/////////////////////////// Power Control /////////////////////////////////
size_t GpioControl::TurnOn(int cmd)
{
IOT_PARAM param = { cmd, 1, 0 };
// param.cmd = cmd;
// param.value = value;
m_locker.lock();
size_t ref = turnOnImpl(param);
m_locker.unlock();
return ref;
}
// Take one reference on every power line listed in `cmds`.
// Entries equal to 0 are placeholders and are skipped. Always returns 0.
size_t GpioControl::TurnOn(const std::vector<int>& cmds)
{
    IOT_PARAM param = { 0, 1, 0 };
    m_locker.lock();
    for (int cmd : cmds)
    {
        if (cmd == 0)
        {
            continue;   // placeholder entry — nothing to switch
        }
        param.cmd = cmd;
        turnOnImpl(param);
    }
    m_locker.unlock();
    return 0;
}
// Force the reference count of `cmd` to 0 and power the line off right away,
// ignoring outstanding references and any delayed-close holders.
// Always returns 0.
//
// Fix: removed the dead `time_t ts = time(NULL)` — it was never read and
// cost a needless syscall.
size_t GpioControl::TurnOffImmediately(int cmd)
{
    size_t ref = 0;   // only reported by the debug log; the loop never updates it
    std::vector<ITEM>::iterator it;
    m_locker.lock();
    for (it = m_items.begin(); it != m_items.end(); ++it)
    {
        if (it->cmd == cmd)
        {
            if (it->references > 0)
            {
                it->references = 0;
                SetCamerastatus(cmd, false);
                setInt(it->cmd, 0);   // drive the line low immediately
                it->openTime = 0;
            }
            break;
        }
    }
    m_locker.unlock();
#ifdef _DEBUG
    ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
    return 0;
}
// Drop one reference from the power line for `cmd`; when the count reaches 0
// the line is switched off. Always returns 0.
//
// When delayedCloseTime > 0, a temporary PowerControl takes an extra
// reference and a detached thread releases it after the delay, so the line
// stays powered for that long even though the count below drops now.
//
// Fix: removed the dead `time_t ts` computation — it duplicated the
// `delayedCloseTime > 0` check and its result was never read.
size_t GpioControl::TurnOff(int cmd, uint32_t delayedCloseTime/* = 0*/)
{
    if (delayedCloseTime > 0)
    {
        std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmd);
        std::thread th([delayedCloseTime, powerCtrl]() mutable {
            std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
            powerCtrl.reset();   // dtor drops the extra reference
        });
        th.detach();
    }
    size_t ref = 0;   // only reported by the debug log; the loop never updates it
    std::vector<ITEM>::iterator it;
    m_locker.lock();
    for (it = m_items.begin(); it != m_items.end(); ++it)
    {
        if (it->cmd == cmd)
        {
            if (it->references > 0)
            {
                it->references--;
                if (it->references == 0)
                {
                    SetCamerastatus(cmd, false);
                    setInt(it->cmd, 0);
                    it->openTime = 0;
                }
            }
            break;
        }
    }
    m_locker.unlock();
#ifdef _DEBUG
    ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
    return 0;
}
// Drop one reference from each command in `cmds`, switching a line off when
// its count reaches 0. Commands are walked in reverse so lines are released
// in the opposite order to how they were turned on. Always returns 0.
//
// When delayedCloseTime > 0, a temporary PowerControl holds an extra
// reference on all the lines and a detached thread releases it after the
// delay, keeping them powered in the meantime.
//
// Fix: removed the dead `time_t ts` computation (result never read).
size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseTime/* = 0*/)
{
    if (delayedCloseTime > 0)
    {
        std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmds);
        std::thread th([delayedCloseTime, powerCtrl]() mutable {
            std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
            powerCtrl.reset();   // dtor drops the extra references
        });
        th.detach();
    }
    std::vector<ITEM>::iterator it;
    std::vector<int>::const_reverse_iterator itCmd;
    m_locker.lock();
    for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd)
    {
        for (it = m_items.begin(); it != m_items.end(); ++it)
        {
            if (it->cmd == *itCmd)
            {
                if (it->references > 0)
                {
                    it->references--;
                    if (it->references == 0)
                    {
                        SetCamerastatus(it->cmd, false);
                        setInt(it->cmd, 0);
                        it->openTime = 0;
                    }
                }
                break;
            }
        }
    }
    m_locker.unlock();
    return 0;
}
// Per-command variant: each pair is (cmd, delayedCloseTime).
// First schedule a delayed holder for every entry that requests one, then
// drop one reference per command. Always returns 0.
//
// Fix: loops now pair cbegin()/crbegin() with cend()/... consistently —
// the original mixed cbegin() with end().
size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{
    for (auto itCmd = cmds.cbegin(); itCmd != cmds.cend(); ++itCmd)
    {
        if (itCmd->second > 0)
        {
            uint32_t delayedCloseTime = itCmd->second;
            std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(itCmd->first);
            std::thread th([delayedCloseTime, powerCtrl]() mutable {
                std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
                powerCtrl.reset();   // dtor drops the extra reference
            });
            th.detach();
        }
    }
    std::vector<ITEM>::iterator it;
    std::vector<std::pair<int, uint32_t> >::const_iterator itCmd;
    m_locker.lock();
    for (itCmd = cmds.cbegin(); itCmd != cmds.cend(); ++itCmd)
    {
        for (it = m_items.begin(); it != m_items.end(); ++it)
        {
            if (it->cmd == itCmd->first)
            {
                if (it->references > 0)
                {
                    it->references--;
                    if (it->references == 0)
                    {
                        SetCamerastatus(it->cmd, false);
                        setInt(it->cmd, 0);
                        it->openTime = 0;
                    }
                }
                break;
            }
        }
    }
    m_locker.unlock();
    return 0;
}
// Cache the camera power state: only the board-specific camera-power command
// updates the flag (PIC1 power on N938, PTZ power on PTZ builds); every other
// command is ignored. On builds with neither macro this is a no-op.
// Always returns true.
bool GpioControl::SetCamerastatus(int cmd, bool status)
{
#ifdef USING_N938
if(cmd == CMD_SET_PIC1_POWER)
m_cameraPowerStatus = status;
#endif
#ifdef USING_PTZ
if(cmd == CMD_SET_PTZ_PWR_ENABLE)
{
m_cameraPowerStatus = status;
}
#endif
return true;
}
// Return the cached camera power flag last stored by SetCamerastatus().
bool GpioControl::GetCamerastatus()
{
return m_cameraPowerStatus;
}
// Report whether the camera self-test window has elapsed: true when the
// board's camera-power command is referenced, was opened, and has been on
// for at least `wait_time` seconds.
// On builds with neither USING_N938 nor USING_PTZ, cmd stays 0 and this
// always returns false.
// NOTE(review): m_items is read here without taking m_locker, while the
// TurnOn/TurnOff paths mutate it under the lock — confirm callers serialize.
bool GpioControl::GetSelftestStatus(time_t wait_time)
{
int cmd = 0;
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0 && it->openTime!=0 && (now - it->openTime >= wait_time))
{
return true;// self-test finished
}
}
return false;
}
// Return the seconds left in the camera self-test window for the board's
// camera-power command (0 when the window has elapsed or the command is not
// referenced). On builds with neither macro, cmd stays 0 and this returns 0.
// NOTE(review): m_items is read without m_locker here — same caveat as
// GetSelftestStatus.
time_t GpioControl::GetSelfTestRemain(time_t wait_time)
{
int cmd = 0;
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0)
{
// elapsed time since power-on, clamped so the result is never negative
time_t remaintime = (now - it->openTime);
remaintime = (wait_time > remaintime) ? (wait_time - remaintime) : 0;
return remaintime;// remaining self-test wait time
}
}
return 0;
}

@ -12,13 +12,6 @@
#include <vector>
#include <utility>
#include <SemaphoreEx.h>
#include <LogThread.h>
#ifndef USING_N938
#ifndef USING_PTZ // MicroPhoto
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
#define CMD_GET_KEY_LOCKSTATE 103
@ -27,188 +20,171 @@
#define CMD_SET_NETWORK_STATE 106
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
//#define CMD_GET_CHARGING_VOL_STATE 110
//#define CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE 111
#define CMD_GET_CHARGING_VOL_STATE 110
#define CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE 111
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
//#define CMD_GET_CHARGING_POWER_STATE 113
//#define CMD_GET_CHARGING_CURRENT_STATE 114
//#define CMD_GET_BAT_VOL_STATE 115
//#define CMD_GET_BAT_SHUNT_VOLTAGE_STATE 116
#define CMD_GET_CHARGING_POWER_STATE 113
#define CMD_GET_CHARGING_CURRENT_STATE 114
#define CMD_GET_BAT_VOL_STATE 115
#define CMD_GET_BAT_SHUNT_VOLTAGE_STATE 116
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
//#define CMD_GET_BAT_POWER_STATE 118
//#define CMD_GET_BAT_CURRENT_STATE 119
#define CMD_GET_BAT_POWER_STATE 118
#define CMD_GET_BAT_CURRENT_STATE 119
#define CMD_SET_485_STATE 121
#define CMD_SET_SPI_MODE 123
#define CMD_SET_SPI_BITS_PER_WORD 124
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_PWM_BEE_STATE 126 // Removed
#define CMD_SET_ALM_MODE 128 // Removed
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_12V_EN_STATE 133
#if 1
#define CMD_SET_SPI_POWER 129
#define CMD_SET_3V3_PWR_EN 132
#endif
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_NEW_OTG_STATE 507
#else // defined(USING_PTZ)
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#if 0
#define CMD_SET_SPI_POWER 129
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_12V_EN_STATE 0 // TO BE ADDED
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_SET_SPI_MODE 0 // TO BE ADDED
#define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED
#define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED
#define CMD_SET_485_ENABLE 131
#define CMD_SET_3V3_PWR_EN 132
// #define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506
#define CMD_SET_SENSOR4_ENABLE 510
#define CMD_SET_SENSOR1_PWR_ENABLE 513
#define CMD_SET_SENSOR2_PWR_ENABLE 514
#define CMD_SET_SENSOR3_PWR_ENABLE 509
#define CMD_SET_SENSOR4_PWR_ENABLE 525
#define CMD_SET_PHOTO_IN 520
#define CMD_SET_PHOTO_OUT 515
#define CMD_SET_ADC_ENABLE 500
#define CMD_SET_MIPI_SWITCH 501
#define CMD_SET_CAM_RSTN1 502
#define CMD_SET_CAM_RSTN0 503
#define CMD_SET_SD_DECT 507
#define CMD_SET_PTZ_PWR_ENABLE 508
#define CMD_SET_RTC_ENABLE 511
#define CMD_SET_100M_ENABLE 518
#define CMD_SET_100M_SWITCH_PWR_ENABLE 519
#define CMD_SET_AM_POWER_ENABLE 521
#define CMD_SET_NRSEC_POWER_ENABLE 522
#define CMD_SET_AMP_ENABLE 523
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#endif // USING_PTZ
#else // defined(USING_N938)
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN1 302
#define CMD_SET_3V3_PWR_EN 132
#define CMD_SET_CAM_3V3_EN_STATE 132
#endif
#if 1 //云台
#define CMD_SET_485_ENABLE 512
#define CMD_SET_3V3_PWR_ENABLE 516
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506
#define CMD_SET_SENSOR4_ENABLE 510
#define CMD_SET_SENSOR1_PWR_ENABLE 513
#define CMD_SET_SENSOR2_PWR_ENABLE 514
#define CMD_SET_SENSOR3_PWR_ENABLE 509
#define CMD_SET_SENSOR4_PWR_ENABLE 525
#define CMD_SET_PHOTO_IN 520
#define CMD_SET_PHOTO_OUT 515
#define CMD_SET_ADC_ENABLE 500
#define CMD_SET_MIPI_SWITCH 501
#define CMD_SET_CAM_RSTN1 502
#define CMD_SET_CAM_RSTN0 503
#define CMD_SET_SD_DECT 507
#define CMD_SET_PTZ_PWR_ENABLE 508
#define CMD_SET_RTC_ENABLE 511
#define CMD_SET_100M_ENABLE 518
#define CMD_SET_100M_SWITCH_PWR_ENABLE 519
#define CMD_SET_AM_POWER_ENABLE 521
#define CMD_SET_NRSEC_POWER_ENABLE 522
#define CMD_SET_AMP_ENABLE 523
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#endif
#if 0 //微拍
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_485_STATE 121
#define CMD_SET_SPI_MODE 123
#define CMD_SET_SPI_BITS_PER_WORD 124
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_SPI_POWER 129
#define CMD_SET_WTH_POWER 490
#define CMD_SET_PULL_POWER 491
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_GPIO157_POWER 510
#define CMD_SET_GPIO5_POWER 511
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_485_en0 301
#define CMD_SET_485_en1 302
#define CMD_SET_485_en2 303
#define CMD_SET_485_en3 304
#define CMD_SET_485_en4 305
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#else //938
#define CMD_SET_485_en1 302
#define CMD_SET_CAM_3V3_EN_STATE 360
#define CMD_SET_UART0_EN 361
#define CMD_SET_485_EN0 301
#define CMD_SET_485_en0 301
#define CMD_SET_NETWORK_POWER_EN 362
#define CMD_SET_485_EN3 304
#define CMD_SET_485_EN2 303
#define CMD_SET_485_en3 304
#define CMD_SET_485_en2 303
#define CMD_SET_SPI_POWER 129
// #define CMD_SET_5V_EN 363
#define CMD_SET_5V_EN 363
#define CMD_SDCARD_DETECT_EN 364
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_PULL_POWER 491
#define CMD_SET_WTH_POWER 490
#define CMD_SET_485_EN4 305
#define CMD_SET_485_en4 305
#define CMD_LED_CTRL 365
#define CMD_BD_EN 366
#define CMD_ADC_EN 367
#define CMD_SPI2SERIAL_POWER_EN 368
#define CMD_RS485_3V3_EN 369
#endif
#if 0
// Others
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_OTG_STATE 107
#define CMD_485_0_DE 156 // 485_0 DE信号
#define CMD_485_0_PWR_EN 157 // 485_0 电源使能
#define CMD_485_0_1_DE_EN 171 // 485_0&1DE电平转换芯片使能信号
#define CMD_485_1_DE 172 //
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_SET_CAM_3V3_EN_STATE1 72 // 整板3V3上电使能
#define CMD_3V3_SWITCH_EN 45 // 整板485_3V3信号电平转换电源使能
#define CMD_SET_INIT_STATUS 0 // 401
#define CMD_UART0_EN 73 // 预留UART0电平转换芯片使能
#define CMD_485_1_PWR_EN 5 // 485_1 电源使能
#endif // USING_N938
#define CMD_485_3_DE 6 // 485_3 DE信号
#define CMD_485_2_DE 7 // 485_2 DE信号
#define CMD_485_4_DE 13 // 485_4 DE信号
#define CMD_NETWORK_PWR_EN 94 // 100M网络电源使能
#define CMD_485_2_PWR_EN 92 // 485_2 电源使能
#define CMD_485_3_PWR_EN 91 // 485_3 电源使能
#define CMD_485_4_PWR_EN 90 // 485_4 电源使能
#define GPIO_NODE_MP "/dev/mtkgpioctrl"
#define CMD_SEC_EN 27 // 加密芯片上电使能
#define MAX_STRING_LEN 32
typedef struct
{
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
#define CMD_485_2_3_DE_EN 26 // 485_2&3 DE电平转换芯片使能信号
class GpioControl
{
public:
struct ITEM
{
int cmd;
size_t references;
time_t openTime;
};
#define CMD_5V_PWR_EN 14 // 整板5V0上电使能
#define CMD_SD_CARD_DECT 15 // SD CARD DECT
#define CMD_PIC1_EN 16
private:
static std::mutex m_locker;
static std::vector<ITEM> m_items;
static bool m_cameraPowerStatus;
#define CMD_OTHER_EN 21
#define CMD_ANGLE_EN 22
#define CMD_PULL_EN 23
#define CMD_WEATHER_EN 24
static std::mutex m_gpioLocker;
#define CMD_LED_CTRL 46
#define CMD_BD_EN 47
#define CMD_ADC_EN 44
protected:
static size_t turnOnImpl(const IOT_PARAM& param);
static size_t turnOffImpl(const IOT_PARAM& param);
#define CMD_SPI_PWR_EN 43 // SPI转串口电源使能
public:
// Power
static size_t TurnOn(int cmd);
static size_t TurnOn(const std::vector<int>& cmds);
static size_t TurnOff(int cmd, uint32_t delayedCloseTime = 0);
static size_t TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseTime = 0);
static size_t TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds);
static size_t TurnOffImmediately(int cmd);
static bool SetCamerastatus(int cmd, bool status);
static bool GetCamerastatus();
static bool GetSelftestStatus(time_t wait_time);
static time_t GetSelfTestRemain(time_t wait_time);
#endif
#ifdef USING_N938
#define GPIO_NODE_N938 "/sys/devices/platform/1000b000.pinctrl/mt_gpio"
#else
#endif // USING_N938
#define GPIO_NODE_MP "/dev/mtkgpioctrl"
class GpioControl
{
private:
static std::mutex m_locker;
static std::vector<std::pair<int, uint32_t>> m_references;
public:
static void setInt(int cmd, int value);
static int getInt(int cmd);
static void setLong(int cmd, long value);
@ -218,94 +194,20 @@ public:
static void setOtgState(bool on)
{
on ? TurnOn(CMD_SET_OTG_STATE) : TurnOff(CMD_SET_OTG_STATE);
setInt(CMD_SET_OTG_STATE, on ? 1 : 0);
}
static bool getOtgState()
{
#ifndef USING_N938
return getInt(CMD_GET_OTG_STATE) != 0;
#else
return false;
#endif
}
static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime)
{
#ifdef USING_PTZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#endif
return getInt(CMD_SET_OTG_STATE) != 0;
}
static void setCam3V3Enable(bool enabled)
{
#ifdef USING_PTZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#endif
}
static void setBeeOn(bool z)
{
#ifndef USING_N938
#ifndef USING_PTZ
z ? TurnOn(CMD_SET_PWM_BEE_STATE) : TurnOff(CMD_SET_PWM_BEE_STATE);
#endif
#endif
}
static void setJidianqiState(bool z) {
#ifndef USING_N938
#ifndef USING_PTZ
z ? TurnOn(CMD_SET_ALM_MODE) : TurnOff(CMD_SET_ALM_MODE);
#endif
#endif
}
static void setSpiPower(bool on) {
on ? TurnOn(CMD_SET_SPI_POWER) : TurnOff(CMD_SET_SPI_POWER);
if (on)
{
std::this_thread::sleep_for(std::chrono::milliseconds(40));
}
}
static void setRS485Enable(bool z, uint32_t delayedCloseTime)
{
#ifndef USING_N938
#ifdef USING_PTZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE, delayedCloseTime);
#else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime);
#endif
#endif
}
static void set12VEnable(bool z, uint32_t delayedCloseTime)
{
#ifndef USING_N938
z ? TurnOn(CMD_SET_12V_EN_STATE) : TurnOff(CMD_SET_12V_EN_STATE, delayedCloseTime);
#endif
}
static void setRS485Enable(bool z)
{
#ifndef USING_N938
#ifdef USING_PTZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE);
#ifdef ENABLE_3V3_ALWAYS
setInt(CMD_SET_CAM_3V3_EN_STATE, 1);
#else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE);
#endif
#endif
}
static void set12VEnable(bool z)
{
#ifndef USING_N938
z ? TurnOn(CMD_SET_12V_EN_STATE) : TurnOff(CMD_SET_12V_EN_STATE);
setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0);
#endif
}
@ -314,378 +216,104 @@ public:
setInt(CMD_SET_SYSTEM_RESET, 1);
}
static void reboot2()
{
setInt(CMD_SET_SYSTEM_RESET2, 1);
}
static void setLightAdc(int i)
{
#ifndef USING_N938
#ifdef USING_PTZ
setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i);
#else
setInt(CMD_SET_LIGHT_ADC, i);
#endif
#endif
}
static int getLightAdc()
{
#ifndef USING_N938
#ifdef USING_PTZ
return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE);
#else
return getInt(CMD_GET_LIGHT_ADC);
#endif
#else
return -1;
#endif
}
static int getChargingVoltage()
{
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
#else
return -1;
#endif
return getInt(CMD_GET_CHARGING_VOL_STATE);
}
#if 0
static int getChargingShuntVoltage()
{
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
#endif
static int getChargingBusVoltage() {
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
}
#if 0
static int getChargingPower() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_POWER_STATE);
#else
return -1;
#endif
}
static int getChargingCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_CURRENT_STATE);
#else
return -1;
#endif
}
#endif
static int getBatteryVoltage() {
return getInt(CMD_GET_BAT_BUS_VOLTAGE_STATE);
return getInt(CMD_GET_BAT_VOL_STATE);
}
#if 0
static int getBatteryShuntVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
#endif
static int getBatteryBusVoltage() {
return getInt(CMD_GET_BAT_BUS_VOLTAGE_STATE);
}
#if 0
static int getBatteryPower() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_POWER_STATE);
#else
return -1;
#endif
}
static int getBatteryCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_CURRENT_STATE);
#else
return -1;
#endif
}
#endif
static void set485WriteMode() {
#if 0
setInt(CMD_SET_485_STATE, 1);
#endif
}
static void set485ReadMode() {
#if 0
setInt(CMD_SET_485_STATE, 0);
#endif
}
static void setSpiMode(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_MODE, i);
#endif
}
static void setSpiBitsPerWord(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_BITS_PER_WORD, i);
#endif
}
static void setSpiMaxSpeedHz(long j) {
#ifndef USING_N938
setLong(CMD_SET_SPI_MAXSPEEDHZ, j);
#endif
}
};
class PowerControl
{
public:
PowerControl(int cmd1) : m_delayCloseTime(0)
{
m_cmds.resize(1, cmd1);
TurnOn();
}
PowerControl(const std::vector<int>& cmds) : m_delayCloseTime(0)
{
m_cmds = cmds;
TurnOn();
static void setBeeOn(bool z) {
setInt(CMD_SET_PWM_BEE_STATE, z ? 1 : 0);
}
PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(1, cmd1);
TurnOn();
}
PowerControl(int cmd1, int cmd2, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(2, cmd1);
m_cmds[1] = cmd2;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(3, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(4, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(5, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(6, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(7, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(8, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
m_cmds[7] = cmd8;
TurnOn();
}
virtual ~PowerControl()
{
GpioControl::TurnOff(m_cmds, m_delayCloseTime);
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s, DelayCloseTime=%u", status.c_str(), m_delayCloseTime);
#endif
static void setJidianqiState(bool z) {
setInt(CMD_SET_ALM_MODE, z ? 1 : 0);
}
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string GetStatus()
{
std::string result;
for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it)
static void setSpiPower(bool on) {
setInt(CMD_SET_SPI_POWER, on ? 1 : 0);
if (on)
{
if (*it == 0)
{
continue;
}
result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " ";
std::this_thread::sleep_for(std::chrono::milliseconds(40));
}
return result;
}
#endif // #if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
protected:
void TurnOn()
{
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
// std::string status = GetStatus();
// XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str());
#endif
GpioControl::TurnOn(m_cmds);
}
protected:
std::vector<int> m_cmds;
uint32_t m_delayCloseTime;
};
// RAII holder for the (board-specific) camera supply line: the PowerControl
// base turns the line on in its ctor and releases it — optionally after
// closeDelayTime seconds — in its dtor. On USING_N938 the cmd is 0, which
// the TurnOn/TurnOff paths treat as a placeholder (no-op).
class CameraPowerCtrl : public PowerControl
{
public:
CameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(0, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
// RAII holder for the supply lines a network camera needs on this board
// variant; lines go on in the base-class ctor and are released (optionally
// delayed by closeDelayTime seconds) in its dtor.
class NetCameraPowerCtrl : public PowerControl
{
public:
NetCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
class PlzCameraPowerCtrl : public PowerControl
{
public:
PlzCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
static void setRS485Enable(bool z) {
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
}
};
// RAII holder for the Ethernet-related supply lines of this board variant;
// the PowerControl base turns them on in its ctor and releases them
// (optionally delayed by closeDelayTime seconds) in its dtor.
class EthernetPowerCtrl : public PowerControl
{
public:
EthernetPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
// PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, closeDelayTime)
#else // USING_PTZ
// Micro Photo
PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
class UsbCameraPowerCtrl : public PowerControl
{
public:
UsbCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
static void set12VEnable(bool z) {
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
};
// RAII holder for the supply lines a serial (SPI/RS-485) camera needs on
// this board variant; lines go on in the base-class ctor and are released
// (optionally delayed by closeDelayTime seconds) in its dtor.
class SerialCameraPowerCtrl : public PowerControl
{
public:
SerialCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
#endif //MICROPHOTO_GPIOCONTROL_H

@ -1,10 +1,11 @@
#include <jni.h>
#include <string>
#include <thread>
#include <chrono>
#include <Factory.h>
#include <Client/Terminal.h>
#include "TerminalDevice.h"
#include "PhoneDevice.h"
#include "PhoneDevice2.h"
#include <LogThread.h>
#include <sys/system_properties.h>
#include <AndroidHelper.h>
@ -13,35 +14,23 @@
#include <android/multinetwork.h>
#ifndef USING_N938
#define NRSEC_PATH "/dev/spidev0.0"
#else
#define NRSEC_PATH "/dev/spidev32766.0"
#endif
#ifdef USING_BREAK_PAD
#include <client/linux/handler/exception_handler.h>
#include <client/linux/handler/minidump_descriptor.h>
#endif
#ifdef USING_MQTT
#include <mosquitto.h>
#endif
#define NRSEC_PATH "/dev/spidev0.0"
#ifdef USING_FFMPEG
extern "C" {
#include <libavformat/avformat.h>
}
#ifdef USING_BREAK_PAD
#include "client/linux/handler/exception_handler.h"
#include "client/linux/handler/minidump_descriptor.h"
#endif
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "Camera.h"
#include "Camera2Reader.h"
#include "GPIOControl.h"
#ifdef USING_BREAK_PAD
static google_breakpad::ExceptionHandler* g_breakpad_handler = nullptr;
bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
void* context,
bool succeeded) {
@ -55,7 +44,6 @@ bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
#include <Client/NrsecPort.h>
#endif
#include <curl/curl.h>
static jmethodID mRegisterTimerMid = 0;
static jmethodID mRegisterHeartbeatMid = 0;
@ -166,34 +154,18 @@ void posix_signal_handler(int sig, siginfo_t *siginfo, void *context)
class Runner
{
public:
static void RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned int preset, unsigned int type, uint64_t scheduleTime);
static void RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned int preset, unsigned int type, unsigned long scheduleTime);
};
void Runner::RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned int preset, unsigned int type, uint64_t scheduleTime)
void Runner::RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned int preset, unsigned int type, unsigned long scheduleTime)
{
pTerminal->RequestCapture(channel, preset, type, scheduleTime);
}
#include <signal.h>
#include <android/log.h>
#if 0
void sighandler(int sig) {
__android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "Caught signal %d", sig);
exit(1);
}
#endif
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
JNIEnv* env = NULL;
jint result = -1;
// 在 JNI_OnLoad 或其他初始化函数中注册
#if 0
signal(SIGSEGV, sighandler);
#endif
#if defined(JNI_VERSION_1_6)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK)
{
@ -219,15 +191,8 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
}
#ifdef USING_BREAK_PAD
google_breakpad::MinidumpDescriptor descriptor("/sdcard/com.xypower.mpapp/logs/");
g_breakpad_handler = new google_breakpad::ExceptionHandler(
descriptor,
nullptr, // Filter callback
DumpCallback, // Minidump callback
nullptr, // Context
true, // Install handlers
-1 // Server FD (not used)
);
google_breakpad::MinidumpDescriptor descriptor("/sdcard/Android/data/com.xypower.mpapp/files/logs/");
google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
#endif
#if 0
@ -260,42 +225,9 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
env->DeleteLocalRef(clazz);
#endif
curl_global_init(CURL_GLOBAL_ALL);
#ifdef USING_MQTT
mosquitto_lib_init();
#endif
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_init();
#endif
return result;
}
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
{
#ifdef USING_MQTT
mosquitto_lib_cleanup();
#endif
curl_global_cleanup();
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_deinit();
#endif
#ifdef USING_BREAKPAD
// Clean up breakpad handler
if (g_breakpad_handler) {
delete g_breakpad_handler;
g_breakpad_handler = nullptr;
}
#endif
}
bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread)
{
didAttachThread = false;
@ -331,10 +263,12 @@ Java_com_xypower_mpapp_MainActivity_takePhoto(
unsigned char id = (unsigned char)channel - 1;
Camera2Reader *camera = new Camera2Reader(id);
std::string pathStr = jstring2string(env, path);
std::string fileNameStr = jstring2string(env, fileName);
const char *pathStr = env->GetStringUTFChars(path, 0);
const char *fileNameStr = env->GetStringUTFChars(fileName, 0);
camera->Open(pathStr.c_str(), fileNameStr.c_str());
camera->Open(pathStr, fileNameStr);
env->ReleaseStringUTFChars(fileName, fileNameStr);
env->ReleaseStringUTFChars(path, pathStr);
camera->start();
@ -356,6 +290,12 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
*/
if (netHandle != NETID_UNSET) {
net_handle_t nh = (net_handle_t)netHandle;
android_setprocnetwork(nh);
}
char model[PROP_VALUE_MAX] = { 0 };
__system_property_get("ro.product.model", model);
@ -365,12 +305,13 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
jstring modelName = env->NewStringUTF(model);
env->SetObjectField(pThis, fieldId, modelName);
std::string appPathStr = jstring2string(env, appPath);
std::string ipStr = jstring2string(env, ip);
std::string cmdidStr = jstring2string(env, cmdid);
std::string simcardStr = jstring2string(env, simcard);
std::string tfCardPathStr = jstring2string(env, tfCardPath);
std::string nativeLibraryDirStr = jstring2string(env, nativeLibraryDir);
bool udpOrTcp = (networkProtocol != 0); // 0: tcp
const char *appPathStr = appPath == NULL ? NULL : env->GetStringUTFChars(appPath, 0);
const char *ipStr = ip == NULL ? NULL : env->GetStringUTFChars(ip, 0);
const char *cmdidStr = cmdid == NULL ? NULL : env->GetStringUTFChars(cmdid, 0);
const char *simcardStr = simcard == NULL ? NULL : env->GetStringUTFChars(simcard, 0);
const char *tfCardPathStr = tfCardPath == NULL ? NULL : env->GetStringUTFChars(tfCardPath, 0);
const char *nativeLibraryDirStr = nativeLibraryDir == NULL ? NULL : env->GetStringUTFChars(nativeLibraryDir, 0);
JavaVM* vm = NULL;
jint ret = env->GetJavaVM(&vm);
@ -381,14 +322,14 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
CTerminal* pTerminal = NewTerminal(protocol);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, appPathStr, (uint64_t)netHandle, versionCode, nativeLibraryDirStr);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), NETID_UNSET, versionCode, MakeString(nativeLibraryDirStr));
device->SetListener(pTerminal);
device->UpdateSignalLevel(signalLevel);
device->SetBuildTime(buildTime / 1000);
device->UpdateSimcard(simcardStr);
device->UpdateTfCardPath(tfCardPathStr);
device->UpdateSimcard(MakeString(simcardStr));
device->UpdateTfCardPath(MakeString(tfCardPathStr));
pTerminal->InitServerInfo(appPathStr, cmdidStr, ipStr, port, networkProtocol, encryptData);
pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData);
// pTerminal->SetPacketSize(1 * 1024); // 1K
#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN)
pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", "");
@ -400,6 +341,12 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
#ifdef _DEBUG
ALOGD("Finish Startup");
#endif
if (appPathStr != NULL) env->ReleaseStringUTFChars(appPath, appPathStr);
if (ipStr != NULL) env->ReleaseStringUTFChars(ip, ipStr);
if (cmdidStr != NULL) env->ReleaseStringUTFChars(cmdid, cmdidStr);
if (simcardStr != NULL) env->ReleaseStringUTFChars(simcard, simcardStr);
if (tfCardPathStr != NULL) env->ReleaseStringUTFChars(tfCardPath, tfCardPathStr);
if (nativeLibraryDirStr != NULL) env->ReleaseStringUTFChars(nativeLibraryDir, nativeLibraryDirStr);
if (!res)
{
@ -413,9 +360,9 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
JNIEnv* env,
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jstring url, jint mediaType) {
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jboolean photoOrVideo) {
if (channel < 0 || channel > 0xFFFF)
if (channel < 1 || channel > 0xFF)
{
return JNI_FALSE;
}
@ -425,44 +372,15 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
return JNI_FALSE;
}
uint8_t type = (uint8_t)mediaType;
// std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
unsigned char type = photoOrVideo ? 0 : 1;
// std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (unsigned long)scheduleTime, 0, true);
// th.detach();
if (channel == 0x200)
{
// Heartbeat
}
else if (channel >= 0x100)
{
uint32_t packetType = channel;
packetType &= 0xFF;
pTerminal->RequestSampling(packetType, (uint64_t)scheduleTime, 0);
}
else
{
if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO)
{
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM)
{
// virtual bool StartStream(unsigned char channel, unsigned char preset, const std::string& url, uint32_t* photoId = NULL);
// virtual bool StopStream(unsigned char channel, unsigned char preset, uint32_t photoId);
uint32_t photoId = 0;
std::string urlStr = jstring2string(env, url);
pTerminal->StartStream(channel, preset, urlStr, &photoId);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF)
{
pTerminal->StopStream(channel, preset, 0);
}
}
pTerminal->RequestCapture((unsigned int)channel, (unsigned int)preset, type, (unsigned long)scheduleTime, 0, true);
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
JNIEnv* env,
@ -478,10 +396,6 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
std::string configFilePathStr = jstring2string(env, configFilePath);
CFG_CHANNEL cfg;
cfg.camera_id = channel - 1;
cfg.quality = DEFAULT_JPEG_QUALITY;
cfg.requestTemplate = 2;
IDevice::PHOTO_INFO photoInfo(channel, preset);
CTerminal::LoadChannelConfig(channel, configFilePathStr, cfg);
CTerminal::ConvertChannelConfigToPhotoInfo(cfg, photoOrVideo != JNI_FALSE, photoInfo);
@ -491,9 +405,9 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
if (photoInfo.usbCamera)
{
GpioControl::setOtgState(true);
CPhoneDevice::TurnOnOtg(NULL);
}
GpioControl::setCam3V3Enable(true);
CPhoneDevice::TurnOnCameraPower(NULL);
std::vector<IDevice::OSD_INFO> osds;
osds.resize(4);
@ -506,8 +420,11 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
osds[2].text = cfg.osd.rightBottom;
osds[3].text = cfg.osd.leftBottom;
std::string pathStr = jstring2string(env, path);
device->TakePhoto(photoInfo, osds, pathStr);
const char* pathStr = env->GetStringUTFChars(path, 0);
device->TakePhoto(photoInfo, osds, MakeString(pathStr));
env->ReleaseStringUTFChars(path, pathStr);
// device->TurnOffCameraPower(NULL);
// if (photoInfo.usbCamera)
@ -533,7 +450,7 @@ extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
JNIEnv* env,
jobject pThis,
jlong handler, jint signalLevel, jboolean scheduled) {
jlong handler, jint signalLevel) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
@ -547,25 +464,7 @@ Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
device->UpdateSignalLevel(signalLevel);
}
pTerminal->SendHeartbeat(scheduled != JNI_FALSE);
#ifdef OUTPUT_DBG_INFO
#if 0
std::thread t([]()
{
time_t ts = time(NULL);
int ldr = GpioControl::getLightAdc();
char buf[64] = { 0 };
snprintf(buf, sizeof(buf), "%s %d\r\n", FormatLocalDateTime(ts).c_str(), ldr);
appendFile("/sdcard/com.xypower.mpapp/tmp/ldr.txt", (const unsigned char* )buf, strlen(buf));
});
t.detach();
#endif
#endif
pTerminal->SendHeartbeat();
return JNI_TRUE;
}
@ -602,7 +501,6 @@ Java_com_xypower_mpapp_MicroPhotoService_uninit(
return JNI_FALSE;
}
XYLOG(XYLOG_SEVERITY_WARNING, "Will uninit service");
IDevice* dev = pTerminal->GetDevice();
if (dev != NULL)
{
@ -610,7 +508,6 @@ Java_com_xypower_mpapp_MicroPhotoService_uninit(
}
pTerminal->SignalExit();
pTerminal->Shutdown();
dev = pTerminal->GetDevice();
if (dev != NULL)
{
delete dev;
@ -661,7 +558,7 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData2(
vector<jlong> dataArray;
dataArray.reserve(numberOfData);
uint64_t val = 0;
unsigned long val = 0;
jint channel = 0;
for (map<unsigned char, vector<unsigned int>>::const_iterator it = photoTime.cbegin(); it != photoTime.cend(); ++it)
{
@ -678,11 +575,11 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData2(
for (vector<unsigned int>::const_iterator it2 = it->second.cbegin(); it2 != it->second.cend(); ++it2)
{
// time
val = ((uint64_t)((*it2) & 0xFFFFFF00)) << 24;
val = ((unsigned long)((*it2) & 0xFFFFFF00)) << 24;
// channel
val |= ((uint64_t)channel) << 16;
val |= ((unsigned long)channel) << 16;
// preset
val |= ((uint64_t)((*it2) & 0xFF)) << 8;
val |= ((unsigned long)((*it2) & 0xFF)) << 8;
dataArray.push_back((jlong)val);
}
@ -714,7 +611,7 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData(
unsigned int scheduleTime = 0;
time_t zeroPointTime = 0;
std::vector<std::pair<uint16_t, uint8_t> > channelsAndPresets;
std::vector<std::pair<unsigned char, unsigned char> > channelsAndPresets;
if (!pTerminal->GetAndRefreshLatestScheduleTime(startTime, zeroPointTime, scheduleTime, channelsAndPresets))
{
return NULL;
@ -732,14 +629,14 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData(
dataArray.push_back((jlong)scheduleTime);
dataArray.push_back((jlong)channelsAndPresets.size());
uint64_t val = 0;
for (std::vector<std::pair<uint16_t, uint8_t> >::const_iterator it = channelsAndPresets.cbegin(); it != channelsAndPresets.cend(); ++it)
unsigned long val = 0;
for (std::vector<std::pair<unsigned char, unsigned char> >::const_iterator it = channelsAndPresets.cbegin(); it != channelsAndPresets.cend(); ++it)
{
val = (((uint64_t)scheduleTime) << 28);
val = (unsigned long)scheduleTime << 24;
// channel
val |= (((uint64_t)(it->first)) << 12);
val |= ((unsigned long)(it->first)) << 16;
// preset
val |= (((uint64_t)(it->second)) << 4);
val |= ((unsigned long)(it->second)) << 8;
dataArray.push_back((jlong)val);
}
@ -759,7 +656,9 @@ Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
JNIEnv* env,
jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) {
std::string paramPathStr = jstring2string(env, paramPath);
const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0);
std::string paramPathStr = MakeString(pParamPathStr);
env->ReleaseStringUTFChars(paramPath, pParamPathStr);
const char* pBinPathStr = env->GetStringUTFChars(binPath, 0);
std::string binPathStr = MakeString(pBinPathStr);
@ -835,6 +734,75 @@ Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
return data;
}
/*
extern "C" JNIEXPORT jlongArray JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem(
JNIEnv* env,
jobject pThis, jlong handler) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return NULL;
}
map<unsigned char, vector<unsigned int>> photoTime;
if (!pTerminal->GetPhotoTime(photoTime) || photoTime.empty())
{
return NULL;
}
size_t numberOfData = photoTime.size() * photoTime.begin()->second.size();
if (numberOfData == 0)
{
return NULL;
}
vector<jlong> dataArray;
dataArray.reserve(numberOfData);
unsigned long val = 0;
jint channel = 0;
for (map<unsigned char, vector<unsigned int>>::const_iterator it = photoTime.cbegin(); it != photoTime.cend(); ++it)
{
if (it->second.empty())
{
continue;
}
channel = (jint)((unsigned short)it->first);
// dataArray.push_back(channel);
// val = (jint)it->second.size();
// dataArray.push_back(val);
for (vector<unsigned int>::const_iterator it2 = it->second.cbegin(); it2 != it->second.cend(); ++it2)
{
// time
val = ((unsigned long)((*it2) & 0xFFFFFF00)) << 24;
// channel
val |= ((unsigned long)channel) << 16;
// preset
val |= ((unsigned long)((*it2) & 0xFF)) << 8;
dataArray.push_back((jlong)val);
}
}
std::sort(dataArray.begin(), dataArray.end());
jlongArray data = env->NewLongArray(dataArray.size());
if (data == NULL) {
return NULL;
}
env->SetLongArrayRegion(data, 0, dataArray.size(), &dataArray[0]);
return data;
}
*/
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_captureFinished(
JNIEnv* env,
@ -852,7 +820,7 @@ Java_com_xypower_mpapp_MicroPhotoService_captureFinished(
if (result == JNI_FALSE || bitmap == NULL)
{
cv::Mat mat;
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (uint64_t)photoId);
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
return;
}
AndroidBitmapInfo info = { 0 };
@ -876,7 +844,7 @@ Java_com_xypower_mpapp_MicroPhotoService_captureFinished(
cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (uint64_t)photoId);
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
#endif // 0
} else
{
@ -893,7 +861,7 @@ Java_com_xypower_mpapp_MicroPhotoService_captureFinished(
AndroidBitmap_unlockPixels(env, bitmap);
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (uint64_t)photoId);
((CPhoneDevice *)dev)->OnCaptureReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, mat, (unsigned long)photoId);
}
}
}
@ -921,7 +889,7 @@ Java_com_xypower_mpapp_MicroPhotoService_burstCaptureFinished(
if (result == JNI_FALSE)
{
cv::Mat mat;
((CPhoneDevice *)dev)->OnCaptureReady(true, false, mat, (uint64_t)photoId);
((CPhoneDevice *)dev)->OnCaptureReady(true, false, mat, (unsigned long)photoId);
return;
}
@ -954,7 +922,7 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
}
// camera->Open(pathStr, fileNameStr);
uint64_t photoId = videoId;
unsigned long photoId = videoId;
((CPhoneDevice *)dev)->OnVideoReady(photoOrVideo != JNI_FALSE, result != JNI_FALSE, pathStr, photoId);
if (path != NULL)
{
@ -967,21 +935,19 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
JNIEnv* env,
jobject pThis, jlong handler, jint channelToClean) {
jobject pThis, jlong handler) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
if (channelToClean != -1)
{
pTerminal->CleanCaptureSchedules((uint32_t)((int)channelToClean));
}
bool res = pTerminal->LoadAppConfigs(true);
bool res = pTerminal->LoadAppConfigs();
return res ? JNI_TRUE : JNI_FALSE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) {
@ -1028,18 +994,9 @@ Java_com_xypower_mpapp_MicroPhotoService_infoLog(
return;
}
std::string str = jstring2string(env, msg);
XYLOG(XYLOG_SEVERITY_INFO, str.c_str());
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_usingEthernet(
JNIEnv* env, jclass cls) {
#ifdef USING_ETHERNET
return JNI_TRUE;
#else
return JNI_FALSE;
#endif
const char *msgStr = env->GetStringUTFChars(msg, 0);
XYLOG(XYLOG_SEVERITY_INFO, msgStr);
env->ReleaseStringUTFChars(msg, msgStr);
}
extern "C" JNIEXPORT void JNICALL
@ -1092,7 +1049,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile(
const char *md5Str = env->GetStringUTFChars(md5, 0);
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1105,7 +1063,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKeyFile(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
env->ReleaseStringUTFChars(md5, md5Str);
@ -1127,7 +1085,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey(
return JNI_FALSE;
}
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1142,7 +1101,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPublicKey(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
return res ? JNI_TRUE : JNI_FALSE;
#else
@ -1163,7 +1122,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKey(
return JNI_FALSE;
}
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1178,7 +1138,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKey(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
return res ? JNI_TRUE : JNI_FALSE;
#else
@ -1201,7 +1161,8 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys(
#ifdef USING_NRSEC
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
const char *path = NRSEC_PATH;
@ -1215,7 +1176,7 @@ Java_com_xypower_mpapp_MicroPhotoService_genKeys(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
return res ? JNI_TRUE : JNI_FALSE;
#else
@ -1230,7 +1191,8 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion(
std::string version;
#ifdef USING_NRSEC
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
const char *path = NRSEC_PATH;
@ -1244,7 +1206,7 @@ Java_com_xypower_mpapp_MicroPhotoService_querySecVersion(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
#endif
return env->NewStringUTF(version.c_str());
}
@ -1260,7 +1222,8 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest(
}
const char *path = NRSEC_PATH;
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
uint8_t output[1024] = { 0 };
@ -1278,7 +1241,7 @@ Java_com_xypower_mpapp_MicroPhotoService_genCertRequest(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
if (res)
{
@ -1317,7 +1280,8 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile(
const char *path = NRSEC_PATH;
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1329,7 +1293,7 @@ Java_com_xypower_mpapp_MicroPhotoService_importPrivateKeyFile(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
// const char *md5Str = env->GetStringUTFChars(md5, 0);
// env->ReleaseStringUTFChars(md5, md5Str);
@ -1356,7 +1320,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile(
uint8_t len = 0;
std::vector<unsigned char> data(64, 0);
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1368,7 +1333,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPublicKeyFile(
}
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
if (res)
{
@ -1394,7 +1359,8 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
const char *path = NRSEC_PATH;
GpioControl::setCam3V3Enable(true);
GpioControl::setSpiPower(false);
CPhoneDevice::TurnOnCameraPower(NULL);
GpioControl::setSpiPower(true);
NrsecPort nrsec;
@ -1409,7 +1375,7 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
nrsec.Close();
GpioControl::setSpiPower(false);
GpioControl::setCam3V3Enable(false);
CPhoneDevice::TurnOffCameraPower(NULL);
if (res) {
const char *outputPathStr = env->GetStringUTFChars(outputPath, 0);
@ -1422,157 +1388,3 @@ Java_com_xypower_mpapp_MicroPhotoService_exportPrivateFile(
return JNI_FALSE;
#endif
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_updateEhernet(
JNIEnv* env, jobject pThis, jlong handle, jlong networkHandle, jboolean available) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice();
if (device != NULL)
{
bool changed = false;
device->UpdateNetwork(static_cast<net_handle_t>(networkHandle), available != JNI_FALSE, false, changed);
if (changed)
{
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changed and Check socket connection");
pTerminal->ResetNetwork();
}
else
{
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changing Not Cause Socket Disconnection");
}
}
return JNI_TRUE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_updateActiveNetwork(
JNIEnv* env, jobject pThis, jlong handle, jlong networkHandle, jboolean available) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice();
if (device != NULL)
{
bool changed = false;
device->UpdateNetwork(static_cast<net_handle_t>(networkHandle), available != JNI_FALSE, true, changed);
}
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_requestPowerControl(
JNIEnv* env, jclass cls, jint type) {
if (type == 1) // Net
{
NetCameraPowerCtrl* powerControl = new NetCameraPowerCtrl(2);
return reinterpret_cast<jlong>(powerControl);
}
return 0L;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_releasePowerControl(
JNIEnv* env, jclass cls, jlong powerControlHandle) {
PowerControl* powerControl = reinterpret_cast<PowerControl*>(powerControlHandle);
delete powerControl;
return JNI_TRUE;
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getCustomAppId(JNIEnv *env, jobject thiz) {
#ifdef USING_N938
return 2;
#elif defined(USING_PTZ)
return 1;
#else
return 0;
#endif
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendCameraCtrl(
JNIEnv* env, jobject pThis, jlong handle, jint channel, jint preset, jint cmd) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
}
pTerminal->SendCameraCtrl(channel, preset, cmd);
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyTimeUpdated(
JNIEnv* env, jobject pThis, jlong handle) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
}
std::thread t([pTerminal]()
{
pTerminal->OnTimeUpdated();
});
t.detach();
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendBasicInfo(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendBasicInfo()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendBasicInfo();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendWorkStatus(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendWorkStatus()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendWorkStatus();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendFault(JNIEnv *env, jobject thiz, jlong handler, jstring faultCode, jstring faultInfo) {
// TODO: implement sendFault()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
std::string faultInfoStr = jstring2string(env, faultInfo);
pTerminal->SendFaultInfo(faultInfoStr);
return JNI_TRUE;
}

File diff suppressed because it is too large Load Diff

@ -13,7 +13,6 @@
#include <atomic>
#include <filesystem>
#include <thread>
#include <memory>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
@ -28,11 +27,6 @@
#include <opencv2/opencv.hpp>
#include <android/bitmap.h>
#include <android/multinetwork.h>
#include "SensorsProtocol.h"
#include "PtzController.h"
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__))
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__))
@ -155,37 +149,16 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) {
}
#endif
class PowerControl;
class VendorCtrl;
class Streaming;
struct STREAMING_CONTEXT
{
std::shared_ptr<Streaming> stream;
std::shared_ptr<PowerControl> powerCtrl;
std::shared_ptr<PowerControl> ethernetPowerCtrl;
};
class CPhoneDevice : public IDevice
{
public:
friend PtzController;
struct NETWORK
{
std::string iface;
std::string ip;
std::string netmask;
std::string gateway;
};
class CPhoneCamera : public NdkCamera
{
public:
CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
virtual ~CPhoneCamera();
virtual bool on_image(cv::Mat rgb);
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
@ -215,13 +188,13 @@ public:
{
CPhoneDevice* device;
unsigned int timerType;
uint64_t times;
unsigned long times;
void* data;
uint64_t expectedTimes;
uint64_t uid;
unsigned long expectedTimes;
unsigned long uid;
};
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir);
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode, const std::string& nativeLibDir);
virtual ~CPhoneDevice();
virtual void SetListener(IListener* listener);
@ -232,39 +205,28 @@ public:
virtual bool UpdateSchedules();
virtual bool QuerySystemProperties(map<string, string>& properties);
virtual bool InstallAPP(const std::string& path, unsigned int delayedTime);
virtual bool Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout = 1000);
virtual bool Reboot(int resetType, const std::string& reason);
virtual bool EnableGPS(bool enabled);
virtual int QueryBattaryVoltage(int timesForAvg, int* isCharging);
virtual uint32_t QueryLdr();
virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging);
virtual bool RequestPosition();
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto);
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path);
virtual bool CloseCamera();
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 1);
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, unsigned long times = 0);
virtual bool UnregisterTimer(timer_uid_t uid);
virtual uint64_t RequestWakelock(uint64_t timeout);
virtual bool ReleaseWakelock(uint64_t wakelock);
virtual unsigned long RequestWakelock(unsigned long timeout);
virtual bool ReleaseWakelock(unsigned long wakelock);
virtual std::string GetVersion() const;
virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam);
virtual int GetWData(WEATHER_INFO *weatherInfo);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM *sensorParam);
virtual bool OpenSensors(int sensortype);
virtual bool CloseSensors(int sensortype, uint32_t delayedCloseTime);
virtual bool OpenPTZSensors(uint32_t sec);
virtual bool ClosePTZSensors(uint32_t delayedCloseTime);
virtual bool GetPTZSensorsStatus(time_t waittime);
virtual bool GetCameraStatus();
virtual void CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
virtual int GetSerialPhoto(int devno, D_IMAGE_DEF *photo);
virtual void InitSerialComm(D_SENSOR_PARAM *sensorParam, char *filedir,const char *logpath);
bool LoadNetworkInfo();
virtual bool CloseSensors(int sensortype);
bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector<uint32_t>& items);
void UpdatePosition(double lon, double lat, double radius, time_t ts);
bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId);
bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat mat, unsigned int photoId);
bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId);
void UpdateSignalLevel(int signalLevel);
void UpdateTfCardPath(const std::string& tfCardPath)
@ -276,23 +238,22 @@ public:
mBuildTime = buildTime;
}
void UpdateSimcard(const std::string& simcard);
void UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed);
net_handle_t GetEthnetHandle() const;
static void TurnOnCameraPower(JNIEnv* env);
static void TurnOffCameraPower(JNIEnv* env);
VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime);
static void TurnOnOtg(JNIEnv* env);
static void TurnOffOtg(JNIEnv* env);
protected:
std::string GetFileName() const;
std::string GetVersion() const;
bool SendBroadcastMessage(std::string action, int value);
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
bool TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool StartPushStreaming(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{
if (m_listener != NULL)
@ -313,38 +274,17 @@ protected:
return false;
}
inline bool TakePTZPhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo) const
{
if (m_listener != NULL)
{
std::vector<IDevice::RECOG_OBJECT> objects;
return m_listener->OnPTZPhotoTaken(result, photoInfo);
}
return false;
}
inline bool GetPhotoSerialsParamCb(SerialsPhotoParam &param) const
{
if (m_listener != NULL)
{
return m_listener->OnPhotoSerialsParamGet(param);
}
return false;
}
void QueryFlowInfo(std::map<std::string, std::string>& powerInfo);
void QueryPowerInfo(std::map<std::string, std::string>& powerInfo);
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat mat);
bool OnImageReady(cv::Mat& mat);
bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
void onError(const std::string& msg);
void onDisconnected(ACameraDevice* device);
void CloseCamera2(CPhoneCamera* camera, unsigned int photoId, unsigned char cameraType);
void CloseCamera2(CPhoneCamera* camera, unsigned int photoId, bool turnOffOtg);
static void handleSignal(int sig, siginfo_t *si, void *uc);
bool RegisterHandlerForSignal(int sig);
@ -358,17 +298,9 @@ protected:
int CallExecv(int rotation, int frontCamera, const std::string& outputPath, const std::vector<std::string>& images);
void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway);
void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath);
void SetStaticIp();
void ShutdownEthernet();
int ExecuteCommand(const std::string& cmd);
static std::string BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst);
protected:
mutable std::mutex m_devLocker;
std::mutex m_devLocker;
JavaVM* m_vm;
jobject m_javaService;
@ -376,10 +308,6 @@ protected:
std::string m_tfCardPath;
std::string m_nativeLibraryDir;
NETWORK* m_network;
net_handle_t m_defNetHandle;
net_handle_t m_ethnetHandle;
jmethodID mRegisterHeartbeatMid;
jmethodID mUpdateCaptureScheduleMid;
jmethodID mUpdateTimeMid;
@ -388,17 +316,13 @@ protected:
jmethodID mRequestWakelockMid;
jmethodID mReleaseWakelockMid;
jmethodID mGetFlowInfoMid;
jmethodID mGetSystemInfoMid;
jmethodID mRebootMid;
jmethodID mInstallAppMid;
jmethodID mEnableGpsMid;
jmethodID mRequestPositionMid;
jmethodID mExecHdrplusMid;
jmethodID mSetStaticIpMid;
jmethodID mExecuteCmdMid;
jmethodID mConvertDngToPngMid;
jmethodID mCallSysCameraMid;
@ -408,12 +332,13 @@ protected:
IListener* m_listener;
const CFG_RECOGNIZATION* m_pRecognizationCfg;
bool mAIInitialized;
unsigned int mNetId;
unsigned int mVersionCode;
time_t mBuildTime;
atomic_ullong m_timerUidFeed;
atomic_ullong m_wakelockIdFeed;
atomic_ullong m_uniqueIdFeed;
atomic_ulong m_timerUidFeed;
atomic_ulong m_wakelockIdFeed;
atomic_ulong m_uniqueIdFeed;
std::map<IDevice::timer_uid_t, TIMER_CONTEXT*> mTimers;
mutable CPhoneCamera* mCamera;
@ -421,33 +346,16 @@ protected:
time_t mHeartbeatStartTime;
unsigned int mHeartbeatDuration;
static std::mutex m_powerLocker;
static long mCameraPowerCount;
static long mOtgCount;
std::thread m_threadClose;
std::shared_ptr<PowerControl> m_powerCtrlPtr;
uint32_t m_ethernetFailures;
int m_signalLevel;
time_t m_signalLevelUpdateTime;
std::string m_simcard;
mutable std::mutex m_cameraLocker;
time_t m_lastTime;
std::atomic<bool> m_shouldStopWaiting;
std::atomic<bool> m_isSelfTesting{false};
IDevice::ICE_TAIL m_tempData;
mutable std::mutex m_dataLocker;
mutable std::mutex m_collectDataLocker;
std::condition_variable m_CollectDatacv;
std::atomic<bool> m_collecting;
unsigned long long localDelayTime;
std::map<uint8_t, STREAMING_CONTEXT > m_streamings;
PtzController* m_ptzController;
};

@ -0,0 +1,912 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#define LOG_TAG "CameraTestHelpers"
#include "PhoneDevice2.h"
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
// #include <opencv2/objdetect.hpp>
// #include <opencv2/features2d.hpp>
// #include <opencv2/core/types.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <android/log.h>
#include <AndroidHelper.h>
extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
// This value is 2^18 - 1; fixed-point RGB accumulators are clamped to it
// before being normalized down to eight bits per channel.
static const int kMaxChannelValue = 262143;

// Integer-only YUV -> ARGB conversion (fixed-point BT.601). Done in integer
// arithmetic because some Android devices lack hardware floating point.
static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
    int y = nY - 16;
    if (y < 0) y = 0;
    const int u = nU - 128;
    const int v = nV - 128;
    // Fixed-point equivalents (scaled by 1024) of:
    //   R = 1.164*Y + 1.596*V
    //   G = 1.164*Y - 0.813*V - 0.391*U
    //   B = 1.164*Y + 2.018*U
    int r = 1192 * y + 1634 * v;
    int g = 1192 * y - 833 * v - 400 * u;
    int b = 1192 * y + 2066 * u;
    // Clamp to [0, kMaxChannelValue], then drop the 10 fractional bits.
    r = std::min(kMaxChannelValue, std::max(0, r)) >> 10;
    g = std::min(kMaxChannelValue, std::max(0, g)) >> 10;
    b = std::min(kMaxChannelValue, std::max(0, b)) >> 10;
    // Opaque alpha in the top byte, then R, G, B.
    return 0xff000000 | ((r & 0xff) << 16) | ((g & 0xff) << 8) | (b & 0xff);
}
// Construct the device wrapper: take a global ref on the Java service and
// cache the JNI method IDs used for timer/heartbeat/time calls.
// fix: the old code dereferenced `env` even when GetJniEnv failed (NULL
// pointer crash) and never initialized m_listener.
CPhoneDevice2::CPhoneDevice2(JavaVM* vm, jobject service)
{
    m_vm = vm;
    m_javaService = NULL;
    mRegisterTimerMid = NULL;
    mRegisterHeartbeatMid = NULL;
    mUnregisterTimerMid = NULL;
    mUpdateTimeMid = NULL;
    m_listener = NULL;
    // Seed the uid counter with the current time so uids differ across runs.
    m_timerUidFeed = time(NULL);
    presentRotation_ = 0;
    JNIEnv* env = NULL;
    bool attached = false;
    if (!GetJniEnv(m_vm, &env, attached) || env == NULL)
    {
        ALOGE("Failed to get JNI Env");
        return;
    }
    m_javaService = env->NewGlobalRef(service);
    jclass classService = env->GetObjectClass(m_javaService);
    mRegisterTimerMid = env->GetMethodID(classService, "registerTimer", "(JI)Z");
    mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(I)V");
    mUnregisterTimerMid = env->GetMethodID(classService, "unregisterTimer", "(J)Z");
    mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
    env->DeleteLocalRef(classService);
    if (attached)
    {
        vm->DetachCurrentThread();
    }
}
// Release the global ref on the Java service.
// fix: the old code called env->DeleteGlobalRef even when GetJniEnv failed,
// dereferencing a NULL JNIEnv; now we leak the ref instead of crashing.
CPhoneDevice2::~CPhoneDevice2()
{
    JNIEnv* env = NULL;
    bool attached = false;
    if (!GetJniEnv(m_vm, &env, attached) || env == NULL)
    {
        ALOGE("Failed to get JNI Env");
        m_javaService = NULL;
        return;
    }
    env->DeleteGlobalRef(m_javaService);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
    m_javaService = NULL;
}
// Register the event sink that receives timer and photo-taken callbacks.
// Not thread-safe; call before the device starts firing events.
void CPhoneDevice2::SetListener(IListener* listener)
{
    m_listener = listener;
}
bool CPhoneDevice2::UpdateTime(time_t ts)
{
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
return false;
}
jlong timeInMillis = ((jlong)ts) * 1000;
ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis);
if (attached)
{
m_vm->DetachCurrentThread();
}
return (ret == JNI_TRUE);
}
// Rebooting is not supported by this device implementation; always
// reports failure to the caller.
bool CPhoneDevice2::Reboot()
{
    return false;
}
// Allocate a new timer uid and ask the Java service to schedule it.
// Returns the uid on success, 0 on failure. The timer type is remembered
// locally so FireTimer can report it back to the listener.
IDevice::timer_uid_t CPhoneDevice2::RegisterTimer(unsigned int timerType, unsigned int timeout)
{
    const IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);
    ALOGI("NDK RegTimer: uid=%lld Type=%u timeout=%u", uid, timerType, timeout);
    JNIEnv* env = NULL;
    bool didAttach = false;
    if (!GetJniEnv(m_vm, &env, didAttach))
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    const jboolean ok = env->CallBooleanMethod(m_javaService, mRegisterTimerMid, (jlong)uid, (jint)timeout);
    if (didAttach)
    {
        m_vm->DetachCurrentThread();
    }
    if (ok != JNI_TRUE)
    {
        return 0;
    }
    mTimers.emplace(uid, (unsigned long)timerType);
    return uid;
}
// Cancel a previously registered timer on the Java side and, if that
// succeeds, drop our local bookkeeping entry for it.
bool CPhoneDevice2::UnregisterTimer(IDevice::timer_uid_t uid)
{
    JNIEnv* env = NULL;
    bool didAttach = false;
    if (!GetJniEnv(m_vm, &env, didAttach))
    {
        ALOGE("Failed to get JNI Env");
        return false;
    }
    const jboolean ok = env->CallBooleanMethod(m_javaService, mUnregisterTimerMid, (jlong)uid);
    if (didAttach)
    {
        m_vm->DetachCurrentThread();
    }
    if (ok != JNI_TRUE)
    {
        return false;
    }
    mTimers.erase(uid);
    return true;
}
// Dispatch a timer expiry to the listener. Returns false if the uid is
// unknown or no listener is registered.
// fix: removed the dead debugging branch `if (timerType != 100) { int aa = 0; }`.
bool CPhoneDevice2::FireTimer(timer_uid_t uid)
{
    std::map<IDevice::timer_uid_t, unsigned long>::iterator it = mTimers.find(uid);
    if (it == mTimers.end())
    {
        return false;
    }
    // The map value packs the timer type (low 32 bits) with the fire count
    // (high 32 bits).
    // NOTE(review): this packing assumes `unsigned long` is 64-bit; on a
    // 32-bit ABI the count bits are silently lost - confirm target ABIs.
    unsigned long timerType = it->second & 0xFFFFFFFF;
    unsigned long times = (it->second & 0xFFFFFFFF00000000) >> 32;
    times++;
    it->second = timerType | (times << 32);
    if (m_listener == NULL)
    {
        return false;
    }
    m_listener->OnTimeout(uid, timerType, NULL, times);
    return true;
}
// Start the Java-side heartbeat timer and return a freshly allocated uid
// (0 on JNI failure). timerType is currently unused by the Java call.
// fix: removed the unused `jboolean ret` local that was declared but never
// assigned (registerHeartbeatTimer returns void).
IDevice::timer_uid_t CPhoneDevice2::RegisterHeartbeat(unsigned int timerType, unsigned int timeout)
{
    IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
    return uid;
}
// Open the NDK camera selected by photoInfo.channel (1-based), wire an
// AImageReader as the capture target and fire a single still capture.
// The actual JPEG encoding happens later in ImageCallback. Returns false
// (after invoking TakePhotoCb with failure) on any setup error.
// fix: the three early-error paths leaked the ACameraManager and (for the
// last two) the ACameraIdList; they are now released before returning.
bool CPhoneDevice2::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const string& path)
{
    ALOGI("TAKE_PHOTO: CH=%u PR=%u\n", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset);
    mPhotoInfo = photoInfo;
    mPath = path;
    mDisplayDimension = DisplayDimension(photoInfo.width, photoInfo.height);
    ALOGE("Image Buffer Size: %d", photoInfo.width * photoInfo.height * 4);
    // RGBA scratch buffer used by the PresentImage* converters.
    imageBuffer_ = (uint8_t*)malloc(photoInfo.width * photoInfo.height * 4);
    AASSERT(imageBuffer_ != nullptr, "Failed to allocate imageBuffer_");
    // Channel numbers are 1-based; NDK camera indices are 0-based.
    int cameraId = (int)photoInfo.channel - 1;
    ACameraIdList *cameraIdList = NULL;
    ACameraMetadata *cameraMetadata = NULL;
    const char *selectedCameraId = NULL;
    camera_status_t camera_status = ACAMERA_OK;
    ACameraManager *cameraManager = ACameraManager_create();
    camera_status = ACameraManager_getCameraIdList(cameraManager, &cameraIdList);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera id list (reason: %d)\n", camera_status);
        ACameraManager_delete(cameraManager);
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    if (cameraIdList->numCameras < 1 ) {
        ALOGI("No camera device detected.\n");
        ACameraManager_deleteCameraIdList(cameraIdList);
        ACameraManager_delete(cameraManager);
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    if (cameraIdList->numCameras <= cameraId ) {
        ALOGI("No required camera device %d detected.\n", cameraId);
        ACameraManager_deleteCameraIdList(cameraIdList);
        ACameraManager_delete(cameraManager);
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    selectedCameraId = cameraIdList->cameraIds[cameraId];
    ALOGI("Trying to open Camera2 (id: %s, num of camera : %d)\n", selectedCameraId,
          cameraIdList->numCameras);
    camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId,
                                                            &cameraMetadata);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
    }
    ACameraMetadata_const_entry face, orientation;
    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_LENS_FACING, &face);
    uint32_t cameraFacing_ = static_cast<int32_t>(face.data.u8[0]);
    if (cameraFacing_ == ACAMERA_LENS_FACING_FRONT)
    {
        // NOTE(review): front-facing camera is detected but not handled
        // specially here - confirm whether mirroring is required.
        int aa = 0;
    }
    // Sensors mounted at 90/270 degrees swap width and height.
    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_SENSOR_ORIENTATION, &orientation);
    ALOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
    uint32_t cameraOrientation_ = orientation.data.i32[0];
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270)
    {
        mDisplayDimension.Flip();
    }
    // Pick the smallest supported YUV_420_888 resolution matching our ratio.
    ImageFormat resCap = {(int32_t)photoInfo.width, (int32_t)photoInfo.height, AIMAGE_FORMAT_YUV_420_888};
    MatchCaptureSizeRequest(cameraManager, selectedCameraId, photoInfo.width, photoInfo.height, cameraOrientation_, &resCap);
    deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
    deviceStateCallbacks.onError = camera_device_on_error;
    camera_status = ACameraManager_openCamera(cameraManager, selectedCameraId,
                                              &deviceStateCallbacks, &cameraDevice);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to open camera device (id: %s)\n", selectedCameraId);
    }
    camera_status = ACameraDevice_createCaptureRequest(cameraDevice, TEMPLATE_STILL_CAPTURE/*TEMPLATE_PREVIEW*/,
                                                       &captureRequest);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to create preview capture request (id: %s)\n", selectedCameraId);
    }
    ACaptureSessionOutputContainer_create(&captureSessionOutputContainer);
    captureSessionStateCallbacks.onReady = capture_session_on_ready;
    captureSessionStateCallbacks.onActive = capture_session_on_active;
    captureSessionStateCallbacks.onClosed = capture_session_on_closed;
    // The manager/metadata/id list are no longer needed once the device is open.
    ACameraMetadata_free(cameraMetadata);
    ACameraManager_deleteCameraIdList(cameraIdList);
    ACameraManager_delete(cameraManager);
    media_status_t status;
    // 5 buffers: enough head-room for the single still capture we request.
    status = AImageReader_new(resCap.width, resCap.height, resCap.format, 5, &mAImageReader);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_new error\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    AImageReader_ImageListener listener{
            .context = this,
            .onImageAvailable = OnImageCallback,
    };
    AImageReader_setImageListener(mAImageReader, &listener);
    status = AImageReader_getWindow(mAImageReader, &theNativeWindow);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_getWindow error\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    ALOGI("Surface is prepared in %p.\n", theNativeWindow);
    // Route the capture request's output into the reader's native window.
    ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
    ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);
    ACaptureSessionOutput_create(theNativeWindow, &sessionOutput);
    ACaptureSessionOutputContainer_add(captureSessionOutputContainer, sessionOutput);
    ACameraDevice_createCaptureSession(cameraDevice, captureSessionOutputContainer,
                                       &captureSessionStateCallbacks, &captureSession);
    // Single-shot capture; completion is delivered through OnImageCallback.
    ACameraCaptureSession_capture(captureSession, NULL, 1, &captureRequest, NULL);
    ALOGI("Surface is prepared in here.\n");
    return true;
}
// Return the capture-session state callbacks bound to this instance.
// fix: the struct has static storage, so `.context = this` was only
// evaluated on the first call ever - any later instance received a stale
// context pointer. The context is now refreshed on every call.
ACameraCaptureSession_stateCallbacks* CPhoneDevice2::GetSessionListener()
{
    static ACameraCaptureSession_stateCallbacks sessionListener = {
            .context = this,
            .onClosed = CPhoneDevice2::capture_session_on_closed,
            .onReady = CPhoneDevice2::capture_session_on_ready,
            .onActive = CPhoneDevice2::capture_session_on_active,
    };
    sessionListener.context = this;
    return &sessionListener;
}
// Consume the next frame from the reader: repack the three YUV planes into
// one contiguous buffer, convert to RGB with OpenCV, rotate 180 degrees,
// draw the OSD label and write the JPEG to mPath. Finishes by reporting the
// result through TakePhotoCb.
// fix: the repack buffer allocated with new[] was leaked on every frame;
// also removed the dead debug branch `if (res) { int aa = 0; }`.
void CPhoneDevice2::ImageCallback(AImageReader *reader)
{
    bool res = false;
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    if (status == AMEDIA_OK && image)
    {
        int32_t srcFormat = -1;
        AImage_getFormat(image, &srcFormat);
        AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
        int32_t srcPlanes = 0;
        AImage_getNumberOfPlanes(image, &srcPlanes);
        AASSERT(srcPlanes == 3, "Is not 3 planes");
        AImageCropRect srcRect;
        AImage_getCropRect(image, &srcRect);
        int32_t width = srcRect.right - srcRect.left;
        int32_t height = srcRect.bottom - srcRect.top;
        uint8_t *yPixel = nullptr;
        uint8_t *uPixel = nullptr;
        uint8_t *vPixel = nullptr;
        int32_t yLen = 0;
        int32_t uLen = 0;
        int32_t vLen = 0;
        AImage_getPlaneData(image, 0, &yPixel, &yLen);
        AImage_getPlaneData(image, 1, &uPixel, &uLen);
        AImage_getPlaneData(image, 2, &vPixel, &vLen);
        // Pack Y then V then U contiguously so the buffer can be treated as
        // NV21 by cvtColor below.
        // NOTE(review): this assumes the device reports NV21-compatible
        // (interleaved VU) chroma planes - confirm on target hardware.
        uint8_t * data = new uint8_t[yLen + vLen + uLen];
        memcpy(data, yPixel, yLen);
        memcpy(data+yLen, vPixel, vLen);
        memcpy(data+yLen+vLen, uPixel, uLen);
        {
            cv::Mat mYUV = cv::Mat(((height * 3) >> 1), width, CV_8UC1, data);
            cv::Mat _yuv_rgb_img(height, width, CV_8UC4), _yuv_gray_img;
            cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);
            cv::rotate(_yuv_rgb_img, _yuv_rgb_img, cv::ROTATE_180);
            // Burn in the OSD label: black outline first, then white fill.
            const char *str = "OSD";
            putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 0, 0), 4,cv::LINE_AA);
            putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(255, 255, 255), 2,cv::LINE_AA);
            vector <int> compression_params;
            compression_params.push_back(cv::IMWRITE_JPEG_QUALITY);
            compression_params.push_back(80);
            res = cv::imwrite(mPath.c_str(), _yuv_rgb_img, compression_params);
        }
        // Safe to free now: the cv::Mat views above are out of scope.
        delete[] data;
        AImage_delete(image);
        TakePhotoCb(res, mPhotoInfo, mPath, time(NULL));
    }
}
// Static trampoline for the AImageReader listener: recover the instance
// from the opaque context and forward to the member handler.
void CPhoneDevice2::OnImageCallback(void *ctx, AImageReader *reader)
{
    auto* self = reinterpret_cast<CPhoneDevice2*>(ctx);
    if (self != nullptr)
    {
        self->ImageCallback(reader);
    }
}
// Dump a single-plane image (e.g. a JPEG-format AImage) to `path`.
// Returns true only if the whole plane was written.
// fix: the fwrite return value was ignored, so a partial/failed write was
// still reported as success.
bool CPhoneDevice2::WriteFile(AImage *image, const string& path)
{
    int planeCount = 0;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
    ALOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
    if (!(status == AMEDIA_OK && planeCount == 1))
    {
        ALOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
        return false;
    }
    uint8_t *data = nullptr;
    int len = 0;
    AImage_getPlaneData(image, 0, &data, &len);
    if (data == nullptr || len <= 0)
    {
        return false;
    }
    FILE *file = fopen(path.c_str(), "wb");
    if (file == nullptr)
    {
        return false;
    }
    bool res = (fwrite(data, 1, len, file) == (size_t)len);
    fclose(file);
    if (res)
    {
        ALOGI("Capture: %s", path.c_str());
    }
    return res;
}
// Static convenience wrapper: write `image` to the instance's current
// target path (mPath, via GetFileName()).
bool CPhoneDevice2::WriteFile(CPhoneDevice2* pThis, AImage *image)
{
    return pThis->WriteFile(image, pThis->GetFileName());
}
// Returns the output path of the photo currently being captured
// (set by TakePhoto).
std::string CPhoneDevice2::GetFileName() const
{
    return mPath;
}
/*
const char *selectedCameraId = NULL;
ACameraManager *cameraManager = ACameraManager_create();
*/
// Pick the smallest supported YUV_420_888 stream size that is >= the
// requested capture size and has the same aspect ratio. On success resCap
// is updated with the chosen size and true is returned; otherwise resCap
// keeps the requested (orientation-corrected) size and false is returned.
// fix: the ACameraMetadata obtained here was never freed (leak per call).
bool CPhoneDevice2::MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
                                            ImageFormat* resCap) {
    DisplayDimension disp(resCap->width,resCap->height);
    // Sensor mounted sideways: compare against the flipped dimensions.
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
        disp.Flip();
    }
    ACameraMetadata* metadata;
    camera_status_t camera_status = ACAMERA_OK;
    camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId, &metadata);
    ACameraMetadata_const_entry entry;
    camera_status = ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
    // Entry layout: int32 quadruples of (format, width, height, isInput).
    bool foundIt = false;
    DisplayDimension foundRes(16384, 16384);
    DisplayDimension maxJPG(0, 0);
    for (int i = 0; i < entry.count; i += 4) {
        int32_t input = entry.data.i32[i + 3];
        int32_t format = entry.data.i32[i + 0];
        if (input) continue;    // skip input (reprocessing) streams
        if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
            DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
            ALOGI("Camera Resolution: %d x %d fmt=%d", res.width(), res.height(), format);
            if (!disp.IsSameRatio(res)) continue;
            if (format == AIMAGE_FORMAT_YUV_420_888 && res > disp) {
                foundIt = true;
                foundRes = res;
            } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
                maxJPG = res;
            }
        }
    }
    // entry.data points into metadata, so free only after the loop.
    ACameraMetadata_free(metadata);
    if (foundIt) {
        resCap->width = foundRes.org_width();
        resCap->height = foundRes.org_height();
    } else {
        ALOGI("Did not find any compatible camera resolution, taking 640x480");
        resCap->width = disp.org_width();
        resCap->height = disp.org_height();
    }
    return foundIt;
}
/**
* Convert yuv image inside AImage into ANativeWindow_Buffer
* ANativeWindow_Buffer format is guaranteed to be
* WINDOW_FORMAT_RGBX_8888
* WINDOW_FORMAT_RGBA_8888
* @param buf a {@link ANativeWindow_Buffer } instance, destination of
* image conversion
* @param image a {@link AImage} instance, source of image conversion.
* it will be deleted via {@link AImage_delete}
*/
// Convert the YUV_420_888 source image into the RGBX/RGBA window buffer,
// applying the configured display rotation, then delete the source image.
// Always returns true; unsupported inputs trip an assertion instead.
bool CPhoneDevice2::DisplayImage(ANativeWindow_Buffer *buf, AImage *image) {
    // Destination must be a 32-bit RGBX/RGBA window buffer.
    AASSERT(buf->format == WINDOW_FORMAT_RGBX_8888 ||
            buf->format == WINDOW_FORMAT_RGBA_8888,
            "Not supported buffer format");
    int32_t srcFormat = -1;
    AImage_getFormat(image, &srcFormat);
    AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
    int32_t srcPlanes = 0;
    AImage_getNumberOfPlanes(image, &srcPlanes);
    AASSERT(srcPlanes == 3, "Is not 3 planes");
    // Dispatch on the configured display rotation.
    if (presentRotation_ == 0) {
        PresentImage(buf, image);
    } else if (presentRotation_ == 90) {
        PresentImage90(buf, image);
    } else if (presentRotation_ == 180) {
        PresentImage180(buf, image);
    } else if (presentRotation_ == 270) {
        PresentImage270(buf, image);
    } else {
        AASSERT(0, "NOT recognized display rotation: %d", presentRotation_);
    }
    // The source image is consumed by this call.
    AImage_delete(image);
    image = nullptr;
    return true;
}
/*
* PresentImage()
* Converting yuv to RGB
* No rotation: (x,y) --> (x, y)
* Refer to:
* https://mathbits.com/MathBits/TISection/Geometry/Transformations2.htm
*/
// No-rotation YUV -> RGB conversion into the window buffer: (x, y) -> (x, y).
// Side effect: caches plane pointers/strides/lengths in the member fields
// (yStride, uvStride, yPixel/uPixel/vPixel, yLen/uLen/vLen, uvPixelStride).
void CPhoneDevice2::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);
    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    // NOTE(review): the imageBuffer_ pre-assignments below are overwritten
    // by AImage_getPlaneData, which returns the image's own plane pointers.
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);
    int32_t rowStride;
    AImage_getPlaneRowStride(image, 0, &rowStride);
    // Clip to whichever is smaller: the window buffer or the crop rect.
    int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));
    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
        // Chroma is subsampled 2x vertically; one UV row serves two Y rows.
        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
        for (int32_t x = 0; x < width; x++) {
            // Chroma is subsampled 2x horizontally as well.
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
        }
        out += buf->stride;
    }
}
/*
* PresentImage90()
* Converting YUV to RGB
* Rotation image anti-clockwise 90 degree -- (x, y) --> (-y, x)
*/
// YUV -> RGB with a 90-degree anti-clockwise rotation: (x, y) -> (-y, x).
// Each source row is written as a destination column, starting from the
// rightmost column and moving left. Side effect: caches plane pointers and
// strides in the member fields, as in PresentImage().
void CPhoneDevice2::PresentImage90(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);
    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);
    // Width/height are swapped relative to the buffer because of rotation.
    int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));
    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    // Start at the last column of the first row.
    out += height - 1;
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            // [x, y]--> [-y, x]: walk down the destination column.
            int testb = pU[uv_offset];
            int testc = pV[uv_offset];
            int testA = pY[x];
            out[x * buf->stride] = YUV2RGB(testA, testb, testc);
        }
        out -= 1; // move to the next column
    }
}
/*
* PresentImage180()
* Converting yuv to RGB
* Rotate image 180 degree: (x, y) --> (-x, -y)
*/
// YUV -> RGB with a 180-degree rotation: (x, y) -> (-x, -y).
// Rows are written bottom-up and each row is mirrored horizontally.
// Side effect: caches plane pointers and strides in the member fields,
// as in PresentImage().
void CPhoneDevice2::PresentImage180(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);
    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);
    int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));
    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    // Start at the last destination row and walk upwards.
    out += (height - 1) * buf->stride;
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            // mirror image since we are using front camera
            out[width - 1 - x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
        }
        out -= buf->stride;
    }
}
/*
* PresentImage270()
* Converting image from YUV to RGB
* Rotate Image counter-clockwise 270 degree: (x, y) --> (y, x)
*/
// YUV -> RGB with a 270-degree counter-clockwise rotation: (x, y) -> (y, x).
// Each source row is written as a destination column, starting from the
// leftmost column and moving right, with the column itself reversed.
// Side effect: caches plane pointers and strides in the member fields,
// as in PresentImage().
void CPhoneDevice2::PresentImage270(ANativeWindow_Buffer *buf, AImage *image) {
    AImageCropRect srcRect;
    AImage_getCropRect(image, &srcRect);
    AImage_getPlaneRowStride(image, 0, &yStride);
    AImage_getPlaneRowStride(image, 1, &uvStride);
    yPixel = imageBuffer_;
    AImage_getPlaneData(image, 0, &yPixel, &yLen);
    vPixel = imageBuffer_ + yLen;
    AImage_getPlaneData(image, 1, &vPixel, &vLen);
    uPixel = imageBuffer_ + yLen + vLen;
    AImage_getPlaneData(image, 2, &uPixel, &uLen);
    AImage_getPlanePixelStride(image, 1, &uvPixelStride);
    // Width/height are swapped relative to the buffer because of rotation.
    int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
    int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));
    uint32_t *out = static_cast<uint32_t *>(buf->bits);
    for (int32_t y = 0; y < height; y++) {
        const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
        int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
        const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
        const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
        for (int32_t x = 0; x < width; x++) {
            const int32_t uv_offset = (x >> 1) * uvPixelStride;
            int testb = pU[uv_offset];
            int testc = pV[uv_offset];
            int testA = pY[x];
            // (x, y) -> (y, x): write the reversed destination column.
            out[(width - 1 - x) * buf->stride] =
                    YUV2RGB(testA, testb, testc);
        }
        out += 1; // move to the next column
    }
}
/*
bool CPhoneDevice2::SendBroadcastMessage(String16 action, int value)
{
TM_INFO_LOG("sendBroadcastMessage(): Action: %s, Value: %d ", action.string(), value);
sp <IServiceManager> sm = defaultServiceManager();
sp <IBinder> am = sm->getService(String16("activity"));
if (am != NULL) {
Parcel data, reply;
data.writeInterfaceToken(String16("android.app.IActivityManager"));
data.writeStrongBinder(NULL);
// intent begin
data.writeString16(action); // action
data.writeInt32(0); // URI data type
data.writeString16(NULL, 0); // type
data.writeInt32(0); // flags
data.writeString16(NULL, 0); // package name
data.writeString16(NULL, 0); // component name
data.writeInt32(0); // source bound - size
data.writeInt32(0); // categories - size
data.writeInt32(0); // selector - size
data.writeInt32(0); // clipData - size
data.writeInt32(-2); // contentUserHint: -2 -> UserHandle.USER_CURRENT
data.writeInt32(-1); // bundle extras length
data.writeInt32(0x4C444E42); // 'B' 'N' 'D' 'L'
int oldPos = data.dataPosition();
data.writeInt32(1); // size
// data.writeInt32(0); // VAL_STRING, need to remove because of analyze common intent
data.writeString16(String16("type"));
data.writeInt32(1); // VAL_INTEGER
data.writeInt32(value);
int newPos = data.dataPosition();
data.setDataPosition(oldPos - 8);
data.writeInt32(newPos - oldPos); // refill bundle extras length
data.setDataPosition(newPos);
// intent end
data.writeString16(NULL, 0); // resolvedType
data.writeStrongBinder(NULL); // resultTo
data.writeInt32(0); // resultCode
data.writeString16(NULL, 0); // resultData
data.writeInt32(-1); // resultExtras
data.writeString16(NULL, 0); // permission
data.writeInt32(0); // appOp
data.writeInt32(-1); // option
data.writeInt32(1); // serialized: != 0 -> ordered
data.writeInt32(0); // sticky
data.writeInt32(-2); // userId: -2 -> UserHandle.USER_CURRENT
status_t ret = am->transact(IBinder::FIRST_CALL_TRANSACTION + 13, data,
&reply); // BROADCAST_INTENT_TRANSACTION
if (ret == NO_ERROR) {
int exceptionCode = reply.readExceptionCode();
if (exceptionCode) {
TM_INFO_LOG("sendBroadcastMessage(%s) caught exception %d\n",
action.string(), exceptionCode);
return false;
}
} else {
return false;
}
} else {
TM_INFO_LOG("getService() couldn't find activity service!\n");
return false;
}
return true;
}
*/
// NDK camera device disconnect callback; only logs the event.
// fix: corrected the "diconnected" typo in the log message and removed the
// unused CPhoneDevice2* local left over from debugging.
void CPhoneDevice2::camera_device_on_disconnected(void *context, ACameraDevice *device)
{
    ALOGI("Camera(id: %s) is disconnected.\n", ACameraDevice_getId(device));
}
// NDK camera device error callback; only logs the error code (no recovery
// is attempted here).
void CPhoneDevice2::camera_device_on_error(void *context, ACameraDevice *device, int error)
{
    ALOGI("Error(code: %d) on Camera(id: %s).\n", error, ACameraDevice_getId(device));
}
// Capture-session lifecycle callbacks; all three are logging-only stubs.
void CPhoneDevice2::capture_session_on_ready(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is ready. %p\n", session);
}
void CPhoneDevice2::capture_session_on_active(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is activated. %p\n", session);
}
void CPhoneDevice2::capture_session_on_closed(void *context, ACameraCaptureSession *session)
{
    ALOGI("Session is closed. %p\n", session);
}

@ -0,0 +1,124 @@
#ifndef __PHONE_DEVICE2_H__
#define __PHONE_DEVICE2_H__
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <map>
#include <atomic>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImageReader.h>
#include <Client/Device.h>
#include <string>
#include "camera2/Camera2Helper.h"
// IDevice implementation backed by the Android NDK Camera2 API and a Java
// service (reached over JNI) for timer scheduling. One instance drives one
// still capture at a time: TakePhoto() opens the camera and the resulting
// frame is delivered asynchronously through OnImageCallback.
class CPhoneDevice2 : public IDevice
{
public:
    CPhoneDevice2(JavaVM* vm, jobject service);
    virtual ~CPhoneDevice2();
    virtual void SetListener(IListener* listener);
    virtual bool UpdateTime(time_t ts);
    virtual bool Reboot();
    virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout);
    virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const string& path);
    virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout);
    virtual bool UnregisterTimer(timer_uid_t uid);
    virtual bool FireTimer(timer_uid_t uid);
protected:
    ACameraCaptureSession_stateCallbacks *GetSessionListener();
    // Output path of the photo currently being captured.
    std::string GetFileName() const;
    bool SendBroadcastMessage(std::string action, int value);
    // Select a supported camera stream size matching the requested capture.
    bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
                                 ImageFormat* resCap);
    // YUV -> RGB window-buffer converters for the four display rotations.
    bool DisplayImage(ANativeWindow_Buffer* buf, AImage* image);
    void PresentImage(ANativeWindow_Buffer* buf, AImage* image);
    void PresentImage90(ANativeWindow_Buffer* buf, AImage* image);
    void PresentImage180(ANativeWindow_Buffer* buf, AImage* image);
    void PresentImage270(ANativeWindow_Buffer* buf, AImage* image);
    // NDK camera/session lifecycle callbacks (static trampolines).
    static void camera_device_on_disconnected(void *context, ACameraDevice *device);
    static void camera_device_on_error(void *context, ACameraDevice *device, int error);
    static void capture_session_on_ready(void *context, ACameraCaptureSession *session);
    static void capture_session_on_active(void *context, ACameraCaptureSession *session);
    static void capture_session_on_closed(void *context, ACameraCaptureSession *session);
    // Frame delivery: static trampoline forwards to the instance handler.
    void ImageCallback(AImageReader *reader);
    static void OnImageCallback(void *ctx, AImageReader *reader);
    bool WriteFile(AImage *image, const string& path);
    static bool WriteFile(CPhoneDevice2* pThis, AImage *image);
    // Forward the capture result to the registered listener, if any.
    inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime)
    {
        if (m_listener != NULL)
        {
            std::vector<IDevice::RECOG_OBJECT> objects;
            return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects);
        }
        return false;
    }
protected:
    JavaVM* m_vm;
    jobject m_javaService;          // global ref to the Java-side service
    jmethodID mRegisterTimerMid;
    jmethodID mRegisterHeartbeatMid;
    jmethodID mUnregisterTimerMid;
    jmethodID mUpdateTimeMid;
    std::string mPath;              // output path of the in-flight capture
    IDevice::PHOTO_INFO mPhotoInfo; // parameters of the in-flight capture
    IListener* m_listener;
    atomic_ulong m_timerUidFeed;    // monotonically increasing uid source
    // uid -> packed (type | fire-count) bookkeeping for registered timers.
    std::map<IDevice::timer_uid_t, unsigned long> mTimers;
    // NDK camera pipeline objects, valid during a capture.
    AImageReader *mAImageReader;
    ANativeWindow *theNativeWindow;
    ACameraDevice *cameraDevice;
    ACaptureRequest *captureRequest;
    ACameraOutputTarget *cameraOutputTarget;
    ACaptureSessionOutput *sessionOutput;
    ACaptureSessionOutputContainer *captureSessionOutputContainer;
    ACameraCaptureSession *captureSession;
    ACameraDevice_StateCallbacks deviceStateCallbacks;
    ACameraCaptureSession_stateCallbacks captureSessionStateCallbacks;
    DisplayDimension mDisplayDimension;
    int32_t presentRotation_;       // display rotation handled by DisplayImage
    int32_t imageHeight_;
    int32_t imageWidth_;
    uint8_t* imageBuffer_;          // RGBA scratch buffer (malloc'd in TakePhoto)
    // Plane state cached by the PresentImage* converters.
    int32_t yStride, uvStride;
    uint8_t *yPixel, *uPixel, *vPixel;
    int32_t yLen, uLen, vLen;
    int32_t uvPixelStride;
};
#endif // __PHONE_DEVICE2_H__

@ -1,462 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#include "PtzController.h"
#include "SensorsProtocol.h"
#include "GPIOControl.h"
#include "PhoneDevice.h"
#include "time.h"
#include <memory>
// Bind the controller to its owning device; the worker thread is started
// separately via Startup().
PtzController::PtzController(CPhoneDevice* pPhoneDevice) : m_pPhoneDevice(pPhoneDevice)
{
    m_exit = false;
}
// Launch the PTZ worker thread (runs PtzProc via the static trampoline).
void PtzController::Startup()
{
    m_thread = std::thread(PtzThreadProc, this);
}
// Static thread entry point: forwards to the instance's main loop.
void PtzController::PtzThreadProc(PtzController* pThis)
{
    pThis->PtzProc();
}
// Queue a single serial PTZ command and wake the worker thread.
// fix: replaced the unbounded strcpy into the fixed-size serfile buffer
// with a bounded copy (cmd is zero-initialized, so the result is always
// NUL-terminated).
void PtzController::AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr)
{
    SERIAL_CMD cmd = { 0 };
    cmd.channel = channel;
    cmd.preset = preset;
    cmd.cmdidx = cmdidx;
    cmd.bImageSize = bImageSize;
    strncpy(cmd.serfile, serfile, sizeof(cmd.serfile) - 1);
    cmd.baud = baud;
    cmd.addr = addr;
    cmd.ts = time(NULL);
    m_locker.lock();
    m_cmds.push_back(cmd);
    m_locker.unlock();
    // One release per queued command so the worker wakes to process it.
    m_sem.release();
}
// Queue a power-on/preset command followed by a Take_Photo command for the
// worker thread, waking it once per queued command.
// fix: `cmdPreset.channel = photoInfo.preset;` overwrote the channel
// assigned on the previous line and left cmdPreset.preset unset; it now
// assigns the preset field. Also replaced unbounded strcpy with bounded
// copies into the fixed-size serfile buffers.
void PtzController::AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds)
{
    IDevice::SerialsPhotoParam param = { "", 0, 0 };
    m_pPhoneDevice->GetPhotoSerialsParamCb(param);
    SERIAL_CMD cmdPreset = { 0 };
    time_t ts = time(NULL);
#if 1
    // if (photoInfo.preset != 0 && photoInfo.preset != 0xFF)
    {
        cmdPreset.ts = photoInfo.selfTestingTime;
        cmdPreset.delayTime = photoInfo.closeDelayTime;
        cmdPreset.channel = photoInfo.channel;
        cmdPreset.preset = photoInfo.preset;
        cmdPreset.cmdidx = PHOTO_OPEN_POWER;
        strncpy(cmdPreset.serfile, param.serfile, sizeof(cmdPreset.serfile) - 1);
        cmdPreset.baud = param.baud;
        cmdPreset.addr = param.addr;
    }
#endif
    SERIAL_CMD cmd = { 0 };
    cmd.ts = ts;
    cmd.delayTime = photoInfo.closeDelayTime;
    cmd.channel = photoInfo.channel;
    cmd.preset = photoInfo.preset;
    cmd.cmdidx = Take_Photo;
    cmd.bImageSize = photoInfo.resolution;
    strncpy(cmd.serfile, param.serfile, sizeof(cmd.serfile) - 1);
    cmd.baud = param.baud;
    cmd.addr = param.addr;
    // Photo parameters travel with the command so the worker can take the
    // shot later without re-querying the device.
    PtzPhotoParams* ppp = new PtzPhotoParams(photoInfo, path, osds);
    cmd.photoParams.reset(ppp);
    m_locker.lock();
#if 1
    if (cmdPreset.cmdidx != 0)
    {
        m_cmds.push_back(cmdPreset);
    }
#endif
    m_cmds.push_back(cmd);
    m_locker.unlock();
    m_sem.release();
    m_sem.release();
}
// Signal the worker thread to exit and block until it has joined.
void PtzController::ExitAndWait()
{
    m_exit = true;
    // Wake the worker in case it is blocked on the semaphore.
    m_sem.release();
    if (m_thread.joinable())
    {
        m_thread.join();
    }
}
void PtzController::PtzProc()
{
    // Worker-thread main loop: consumes queued SERIAL_CMDs (photo / power /
    // PTZ control) and drives a small power & self-test state machine for the
    // camera and PTZ. Runs until m_exit is set by ExitAndWait().
    PROC_PTZ_STATE state = PTZS_POWER_OFF;      // current power/self-test state
    SERIAL_CMD cmd;
    PTZ_STATE ptz_state;
    bool hasCmd = false;
    int i=0;
    int closecmd=0;                             // 1 => a close request arrived during photo self-test
    std::shared_ptr<PowerControl> powerCtrl;    // holds camera power; reset() cuts power
    time_t selfTestingStartTime = 0;            // when the current self-test started
    time_t selfTestingWaitTime = 0;             // max time to wait for self-test completion
    time_t PTZ_preset_start_time = 0;           // when a pre-photo preset move started
    time_t PTZ_preset_wait_time = 0;            // max time to wait for the preset move
    time_t close_delay_time = CAMERA_CLOSE_DELAYTIME; // idle delay before auto power-off (manual ops)
    time_t start_delay_time = 0;                // start of the idle period; 0 => short auto-close path
    time_t auto_delay_time = 0;                 // start of the short auto-close window
    time_t auto_wait_time = WAIT_TIME_AUTO_CLOSE; // short delay before power-off after automatic photos
    time_t photo_move_preset_time = 0;          // time of the last pre-photo preset move (stale-cmd filter)
    int iwaitime = 0;                           // 0/1: auto-close window not yet armed / armed
    while(true)
    {
        m_sem.acquire();
        if (m_exit)
        {
            break;
        }
        hasCmd = false;
        // Pick the next runnable command. While self-testing, Take_Photo
        // commands are deferred (left in the queue) and only non-photo
        // commands are taken.
        m_locker.lock();
        for (auto it = m_cmds.begin(); it != m_cmds.end(); ++it)
        {
            if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
            {
                // find first non-taking-photo cmd
                if (it->cmdidx != Take_Photo)
                {
                    cmd = *it;
                    m_cmds.erase(it);
                    hasCmd = true;
                    break;
                }
            }
            else
            {
                cmd = *it;
                m_cmds.erase(it);
                hasCmd = true;
                break;
            }
        }
        m_locker.unlock();
        if (!hasCmd)
        {
            // No runnable command: either poll self-test progress, or count
            // down toward automatic power-off.
            if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
            {
                time_t timeout = time(NULL) - selfTestingStartTime;
                if(timeout < 0)
                    selfTestingStartTime = time(NULL); // clock went backwards (time sync); restart timing
                if (timeout >= selfTestingWaitTime)
                {
                    // Self-test reply never arrived within the window; assume idle.
                    XYLOG(XYLOG_SEVERITY_INFO, "超时(%u秒)未收到云台自检结束应答,状态改为空闲!", (uint32_t)timeout);
                    state = PTZS_IDLE;
                    m_sem.release();
                    continue;
                }
                else
                {
                    //if(timeout >= CAMERA_SELF_TEST_TIME)
                    {
#ifndef NDEBUG
                        // Debug builds: log only at timeout==1 and every 10s to limit spam.
                        if (timeout == 1 || ((timeout % 10) == 0))
#endif
                        {
                            XYLOG(XYLOG_SEVERITY_INFO, "开始查询云台自检状态timeout=%u秒", (uint32_t)timeout);
                        }
                        // Poll the PTZ; ptz_status == 0 means the self-test finished.
                        if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
                        {
                            if(0 == ptz_state.ptz_status)
                            {
                                XYLOG(XYLOG_SEVERITY_INFO, "收到云台自检结束应答状态改为空闲timeout=%u秒", (uint32_t)timeout);
                                state = PTZS_IDLE;
                                m_sem.release();
                                continue;
                            }
                        }
                    }
                }
                // Re-arm the semaphore so this branch keeps polling once per second.
                std::this_thread::sleep_for(std::chrono::milliseconds(1000));
                m_sem.release();
                continue;
            }
            if(0 == start_delay_time)
            {
                // Automatic-photo path: wait a short WAIT_TIME_AUTO_CLOSE
                // window so several presets can be captured in a row without
                // re-running the camera self-test each time.
                if(0 == iwaitime)
                {
                    auto_delay_time = time(NULL);
                    iwaitime += 1;
                    m_sem.release();
                    continue;
                }
                else
                {
                    if(time(NULL) - auto_delay_time < 0)
                    {
                        auto_delay_time = time(NULL); // clock went backwards; restart timing
                    }
                    if(time(NULL) - auto_delay_time >= auto_wait_time)
                    {
                        iwaitime = 0;
                        XYLOG(XYLOG_SEVERITY_INFO, "摄像机自动上电延时时间超过%u秒准备关闭摄像机", (uint32_t)auto_wait_time);
                    }
                    else
                    {
                        m_sem.release();
                        continue;
                    }
                }
            }
            else
            {
                if(time(NULL) - start_delay_time < 0)
                {/* Guard against the system clock being set back by another
                    thread's time sync while waiting, which would otherwise
                    keep the camera powered indefinitely. */
                    start_delay_time = time(NULL);
                }
                if(time(NULL) - start_delay_time >= close_delay_time)
                {
                    XYLOG(XYLOG_SEVERITY_INFO, "摄像机空闲时间超过%u秒准备关闭摄像机", (uint32_t)close_delay_time);
                }
                else
                {
                    m_sem.release();
                    continue;
                }
            }
            // Idle timeout expired: power everything off.
            if (state == PTZS_POWER_OFF)
            {
                closecmd = 0;
                XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发,摄像机本来就处于关机状态!");
                // Do Nothing
            }
            else
            {
                // Notify the PTZ up to 3 times that power is about to be cut.
                XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发通知云台准备关机state=%d", state);
                for(i=0; i<3; i++)
                {
                    if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
                        break;
                }
                powerCtrl.reset(); // releasing the power controller cuts camera power
                closecmd = 0;
                state = PTZS_POWER_OFF;
                XYLOG(XYLOG_SEVERITY_INFO, "自动触发关闭云台电源state=%d", state);
            }
            start_delay_time = 0;
            continue;
        }
        switch (cmd.cmdidx)
        {
        case Take_Photo:
        {
            if (state == PTZS_POWER_OFF)
            {
                if (!powerCtrl)
                {
                    // Powered off: power on first, then requeue this photo at
                    // the queue front so it runs after self-test completes.
                    //powerCtrl = std::make_shared<PlzCameraPowerCtrl>(cmd.photoParams->mPhotoInfo.closeDelayTime);
                    powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
                    selfTestingStartTime = time(NULL);
                    selfTestingWaitTime = cmd.photoParams->mPhotoInfo.selfTestingTime;
                    state = PTZS_PHOTO_SELF_TESTING;
                    XYLOG(XYLOG_SEVERITY_INFO, "1、收到拍照指令摄像机从关机状态改为自检状态");
                    m_locker.lock();
                    m_cmds.insert(m_cmds.begin(), cmd);
                    m_locker.unlock();
                    m_sem.release();
                    continue;
                }
            }
            if(cmd.photoParams->mPhotoInfo.scheduleTime == 0)
            {
                // Manual photo (no schedule): refresh the idle power-off timer
                // unless a close command already arrived during self-test.
                if(1 == closecmd)
                {
                    XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令但同时后续收到关机指令等待拍完照片再关机。state=%d", state);
                }
                else
                {
                    start_delay_time = time(NULL);
                    XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令state=%d", state);
                }
            }
            else
                XYLOG(XYLOG_SEVERITY_INFO, "2、收到自动拍照指令state=%d", state);
            state = PTZS_TAKING_PHOTO;
            if (cmd.preset != 0 && cmd.preset != 0xFF)
            {
                // Move to the requested preset first, then wait (bounded) for
                // the move to finish before capturing.
                CameraPhotoCmd(0, cmd.channel, MOVE_PRESETNO, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
#if 0
                if(START_ONCE_SELF == cmd.preset)
                {
                    selfTestingStartTime = time(NULL);
                    selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
                    state = PTZS_SELF_TESTING;
                    m_sem.release();
                    XYLOG(XYLOG_SEVERITY_INFO, "拍照调用200号预置点指令摄像机启动一次性自检从拍照状态改为自检状态取消拍照动作设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
                    break;
                }
#endif
                PTZ_preset_start_time = time(NULL);
                // Preset 200 (START_ONCE_SELF) triggers a self-test, which takes longer.
                if(START_ONCE_SELF == cmd.preset)
                    PTZ_preset_wait_time = CAMERA_SELF_TEST_TIME;
                else
                    PTZ_preset_wait_time = MOVE_PRESET_WAIT_TIME;
                XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前开始调用预置点%ustate=%d", (uint32_t)cmd.preset, state);
                for(;;)
                {
                    if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
                    {
                        if(0 == ptz_state.ptz_status)
                        {
                            // ptz_status == 0 => the move finished.
                            XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u收到移动结束应答移动时长=%d秒 state=%d", (uint32_t)cmd.preset, (uint32_t)(time(NULL)-PTZ_preset_start_time), state);
                            break;
                        }
                    }
                    if(time(NULL) - PTZ_preset_start_time < 0)
                    {/* Guard against the system clock being set back by another
                        thread's time sync, which would otherwise make this loop
                        wait for the preset indefinitely. */
                        PTZ_preset_start_time = time(NULL);
                    }
                    if(time(NULL) - PTZ_preset_start_time >= PTZ_preset_wait_time)
                    {
                        XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u摄像机在%u秒内未收到调用预置点结束应答state=%d", (uint32_t)cmd.preset, (uint32_t)PTZ_preset_wait_time, state);
                        break;
                    }
                    std::this_thread::sleep_for(std::chrono::milliseconds(10));
                    photo_move_preset_time = time(NULL); // remember when we were still moving (stale-cmd filter)
                }
            }
            // Dispatch by media type: 1 = video, stream types = push streaming,
            // anything else = still photo.
            if(cmd.photoParams->mPhotoInfo.mediaType == 1)
                m_pPhoneDevice->TakeVideoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
            else if ((cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF))
            {
                m_pPhoneDevice->StartPushStreaming(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
            }
            else
                m_pPhoneDevice->TakePhotoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
            state = PTZS_IDLE;
        }
            break;
        case PHOTO_OPEN_POWER:
            // Power-on requested ahead of a photo command.
            if (state == PTZS_POWER_OFF)
            {
                if (!powerCtrl)
                {
                    powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
                    selfTestingStartTime = time(NULL);
                    selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
                    state = PTZS_PHOTO_SELF_TESTING;
                    m_sem.release();
                    XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
                }
            }
            else
            {
                XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机摄像机处于state=%d", state);
            }
            break;
        case OPEN_TOTAL:
            // Manual power-on command.
            if (state == PTZS_POWER_OFF)
            {
                if (!powerCtrl)
                {
                    powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
                    selfTestingStartTime = time(NULL);
                    selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
                    state = PTZS_SELF_TESTING;
                    m_sem.release();
                    XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
                }
            }
            else
            {
                XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令摄像机处于state=%d", state);
            }
            closecmd = 0;
            start_delay_time = time(NULL); // restart the idle power-off countdown
            XYLOG(XYLOG_SEVERITY_INFO, "收到手动打开摄像机指令刷新关机计时初始值state=%d", state);
            break;
        case CLOSE_TOTAL:
            // Manual power-off command.
            if (state == PTZS_POWER_OFF)
            {
                closecmd = 0;
                XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令,摄像机本来就处于关机状态!");
                // Do Nothing
            }
            else if(PTZS_PHOTO_SELF_TESTING == state)
            {
                // Photo self-test in progress: defer the power-off until the
                // pending photo completes (handled by the auto-close path).
                closecmd = 1;
                XYLOG(XYLOG_SEVERITY_INFO, "在拍照自检过程中收到关机指令取消延时关机转到自动关机处理state=%d", state);
            }
            else
            {
                // Notify the PTZ up to 3 times, then cut power.
                XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令通知云台准备关机state=%d", state);
                for(i=0; i<3; i++)
                {
                    if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
                        break;
                }
                closecmd = 0;
                powerCtrl.reset(); // cut camera power
                state = PTZS_POWER_OFF;
                XYLOG(XYLOG_SEVERITY_INFO, "关闭云台电源state=%d", state);
            }
            start_delay_time = 0;
            break;
        default:
        {
            // Any other cmdidx is a manual PTZ control command.
            if (state == PTZS_POWER_OFF)
            {
                XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令,摄像机处于关机状态,无法执行!");
                CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
                break;
            }
            start_delay_time = time(NULL); // restart the idle power-off countdown
            XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令刷新关机计时初始值state=%d", state);
            if(cmd.ts <= photo_move_preset_time)
            {
                // Drop control commands that were issued while a pre-photo
                // preset move was still running (they are stale).
                XYLOG(XYLOG_SEVERITY_INFO, "丢弃拍照调预置点期间收到的控制云台指令,指令时间" FMT_TIME_T ",拍照时间" FMT_TIME_T "state=%d", cmd.ts, photo_move_preset_time, state);
            }
            else
            {
                if((MOVE_PRESETNO == cmd.cmdidx) && (START_ONCE_SELF == cmd.preset))
                {
                    // Calling preset 200 triggers a one-shot self-test.
                    selfTestingStartTime = time(NULL);
                    selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
                    state = PTZS_SELF_TESTING;
                    m_sem.release();
                    XYLOG(XYLOG_SEVERITY_INFO, "收到调用200号预置点指令摄像机启动一次性自检从当前状态改为自检状态设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
                }
                CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
            }
        }
            break;
        }
    }
}

@ -1,100 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#ifndef MICROPHOTO_PTZCONTROLLER_H
#define MICROPHOTO_PTZCONTROLLER_H
#include <Buffer.h>
#include <thread>
#include <vector>
#include <memory>
#include <string>
#include <mutex>
#include <SemaphoreEx.h>
#include <Client/Device.h>
// States of the PTZ worker-thread state machine (see PtzController::PtzProc).
enum PROC_PTZ_STATE
{
    PTZS_POWER_OFF = 0,          // camera/PTZ power is off
    PTZS_IDLE = 1,               // powered on and idle
    PTZS_SELF_TESTING = 2,       // self-test after a manual power-on or preset 200
    PTZS_MOVING = 3,             // moving to a preset position
    PTZS_TAKING_PHOTO = 4,       // photo capture in progress
    PTZS_PHOTO_SELF_TESTING = 5, // self-test triggered by a photo-command power-on
};
#define CAMERA_SELF_TEST_TIME 150 /* Camera self-test time (excluding PTZ self-test)*/
#define MOVE_PRESET_WAIT_TIME 20 /* Waiting for the maximum time for the PTZ to move to the preset position*/
#define CAMERA_CLOSE_DELAYTIME 360 /* Auto Power-Off Timer Setting After Manual Power-On (for Camera)*/
#define PHOTO_OPEN_POWER 16000
#define WAIT_TIME_AUTO_CLOSE 2 /* In order to automatically capture multiple preset point images at the same time and prevent the camera from self checking every time it takes a picture.*/
// Value object bundling everything a queued photo command needs:
// the photo settings, the output path, and the OSD overlay entries.
// All inputs are copied, so the object outlives its sources.
class PtzPhotoParams
{
public:
    PtzPhotoParams(const IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds) :
        mPhotoInfo(photoInfo), mPath(path), mOsds(osds)
    {
    }
    ~PtzPhotoParams() = default;

    IDevice::PHOTO_INFO mPhotoInfo;        // snapshot of the photo settings
    std::string mPath;                     // destination path for the capture
    std::vector<IDevice::OSD_INFO> mOsds;  // OSD overlay entries
};
// One queued command for the PTZ worker thread (see PtzController::PtzProc).
struct SERIAL_CMD
{
    uint8_t channel;    // camera channel
    uint8_t preset;     // preset number; 0 and 0xFF mean "no preset move"
    time_t ts;          // command timestamp (used to drop stale control commands)
    int cmdidx;         // command code (Take_Photo, PHOTO_OPEN_POWER, OPEN_TOTAL, ...)
    uint32_t delayTime; // close-delay value carried with the command
    uint8_t bImageSize; // requested image resolution
    char serfile[128];  // serial device file path
    uint32_t baud;      // serial baud rate
    int addr;           // camera/PTZ address on the serial bus
    std::shared_ptr<PtzPhotoParams> photoParams; // photo parameters (set for Take_Photo commands)
};
class CPhoneDevice;

// Owns the PTZ worker thread: commands are queued by the Add*Command()
// methods and executed sequentially by PtzProc() on the worker thread.
class PtzController
{
public:
    PtzController(CPhoneDevice* pPhoneDevice);
    // Starts the worker thread.
    void Startup();
    // Queues a plain PTZ/serial control command.
    void AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr);
    // Queues a power-on command followed by a take-photo command.
    void AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds);
    // Signals the worker thread to exit and joins it.
    void ExitAndWait();
protected:
    static void PtzThreadProc(PtzController* pThis);
    void PtzProc();
protected:
protected:
    std::mutex m_locker;            // guards m_cmds
    std::vector<SERIAL_CMD> m_cmds; // pending commands (FIFO, front = next)
    CSemaphore m_sem;               // counts pending work for the worker thread
    bool m_exit;                    // set by ExitAndWait() to stop the worker loop
    std::thread m_thread;           // the worker thread
    CPhoneDevice* m_pPhoneDevice;   // owner device, used for capture callbacks
};
#endif //MICROPHOTO_PTZCONTROLLER_H

File diff suppressed because it is too large Load Diff

@ -1,557 +0,0 @@
//
// Created by hyz on 2024/6/5.
//
#ifndef __SENSOR_PROTOCOL_H__
#define __SENSOR_PROTOCOL_H__
#include <string>
#ifndef LOBYTE
#define LOBYTE(w) ((unsigned char)(w))
#endif
#ifndef HIBYTE
#define HIBYTE(w) ((unsigned char)(((unsigned short)(w) >> 8) & 0xFF))
#endif
#ifndef LOWORD
#define LOWORD(l) ((uint16_t)(l))
#endif
#ifndef HIWORD
#define HIWORD(l) ((uint16_t)((uint32_t)(l) >> 16))
#endif
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define MAX_FIELDS_NUM 20 /* BD_NMEA0183单组字符串数据内含数据最大数量*/
#define MAX_SERIAL_DEV_NUM 25 /* 最大接串口传感器数量*/
#define MAX_SERIAL_PORT_NUM 5
#define MAX_DEV_VALUE_NUM 12 /* 一台装置最大的采样值数量*/
#define WEATHER_PROTOCOL 1 /* 温湿度协议序号*/
#define WIND_PROTOCOL 2 /* 风速风向协议序号*/
#define SLANT_PROTOCOL 3 /* 倾斜角协议序号*/
#define RALLY_PROTOCOL 4 /* 拉力协议序号*/
#define PELCO_P_PROTOCOL 5 /* 摄像机Pelco_P协议序号*/
#define PELCO_D_PROTOCOL 6 /* 摄像机Pelco_D协议序号*/
#define SERIALCAMERA_PROTOCOL 8 /* 串口摄像机协议序号*/
#define MUTIWEATHER_PROTOCOL 9 /*多合一气象*/
#define NMEA0183_PROTOCOL 10 /* 单一北斗NMEA0183标准协议*/
#define RESERVE2_PROTOCOL 17 /* 备用2协议序号*/
#define RESERVE4_PROTOCOL 19 /* 备用4协议序号*/
#define RESERVE5_PROTOCOL 20 /* 备用5协议序号*/
#define INVALID_PROTOCOL 21 /* 无效协议序号*/
#define AirTempNo 0 /* 空气温度数据存储序号*/
#define HumidityNo 1 /* 相对湿度数据存储序号*/
#define WindSpeedNo 2 /* 风速数据存储序号*/
#define WindDirectionNo 3 /* 风向数据存储序号*/
#define RainfallNo 4 /* 雨量数据存储序号*/
#define AtmosNo 5 /* 大气压数据存储序号*/
#define OpticalRadiationNo 6 /* 日照(光辐射)数据存储序号*/
#define SER_IDLE 0 /* 传感器处于空闲状态,未启动采样*/
#define SER_SAMPLE 1 /* 正在采样过程中*/
#define SAMPLINGSUCCESS 2 /* 采样结束,正常读取到数据*/
#define SER_STARTSAMPLE 3 /* 启动采样*/
#define SER_SAMPLEFAIL -1 /* 采样失败,未采集到数据,传感器故障或未接*/
#define PHOTO_SAVE_SUCC 5 /* 图片保存成功*/
#define WEATHER_DATA_NUM 8 /* 气象数据最大数量(一般最多是6要素)*/
#define RALLY_DATA_NUM 2 /* 拉力数据最大数量(一般是1个)*/
#define SLANTANGLE_DATA_NUM 3 /* 倾角数据最大数量(一般只有X轴和Y轴值)*/
#define PTZ_MOVETIME 1 // 云台移动等待时间为1秒
#define MAX_CHANNEL_NUM 2 /* 视频通道最大通道*/
#define MAX_PHOTO_FRAME_LEN 1024 /* 图片数据一包最大长度*/
#define MAX_PHOTO_PACKET_NUM 1024 /* 图片最大包数图片最大定为1MB*/
#define RECVDATA_MAXLENTH 2048 /* 接收数据缓冲区最大值*/
#define TIMER_CNT 50 // Poll命令定时器时间 5 ms
#define SENDDATA_MAXLENTH RECVDATA_MAXLENTH /* 正常发送数据缓冲区最大值*/
// 摄像机控制命令宏定义
#define Cmd_Cancel 0x00000000 // 关闭功能
#define SET_PRESETNO 0x00030000 // 设置预置点
#define MOVE_TO_PRESETNO 0x00070000 // 调用预置点
/* 摄像机PELCO-P控制命令宏定义*/
#define P_Auto_Scan 0x20000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define P_IRIS_CLOSE 0x08000000 /* 光圈缩小(1 有效)*/
#define P_IRIS_OPEN 0x04000000 /* 光圈放大(1 有效)*/
#define P_FOCUS_NEAR 0x02000000 /* 近距离聚焦(1 有效)*/
#define P_FOCUS_FAR 0x01000000 /* 远距离聚焦(1 有效)*/
#define P_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/
#define P_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/
#define P_MOVE_DOWN 0x0010001f /* 向下移动镜头(1 有效)*/
#define P_MOVE_UP 0x0008001f /* 向上移动镜头(1 有效)*/
#define P_MOVE_LEFT 0x00041f00 /* 向左移动镜头(1 有效)*/
#define P_MOVE_RIGHT 0x00021f00 /* 向右移动镜头(1 有效)*/
// 摄像机PELCO-D控制命令宏定义
#define D_Auto_Scan 0x10000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define D_IRIS_CLOSE 0x04000000 /* 光圈缩小(1 有效)*/
#define D_IRIS_OPEN 0x02000000 /* 光圈放大(1 有效)*/
#define D_FOCUS_NEAR 0x01000000 /* 近距离聚焦(1 有效)*/
#define D_FOCUS_FAR 0x00800000 /* 远距离聚焦(1 有效)*/
#define D_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/
#define D_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/
#define D_MOVE_DOWN 0x0010002d /* 向下移动镜头(1 有效)*/
#define D_MOVE_UP 0x0008002d /* 向上移动镜头(1 有效)*/
#define D_MOVE_LEFT 0x00042d00 /* 向左移动镜头(1 有效)*/
#define D_MOVE_RIGHT 0x00022d00 /* 向右移动镜头(1 有效)*/
#define D_OPEN_TOTAL 0x0009000B /* 打开总电源(1 有效)*/
#define D_OPEN_MODULE_POWER 0x0009000C /* 打开机芯电源(1 有效)*/
/* 摄像机下发命令宏定义*/
#define TAKE_PHOTO 20000 /* 拍照*/
#define SET_BAUD 10000 /* 设置球机波特率*/
#define STOP_CMD 10005 /* 取消或停止指令*/
#define AUTO_SCAN 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define IRIS_CLOSE 10007 /* 光圈缩小(1 有效)*/
#define IRIS_OPEN 10008 /* 光圈放大(1 有效)*/
#define FOCUS_NEAR 10009 /* 近距离聚焦(1 有效)*/
#define FOCUS_FAR 10010 /* 远距离聚焦(1 有效)*/
#define ZOOM_WIDE 10011 /* 远离物体(1 有效)*/
#define ZOOM_TELE 10012 /* 接近物体(1 有效)*/
#define MOVE_DOWN 10013 /* 向下移动镜头(1 有效)*/
#define MOVE_UP 10014 /* 向上移动镜头(1 有效)*/
#define MOVE_LEFT 10015 /* 向左移动镜头(1 有效)*/
#define MOVE_RIGHT 10016 /* 向右移动镜头(1 有效)*/
#define MOVE_PRESETNO 10017 // 调用预置点
#define SAVE_PRESETNO 10018 // 设置预置点
#define OPEN_TOTAL 10019 /* 打开总电源(1 有效)*/
#define OPEN_MODULE_POWER 10020 /* 打开机芯电源(1 有效)*/
#define NOTIFY_PTZ_CLOSE 10021 // 通知云台关闭
#define QUERY_PTZ_STATE 10022 // 查询云台状态
#define CLOSE_TOTAL 10040 /* 关闭总电源*/
#define SPEED_DOME_CAMERA 0 /* 球机摄像机*/
#define SERIAL_CAMERA 2 /* 串口摄像机a*/
#define START_ONCE_SELF 200 /* 一次性自检需要的调用的预置点200*/
#define COLLECT_DATA 0 /* 调试使用*/
#define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 )
// SDS packet structure (raw message received from a serial port).
typedef struct
{
    uint8_t PortIdx; // source port index
    uint16_t MsgType; // message type
    int MsgLen; // message length in bytes
    uint8_t MsgData[RECVDATA_MAXLENTH];
} RTUMSG;
// Analog-input scaling parameters.
typedef struct
{
    float fFactor; // scaling factor applied to the raw value
    float EuValueDelta; // engineering-value offset
} AI_PARAM;
// Analog-input point definition.
typedef struct
{
    AI_PARAM AiParam; // point configuration parameters
    int AiState; // sampling state (-1: failed; 0: not sampled; 1: sampling; 2: done; 3: start sampling)
    float EuValue; // engineering value
} AI_DEF;
// Single sampled value returned to upper layers.
typedef struct
{
    uint8_t AiState; // sampling state (-1: failed; 0: not sampled; 1: sampling; 2: done; 3: start sampling)
    float EuValue; // engineering value
} Data_DEF;
// Picture descriptor returned to upper layers.
typedef struct
{
    int imagelen; // total picture size
    int phototime; // time the picture was taken
    uint8_t presetno; // preset the picture was taken at
    char photoname[512]; // storage name and path of the picture
    int state;// state (-1: failed; 0: no photo; 1: fetching image; 2: photo ok; 3: photo started)
} IMAGE_DEF;
// Picture assembly buffer for packetized serial-camera transfers.
typedef struct
{
    int imagelen; // total picture size
    int imagenum; // total number of packets in the picture
    int phototime; // time the picture was taken
    uint8_t presetno; // preset the picture was taken at
    char photoname[512]; // storage name and path of the picture
    uint8_t buf[MAX_PHOTO_PACKET_NUM][MAX_PHOTO_FRAME_LEN]; // picture data cache
    int ilen[MAX_PHOTO_PACKET_NUM]; // length of each corresponding picture packet
    int state;// state (-1: failed; 0: no photo; 1: fetching image; 2: photo ok; 3: photo started)
} PHOTO_DEF;
// Sensor acquisition parameters passed down from the upper layer.
typedef struct SENSOR_PARAM
{
    unsigned int baudrate; /* baud rate */
    int databit; /* data bits */
    float stopbit; /* stop bits */
    char parity; /* parity */
    char pathname[64]; /* serial device file name and path */
    //int commNo; /* agreed serial port number, e.g. COM1 as shown on a PC */
    uint8_t SensorsType; /* sensor type index, greater than 0 */
    int devaddr; /* address used by the device (sensor) */
    uint8_t IsNoInsta; /* device missing or broken (1: normal, 0: invalid/broken/not installed) */
    uint8_t CameraChannel; /* camera channel number */
    uint8_t Phase; /* phase the sensor is installed on (tension/tilt; 11 means A1, ...) */
    float multiple; /* scaling factor */
    float offset; /* offset value */
} SENSOR_PARAM;
// Serial-port device parameters that must be configured.
typedef struct
{
    unsigned int baudrate; /* baud rate */
    int databit; /* data bits */
    int stopbit; /* stop bits */
    char parity; /* parity */
    char pathname[64]; /* serial device file name and path */
    int commid; /* serial port index; NOTE: starts from 0 */
    uint8_t ProtocolIdx; /* protocol index, greater than 0 */
    int devaddr; /* address used by the device */
    uint8_t IsNoInsta; /* device missing or broken (1: normal, 0: invalid/broken/not installed) */
    uint8_t CameraChannel; /* camera channel number */
    uint8_t Phase; /* phase the sensor is installed on (tension/tilt; 11 means A1, ...) */
} SERIAL_PARAM;
// PTZ status data reported by the camera.
typedef struct
{
    uint8_t ptz_process; /* current PTZ phase (1: self-test; 2: moving to preset; 3: normal) */
    uint8_t ptz_status; /* current PTZ status (0: stopped; 1: moving; 2: core not powered; other: error) */
    int presetno; /* preset number the PTZ is currently at */
    float x_coordinate; /* horizontal coordinate of the current PTZ position */
    float y_coordinate; /* vertical coordinate of the current PTZ position */
} PTZ_STATE;
/*
 $--RMC sentence (NMEA 0183 "Recommended Minimum" navigation data), fields:
  1  sentence ID  $--RMC
  2  UTCtime      hhmmss.ss   UTC time of the fix
  3  status       'A' = data valid, 'V' = data invalid (warning)
  4  lat          ddmm.mmmmm  latitude (first 2 chars degrees, rest minutes)
  5  uLat         'N' = north, 'S' = south
  6  lon          dddmm.mmmmm longitude (first 3 chars degrees, rest minutes)
  7  uLon         'E' = east, 'W' = west
  8  spd          speed over ground (knots)
  9  cog          course over ground (degrees)
  10 date         ddmmyy      day, month, year
  11 mv           magnetic variation (degrees)
  12 mvE          'E' = east, 'W' = west
  13 mode         positioning mode indicator
  14 navStatus    navigation status, 'V' (NMEA 4.1 and later only)
  15 CS           checksum, between '*' and the end of the sentence
*/
// BeiDou satellite (GNSS) fix data.
typedef struct
{
    struct tm UTC_time; /* UTC time */
    int ms_time; /* milliseconds */
    double lat; /* latitude; raw value is ddmm.mmmm (first 2 chars degrees, rest minutes), converted to degrees */
    char uLat; /* latitude hemisphere: 'N' north, 'S' south */
    double lon; /* longitude; raw value is dddmm.mmmm (first 3 chars degrees, rest minutes), converted to degrees */
    char uLon; /* longitude hemisphere: 'E' east, 'W' west */
    char status; /* 'A' = data valid; any other character means invalid */
} BD_GNSS_DATA;
// Per-serial-port communication state (receive machine, poll command,
// retry/wait timers, camera and BeiDou data).
typedef struct
{
    int m_iRevStatus; /* receive state-machine state */
    int m_iRecvLen; /* bytes received so far */
    int m_iNeedRevLength; /* bytes still expected */
    int iRecvTime; /* receive timing counter */
    uint8_t m_au8RecvBuf[RECVDATA_MAXLENTH];/* receive buffer */
    int fd; /* file descriptor of the opened serial port */
    uint8_t PollCmd[SENDDATA_MAXLENTH];
    int cmdlen; // length of the command in the send buffer
    //******************** Poll Cmd ****************************
    uint8_t Retry; /* command retry limit */
    uint8_t RetryCnt; /* command retry counter */
    int64_t RetryTime; /* command retry interval */
    int64_t RetryTimeCnt; /* command retry interval counter */
    int64_t WaitTime; /* inter-command interval */
    int64_t WaitTimeCnt; /* inter-command interval counter */
    uint8_t ForceWaitFlag; /* forced-wait flag */
    uint16_t ForceWaitCnt; /* forced-wait counter */
    uint8_t ReSendCmdFlag; /* command resend flag */
    uint8_t SendCmdFlag; /* command sent flag */
    uint8_t RevCmdFlag; /* command successfully received flag */
    //**********************************************************
    int64_t lsendtime; /* absolute send time of the command (milliseconds) */
    int cameraaddr; /* camera address */
    int SerialCmdidx; /* index of the command currently being sent on this port
                         (-1: no command in flight) */
    PHOTO_DEF image; /* temporary storage for picture data */
    int64_t FirstCmdTimeCnt; /* start time of reading data from the serial port */
    PTZ_STATE ptz_state;
    int sendptzstatecmd; // limits how many status-query commands are sent
    BD_GNSS_DATA bd_data;
} SIO_PARAM_SERIAL_DEF;
// Dispatch entry for one BeiDou NMEA0183 sentence type.
typedef const struct
{
    //char *account; // command description
    char *cmd_name; // command (sentence) name
    int (*recv_process)(SIO_PARAM_SERIAL_DEF *); /* URC data handler */
}BD_NMEA0183_PROC_FUNC;
// All parameters of one serial-attached device, grouped in one place.
typedef struct
{
    //******************** basic port information ************************
    uint8_t IsNeedSerial; /* whether serial communication is required */
    int CmdWaitTime; /* unused */
    uint8_t UseSerialidx; /* index of the serial port in use */
    int SerialCmdidx; /* index of the command currently being sent on this port
                         (-1: no command in flight) */
    int enrecvtime; /* timer since a reply to the encrypted command was received */
    int64_t FirstCmdTimeCnt; /* start time of reading data from the serial port */
    uint8_t nextcmd; /* second send of the weather/rainfall read command */
    uint8_t SameTypeDevIdx; /* sequence number among devices of the same type (from 0) */
    uint8_t uOpenPowerFlag; /* sensor power-on flag (0: no need to open; 1: needs opening) */
    int recvdatacnt; /* count of valid data received */
    PHOTO_DEF image; /* temporary storage for picture data */
    AI_DEF aiValue[MAX_DEV_VALUE_NUM]; /* sensor sampled values */
} SERIAL_DEV_DEF;
// Global serial-subsystem state shared across all ports and devices.
typedef struct
{
    uint8_t clcyesampling; /* sampling in progress (0: not sampling; 1: sampling) */
    uint8_t camerauseserial; /* which serial port the camera uses */
    uint32_t PtzCmdType; /* PTZ command type */
    int usecameradevidx; /* a camera command needs to run */
    /* index of the device executing the command (-1: nothing to execute) */
    int SendStopPtzCmdTimeCnt; /* timing for sending the PTZ stop command */
    uint8_t serialstatus[MAX_SERIAL_PORT_NUM]; /* usability of serial ports 1, 2, 3 */
    SERIAL_DEV_DEF ms_dev[MAX_SERIAL_DEV_NUM]; /* attached sensor devices */
    int UseingSerialdev[MAX_SERIAL_PORT_NUM]; /* device index using each port (-1: port idle) */
    int curdevidx[MAX_SERIAL_PORT_NUM]; /* device currently communicating (-1: none) */
    uint8_t IsReadWireTem; /* whether wire-temperature reading has started (0: no; 1: yes) */
    //int proruntime; /* program run time */
    int IsSleep; /* whether the program should sleep (1: no sleep; 2: sleep) */
    int tempsamplingstartime; /* interval between temperature start and sampling start */
    int tempsamplingsucctime; /* interval between temperature start and sampling success */
    int samplingtimeSec; /* second-level timing control for high-speed sampling */
    int SectimesamplingCnt[3]; /* per-second sample counts for high-speed sampling */
    int SunshineSensorsFault; /* controls sunshine-sensor fault reporting */
    int TempSensorsFault; /* controls temperature-sensor fault reporting */
    int FirstSensorsFault; /* first sensor-fault detection/report */
    int SensorsIsUse; /* whether the sensor is enabled, matched with self-test position */
    int sequsampling; /* sequential sampling control index (-1: none; otherwise device index) */
    int imagepacketnum; /* total packet count of a serial-camera photo */
    int historyimagenum[MAX_CHANNEL_NUM]; /* number of history pictures stored by the dome camera */
#if 1
    //int sendflag; /* temporary leakage-current upload flag */
    int sendphototime; /* statistics of temporarily uploaded picture data */
    int sendphotocmdcnt; /* count of photo commands sent during one photo session */
    int photographtime; /* time the picture was taken */
    int iLastGetPhotoNo; /* photo command index saved while configuring the serial camera */
    uint8_t bImageSize; /* temporary storage of the picture size received from the upper layer */
    uint8_t presetno; /* temporary storage of the preset number received from the upper layer */
    char filedir[512]; /* temporary path for pictures taken by the camera */
#endif
    uint8_t errorPhotoNoCnt; /* wrong-packet-number replies during a serial-camera photo (e.g. asked for packet 6, got 3) */
    uint8_t RephotographCnt; /* serial-camera retake counter (only when reading photo data fails) */
} SRDT_DEF;
static void PortDataProcess( void );
static int64_t get_msec();
int serial_port_comm();
static int weather_comm(SERIAL_PARAM weatherport);
static void setRS485Enable(bool z);
static void set485WriteMode();
static void set485ReadMode();
static void set12VEnable(bool z);
static void setCam3V3Enable(bool enabled);
// 串口相关的所有函数定义
/* 打开串口电源*/
void Gm_OpenSerialPower();
uint8_t getdevtype(int devno);
// 打开传感器电源
void Gm_OpenSensorsPower();
// 关闭传感器电源
void Gm_CloseSensorsPower(int port);
// 打开串口通讯
void Gm_OpenSerialPort(int devidx);
// 关闭串口通讯
void Gm_CloseSerialPort();
void DBG_LOG(int commid, char flag, const char* format, ...);
int SaveLogTofile(int commid, const char *szbuf);
// 功能说明:串口发送数据 返回实际发送的字节数
int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid);
void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log);
// 启动串口通讯
void GM_StartSerialComm();
// 启动使用串口拍照
int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
void delete_old_files(const char *path, int days);
// 串口轮询通讯定时器
int GM_SerialTimer();
//轮询所有串口和传感器是否需要生成下发命令
void Gm_FindAllSensorsCommand();
//检查所有传感器是否采集完毕,采集完毕的关闭传感器电源
void GM_IsCloseSensors();
//检查所有串口是否有数据接收,有则启动接收
void GM_AllSerialComRecv();
//判断是否需要关闭定时器
int GM_CloseTimer();
void testComm();
void Gm_InitSerialComm_Test();
// 串口接收数据处理
void SerialDataProcess(int devidx, uint8_t *buf, int len);
void CameraRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t *buf, int len);
// 串口摄像机数据处理
void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
// 发送命令
void SendCmdFormPollCmdBuf( int port );
// 清除发送命令的所有标识
void ClearCmdAllFlag(int commid);
// 下发串口拍照指令控制
int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam);
// 生成 CameraPhoto命令
void MakeCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx, int OneParam, uint16_t TwoParam, uint8_t Threep, int phototime);
// 清除命令缓冲区
void ClearCmdFormPollCmdBuf(int port);
// 准备发送云台指令
int Gm_CtrlPtzCmd(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t ptzcmd);
// 发送转动摄像机云台命令定时器
int Gm_Camera_Timer();
// 生成 PELCO_P 命令 *
void Gm_SendPelco_pCommand( uint32_t cmdtype);
// 计算Pelco_p校验
uint8_t Gm_Pelco_pXORCheck( uint8_t *msg, int len );
// 生成 PELCO_D 命令 *
void Gm_SendPelco_DCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t cmdtype);
// 计算Pelco_D校验
uint8_t Gm_Pelco_DCheck( uint8_t *msg, int len );
// 查询传感器电源状态
char Gm_GetSensorsPowerState(int port);
// 通过传感器使用的航空头查找传感器使用的串口序号
void FindDevUseSerialCommNo();
// 寻找并生成下一条倾角命令
int FindNextShxyProtocolCommand( int devidx );
// 倾角命令校验码计算
unsigned char CalLpc(unsigned char *msg, int len);
// 读上海欣影传感器协议数据
void ShxyProtocolRecvData(int commid, uint8_t *buf, int len);
// 检查检验和是否正确
int CheckShxyProtocolLpcError( uint8_t* msg, int len );
// 把16进制和10进制ASCII字符串转换成int整数
int ATOI(char *buf);
//生成倾角命令
void MakeShxyProtocolPollCommand(int portno, uint8_t cmdidx);
// 上海欣影传感器协议数据处理
void ShxyProtocolDataProcess( int commid);
// 控制关闭传感器电源
//void Gm_CtrlCloseSensorsPower(int devidx);
// 检查传感器电源是否应该关闭或打开
//void Gm_CheckSensorsPower(void);
int SaveImageDataTofile(int devno);
void Collect_sensor_data();
int CameraPhotoCmd(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
/* 数据和图片采集数据返回函数 开始*/
int GetWeatherData(Data_DEF *data, int datano);
int GetAirTempData(Data_DEF *airt);
int GetHumidityData(Data_DEF *airt);
int GetWindSpeedData(Data_DEF *airt);
int GetWindDirectionData(Data_DEF *airt);
int GetRainfallData(Data_DEF *airt);
int GetAtmosData(Data_DEF *airt);
int GetOpticalRadiationData(Data_DEF *airt);
int GetPullValue(int devno, Data_DEF *data);
int GetAngleValue(int devno, Data_DEF *data, int Xy);
int GetImage(int devno, IMAGE_DEF *photo);
/* 数据和图片采集数据返回函数 结束*/
// 生成一个随机整数
int GeneratingRandomNumber();
int Gm_SetSerialPortParam(int commid);
void ClearCameraCmdAllFlag(SIO_PARAM_SERIAL_DEF *pPortParam);
void ClearCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam);
int Gm_OpenCameraSerial(SIO_PARAM_SERIAL_DEF *pPortParam, const char *serfile, unsigned int baud);
int Gm_SetCameraSerialPortParam(int fd, unsigned int baud);
int GM_CameraComSend(unsigned char * cSendBuf, size_t nSendLen, int fd);
void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam);
void Gm_FindCameraCommand(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_CameraSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr);
void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx);
int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud);
int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len);
unsigned char BDXorCheck(unsigned char *msg, int len);
void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
char** BD_NMEA0183_SplitString(char *str, int *total_fields);
int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial);
#endif // __SENSOR_PROTOCOL_H__

@ -0,0 +1,79 @@
#include "TerminalDevice.h"
#include <dlfcn.h>
#include "Camera.h"
#include <AndroidHelper.h>
typedef jbyteArray (*TakePhotoFunc)(int, int, int, int);
extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
CTerminalDevice::CTerminalDevice(JavaVM* vm, jobject service)
{
    // Attach to the JVM for this thread (if needed) and keep a global
    // reference to the Java service object for later JNI calls.
    m_vm = vm;
    m_javaService = NULL; // initialized so the destructor is safe even on failure
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res || env == NULL)
    {
        // BUGFIX: previously fell through and dereferenced a NULL JNIEnv.
        ALOGE("Failed to get JNI Env");
        return;
    }
    m_javaService = env->NewGlobalRef(service);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
}
CTerminalDevice::~CTerminalDevice()
{
    // Release the global reference taken in the constructor.
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (res && env != NULL)
    {
        // BUGFIX: guard against a NULL JNIEnv (and a NULL ref when the
        // constructor failed) before calling DeleteGlobalRef.
        if (m_javaService != NULL)
        {
            env->DeleteGlobalRef(m_javaService);
        }
        if (attached)
        {
            m_vm->DetachCurrentThread();
        }
    }
    else
    {
        ALOGE("Failed to get JNI Env");
    }
    m_javaService = NULL;
}
bool CTerminalDevice::TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo)
{
    // Takes a picture through the native CCamera wrapper.
    // NOTE(review): on the active code path `res` is never set to JNI_TRUE, so
    // this function always returns false even when a picture was taken —
    // confirm whether callers rely on the return value.
    // NOTE(review): channel/preset/path/photo are only referenced by the
    // disabled JNI path below.
    jboolean res = JNI_FALSE;
    CCamera camera;
    camera.initCamera(NULL);
    if (camera.isCameraReady())
    {
        camera.takePicture();
    }
    camera.closeCamera();
#if 0
    JNIEnv* env = NULL;
    bool attached = GetJniEnv(m_vm, &env);
    jclass serviceClass = env->GetObjectClass(m_javaService);
    jmethodID mid = env->GetMethodID(serviceClass, "takePhoto", "(SSLjava/lang/String;)Z");
    jstring str = env->NewStringUTF(path.c_str());
    res = env->CallBooleanMethod (m_javaService, mid, (jint)channel, (jint)preset, str);
    env->ReleaseStringUTFChars(str, path.c_str());
    env->DeleteLocalRef(serviceClass);
    if (!res)
    {
        int aa = 1;
    }
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
#endif
    return res == JNI_TRUE;
}

@ -0,0 +1,21 @@
#ifndef __TERMINAL_DEVICE_H__
#define __TERMINAL_DEVICE_H__
#include <Client/Device.h>
#include <jni.h>
// IDevice implementation backed by an Android Java service via JNI.
class CTerminalDevice : public IDevice
{
public:
    // vm: the process JavaVM; service: the Java service object (a global
    // reference is taken and released in the destructor).
    CTerminalDevice(JavaVM* vm, jobject service);
    ~CTerminalDevice();
    // Captures a picture; see the .cpp for the current native-camera path.
    virtual bool TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo);
private:
    JavaVM* m_vm;          // process-wide JavaVM, used to obtain a JNIEnv per thread
    jobject m_javaService; // global reference to the Java service object
};
#endif // __TERMINAL_DEVICE_H__

File diff suppressed because it is too large Load Diff

@ -1,724 +0,0 @@
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein is
* confidential and proprietary to MediaTek Inc. and/or its licensors. Without
* the prior written permission of MediaTek inc. and/or its licensors, any
* reproduction, modification, use or disclosure of MediaTek Software, and
* information contained herein, in whole or in part, shall be strictly
* prohibited.
*
* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
* ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
* WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
* RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
* INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
* TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
* RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
* OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
* SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
* RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
* ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
* RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
* MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
* CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek
* Software") have been modified by MediaTek Inc. All revisions are subject to
* any receiver's applicable license agreements with MediaTek Inc.
*/
#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
/******************************************************************************
*
******************************************************************************/
/**
 * MTK platform-private metadata section ids.
 *
 * One enumerator per logical group of MediaTek HAL-internal metadata tags.
 * Ids start at 0xC000 and increase implicitly by one per entry; each id is
 * later shifted left by 16 bits to form the base tag value of its section
 * (see mtk_platform_metadata_section_start in this header).
 */
typedef enum mtk_platform_metadata_section {
    MTK_HAL_REQUEST = 0xC000, // MTK HAL internal metadata become from 0xC000 0000
    MTK_P1NODE,
    MTK_P2NODE,
    MTK_3A_TUNINING, // NOTE(review): spelling "TUNINING" is part of the public API; do not rename
    MTK_3A_EXIF,
    MTK_MF_EXIF,
    MTK_EIS,
    MTK_STEREO,
    MTK_FRAMESYNC,
    MTK_VHDR,
    MTK_PIPELINE,
    MTK_NR,
    MTK_PLUGIN,
    MTK_DUALZOOM,
    MTK_FEATUREPIPE,
    MTK_POSTPROC,
    MTK_FEATURE,
    MTK_FSC,
} mtk_platform_metadata_section_t;
/******************************************************************************
*
******************************************************************************/
/**
 * Base tag value for each metadata section: (section id << 16).
 *
 * The first tag of every section in mtk_platform_metadata_tag is anchored
 * at its section's *_START value; the remaining tags follow implicitly.
 *
 * NOTE(review): MTK_MF_EXIF has no corresponding *_START entry here --
 * presumably intentional, since the MF EXIF debug tags are enumerated
 * inside the 3A EXIF debug-info range; confirm before adding one.
 */
typedef enum mtk_platform_metadata_section_start {
    MTK_HAL_REQUEST_START = MTK_HAL_REQUEST << 16,
    MTK_P1NODE_START = MTK_P1NODE << 16,
    MTK_P2NODE_START = MTK_P2NODE << 16,
    MTK_3A_TUNINING_START = MTK_3A_TUNINING << 16,
    MTK_3A_EXIF_START = MTK_3A_EXIF << 16,
    MTK_EIS_START = MTK_EIS << 16,
    MTK_STEREO_START = MTK_STEREO << 16,
    MTK_FRAMESYNC_START = MTK_FRAMESYNC << 16,
    MTK_VHDR_START = MTK_VHDR << 16,
    MTK_PIPELINE_START = MTK_PIPELINE << 16,
    MTK_NR_START = MTK_NR << 16,
    MTK_PLUGIN_START = MTK_PLUGIN << 16,
    MTK_DUALZOOM_START = MTK_DUALZOOM << 16,
    MTK_FEATUREPIPE_START = MTK_FEATUREPIPE << 16,
    MTK_POSTPROC_START = MTK_POSTPROC << 16,
    MTK_FEATURE_START = MTK_FEATURE << 16,
    MTK_FSC_START = MTK_FSC << 16,
} mtk_platform_metadata_section_start_t;
/******************************************************************************
*
******************************************************************************/
/**
 * MTK platform-private metadata tags.
 *
 * Tags are grouped by section: the first tag of each group is anchored at
 * the section's *_START value (section id << 16) and the rest follow
 * implicitly, so the relative ORDER of enumerators is ABI -- never reorder
 * or insert in the middle of a section.  The trailing comment on each
 * enumerator documents the payload type stored under that tag (MUINT8,
 * MINT32, MRect, MSize, IMemory, IMetadata, ...).
 */
typedef enum mtk_platform_metadata_tag {
    /* ---- MTK_HAL_REQUEST section ---- */
    MTK_HAL_REQUEST_REQUIRE_EXIF = MTK_HAL_REQUEST_START, //MUINT8
    MTK_HAL_REQUEST_DUMP_EXIF, //MUINT8
    MTK_HAL_REQUEST_REPEAT, //MUINT8
    MTK_HAL_REQUEST_DUMMY, //MUINT8
    MTK_HAL_REQUEST_SENSOR_SIZE, //MSize
    MTK_HAL_REQUEST_SENSOR_ID, //MINT32
    MTK_HAL_REQUEST_DEVICE_ID, //MINT32
    MTK_HAL_REQUEST_HIGH_QUALITY_CAP, //MUINT8
    MTK_HAL_REQUEST_ISO_SPEED, //MINT32
    MTK_HAL_REQUEST_BRIGHTNESS_MODE, //MINT32
    MTK_HAL_REQUEST_CONTRAST_MODE, //MINT32
    MTK_HAL_REQUEST_HUE_MODE, //MINT32
    MTK_HAL_REQUEST_SATURATION_MODE, //MINT32
    MTK_HAL_REQUEST_EDGE_MODE, //MINT32
    MTK_HAL_REQUEST_PASS1_DISABLE, //MINT32
    MTK_HAL_REQUEST_ERROR_FRAME, // used for error handling //MUINT8
    MTK_HAL_REQUEST_PRECAPTURE_START, // 4cell //MUINT8
    MTK_HAL_REQUEST_AF_TRIGGER_START, // 4cell //MUINT8
    MTK_HAL_REQUEST_IMG_IMGO_FORMAT, //MINT32
    MTK_HAL_REQUEST_IMG_RRZO_FORMAT, //MINT32
    MTK_HAL_REQUEST_INDEX, //MINT32
    MTK_HAL_REQUEST_COUNT, //MINT32
    MTK_HAL_REQUEST_SMVR_FPS, //MUINT8 // 0: NOT batch request
    MTK_HAL_REQUEST_REMOSAIC_ENABLE, //MUINT8 // 0: preview mode 1: capture mode
    MTK_HAL_REQUEST_INDEX_BSS, //MINT32
    MTK_HAL_REQUEST_ZSD_CAPTURE_INTENT, //MUINT8
    MTK_HAL_REQUEST_REAL_CAPTURE_SIZE, //MSize
    MTK_HAL_REQUEST_VIDEO_SIZE, //MSize
    MTK_HAL_REQUEST_RAW_IMAGE_INFO, //MINT32 // index[0]: raw fmt, index[1]: raw stride, index[2]: raw size(width), index[3]: raw size(height)
    MTK_HAL_REQUEST_ISP_PIPELINE_MODE, //MINT32
    /* ---- MTK_P1NODE section ---- */
    MTK_P1NODE_SCALAR_CROP_REGION = MTK_P1NODE_START, //MRect
    MTK_P1NODE_BIN_CROP_REGION, //MRect
    MTK_P1NODE_DMA_CROP_REGION, //MRect
    MTK_P1NODE_BIN_SIZE, //MSize
    MTK_P1NODE_RESIZER_SIZE, //MSize
    MTK_P1NODE_RESIZER_SET_SIZE, //MSize
    MTK_P1NODE_CTRL_RESIZE_FLUSH, //MBOOL
    MTK_P1NODE_CTRL_READOUT_FLUSH, //MBOOL
    MTK_P1NODE_CTRL_RECONFIG_SENSOR_SETTING, //MBOOL
    MTK_P1NODE_PROCESSOR_MAGICNUM, //MINT32
    MTK_P1NODE_MIN_FRM_DURATION, //MINT64
    MTK_P1NODE_RAW_TYPE, //MINT32
    MTK_P1NODE_SENSOR_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER1_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER2_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER1_SIZE, //MSize
    MTK_P1NODE_SENSOR_MODE, //MINT32
    MTK_P1NODE_SENSOR_VHDR_MODE, //MINT32
    MTK_P1NODE_METADATA_TAG_INDEX, //MINT32
    MTK_P1NODE_RSS_SIZE, //MSize
    MTK_P1NODE_SENSOR_STATUS, //MINT32
    MTK_P1NODE_SENSOR_RAW_ORDER, //MINT32
    MTK_P1NODE_TWIN_SWITCH, //MINT32
    MTK_P1NODE_TWIN_STATUS, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_SWITCH, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_STATUS, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_LEVEL, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_SWITCHING, //MBOOL
    MTK_P1NODE_RESUME_SHUTTER_TIME_US, //MINT32
    MTK_P1NODE_FRAME_START_TIMESTAMP, //MINT64
    MTK_P1NODE_FRAME_START_TIMESTAMP_BOOT, //MINT64
    MTK_P1NODE_REQUEST_PROCESSED_WITHOUT_WB, //MBOOL
    MTK_P1NODE_ISNEED_GMV, //MBOOL
    /* ---- MTK_P2NODE section ---- */
    MTK_P2NODE_HIGH_SPEED_VDO_FPS = MTK_P2NODE_START, //MINT32
    MTK_P2NODE_HIGH_SPEED_VDO_SIZE, //MSize
    MTK_P2NODE_CTRL_CALTM_ENABLE, //MBOOL
    MTK_P2NODE_FD_CROP_REGION, //MRect
    MTK_P2NODE_CROP_REGION, //MRect // for removing black edge
    MTK_P2NODE_DSDN_ENABLE, //MBOOL // for DSDN on/off controled by Policy
    MTK_P2NODE_SENSOR_CROP_REGION, //MRect
    MTK_3A_AE_HIGH_ISO_BINNING, //MBOOL // for 3HDR high iso binning mode
    MTK_SENSOR_SCALER_CROP_REGION, //MRect
    /* ---- MTK_3A_TUNINING section ---- */
    MTK_PROCESSOR_CAMINFO = MTK_3A_TUNINING_START, //IMemory
    MTK_ISP_ATMS_MAPPING_INFO, //IMemory
    MTK_3A_ISP_PROFILE, //MUINT8
    MTK_3A_ISP_P1_PROFILE, //MUINT8
    MTK_CAMINFO_LCSOUT_INFO, //IMemory
    MTK_3A_ISP_BYPASS_LCE, //MBOOL
    MTK_3A_ISP_DISABLE_NR, //MBOOL
    MTK_3A_ISP_NR3D_SW_PARAMS, //MINT32[14] //GMVX, GMVY, confX, confY, MAX_GMV, frameReset, GMV_Status,ISO_cutoff
    MTK_3A_ISP_NR3D_HW_PARAMS, //IMemory
    MTK_3A_ISP_LCE_GAIN, //MINT32, bits[0:15]: LCE gain, bits[16:31]: LCE gain confidence ratio (0-100)
    MTK_3A_ISP_FUS_NUM, //MINT32
    MTK_3A_AE_CAP_PARAM, //IMemory
    MTK_3A_AE_CAP_SINGLE_FRAME_HDR, //MUINT8
    MTK_3A_AE_BV_TRIGGER, //MBOOL
    MTK_3A_AF_LENS_POSITION, //MINT32
    MTK_3A_FLICKER_RESULT, //MINT32
    MTK_3A_DUMMY_BEFORE_REQUEST_FRAME, //MBOOL // Dummy frame before capture, only for capture intent, preview don't use
    MTK_3A_DUMMY_AFTER_REQUEST_FRAME, //MBOOL // Dummy frame after capture, only for capture intent, preview don't use
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MAX, //MINT32
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MIN, //MINT32
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE, //MINT32
    MTK_3A_HDR_MODE, //MUINT8
    MTK_3A_AE_HDR_MIXED_ISO, //MUINT32
    MTK_3A_AE_ZSL_STABLE, //MINT32 ( MBOOL )
    MTK_3A_PGN_ENABLE, //MUINT8
    MTK_3A_SKIP_HIGH_QUALITY_CAPTURE, //MUINT8
    MTK_3A_AI_SHUTTER, //MBOOL
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, //MINT32
    MTK_3A_FEATURE_AE_TARGET_MODE, //MINT32
    MTK_3A_OPEN_ID, //MINT32
    MTK_LSC_TBL_DATA, //IMemory
    MTK_LSC_TSF_DATA, //IMemory
    MTK_LSC_TSF_DUMP_NO, //IMemory
    MTK_ISP_P2_ORIGINAL_SIZE, //MSize
    MTK_ISP_P2_CROP_REGION, //MRect
    MTK_ISP_P2_RESIZER_SIZE, //MSize
    MTK_ISP_P2_IN_IMG_FMT, //MINT32, 0 or not exist: RAW->YUV, 1: YUV->YUV
    MTK_ISP_P2_TUNING_UPDATE_MODE, //MUINT8, [0 or not exist]: as default; [1]: keep existed parameters but some parts will be updated; [2]: keep all existed parameters (force mode) [3] LPCNR Pass1 [4] LPCNR Pass2
    MTK_ISP_P2_IN_IMG_RES_REVISED, //MINT32, describes P2 input image revised resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist.
    MTK_ISP_APP_TARGET_SIZE, //MINT32, describes APP Target resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist.
    MTK_MSF_SCALE_INDEX, //MINT32, which scale stage index, would only exist with scaling flow
    MTK_MSF_FRAME_NUM, //MINT32, After BSS which frame number is this stage using
    MTK_TOTAL_MULTI_FRAME_NUM, //MINT32, MSYUV fuction used this input to know frame nunber
    MTK_TOTAL_MULTI_FRAME_NUM_CAPTURED, //MINT32, MSF function used
    MTK_SW_DSDN_VERSION, //MINT32, distinguish different dsdn version
    MTK_ISP_COLOR_SPACE, //MINT32
    MTK_ISP_DRC_CURVE, //IMemory
    MTK_ISP_DRC_CURVE_SIZE, //MINT32
    MTK_ISP_FEO_DATA, //IMemory
    MTK_ISP_FEO_ENABLE, //MINT32
    MTK_ISP_FEO_INFO, //IMemory
    MTK_ISP_HLR_RATIO, //MINT32, which is a HDR ratio applied in HLR
    MTK_ISP_STAGE, //MINT32
    MTK_FOCUS_AREA_POSITION, //MINT32
    MTK_FOCUS_AREA_SIZE, //MSize
    MTK_FOCUS_AREA_RESULT, //MUINT8
    MTK_FOCUS_PAUSE, //MUINT8
    MTK_FOCUS_MZ_ON, //MUINT8
    MTK_3A_AF_FOCUS_VALUE, //MINT64
    MTK_3A_PRV_CROP_REGION, //MRect
    MTK_3A_ISP_MDP_TARGET_SIZE, //MSize
    MTK_3A_REPEAT_RESULT, //MUINT8
    MTK_3A_SKIP_PRECAPTURE, //MBOOL //if CUST_ENABLE_FLASH_DURING_TOUCH is true, MW can skip precapture
    MTK_3A_SKIP_BAD_FRAME, //MBOOL
    MTK_3A_FLARE_IN_MANUAL_CTRL_ENABLE, //MBOOL
    MTK_3A_DYNAMIC_SUBSAMPLE_COUNT, //MINT32 30fps = 1, 60fps = 2, ... , 120fps = 4
    MTK_3A_AE_LV_VALUE, //MINT32
    MTK_APP_CONTROL, //MINT32
    MTK_3A_CUST_PARAMS, //IMemory
    MTK_3A_SETTING_CUST_PARAMS, //IMemory
    MTK_3A_PERFRAME_INFO, //IMemory
    MTK_SENSOR_MODE_INFO_ACTIVE_ARRAY_CROP_REGION, //MRect
    MTK_3A_AE_BV, //MINT32
    MTK_3A_AE_CWV, //MINT32
    MTK_ISP_P2_PROCESSED_RAW, //MINT32
    /* ---- MTK_3A_EXIF section ---- */
    MTK_3A_EXIF_METADATA = MTK_3A_EXIF_START, //IMetadata
    /* ---- MTK_EIS section ---- */
    MTK_EIS_REGION = MTK_EIS_START, //MINT32
    MTK_EIS_INFO, //MINT64
    MTK_EIS_VIDEO_SIZE, //MRect
    MTK_EIS_NEED_OVERRIDE_TIMESTAMP, //MBOOL
    MTK_EIS_LMV_DATA, //IMemory
    /* ---- MTK_STEREO section ---- */
    MTK_STEREO_JPS_MAIN1_CROP = MTK_STEREO_START, //MRect
    MTK_STEREO_JPS_MAIN2_CROP, //MRect
    MTK_STEREO_SYNC2A_MODE, //MINT32
    MTK_STEREO_SYNCAF_MODE, //MINT32
    MTK_STEREO_HW_FRM_SYNC_MODE, //MINT32
    MTK_STEREO_NOTIFY, //MINT32
    MTK_STEREO_SYNC2A_MASTER_SLAVE, //MINT32[2]
    MTK_STEREO_SYNC2A_STATUS, //IMemory
    MTK_JPG_ENCODE_TYPE, //MINT8
    MTK_CONVERGENCE_DEPTH_OFFSET, //MFLOAT
    MTK_N3D_WARPING_MATRIX_SIZE, //MUINT32
    MTK_P1NODE_MAIN2_HAL_META, //IMetadata
    MTK_P2NODE_BOKEH_ISP_PROFILE, //MUINT8
    MTK_STEREO_FEATURE_DENOISE_MODE, //MINT32
    MTK_STEREO_FEATURE_SENSOR_PROFILE, //MINT32
    MTK_P1NODE_MAIN2_APP_META, //IMetadata
    MTK_STEREO_FEATURE_OPEN_ID, //MINT32
    MTK_STEREO_FRAME_PER_CAPTURE, //MINT32
    MTK_STEREO_ENABLE_MFB, //MINT32
    MTK_STEREO_BSS_RESULT, //MINT32
    MTK_STEREO_FEATURE_FOV_CROP_REGION, //MINT32[6] // p.x, p.y, p.w, p.h, srcW, srcH
    MTK_STEREO_DCMF_FEATURE_MODE, //MINT32 // mtk_platform_metadata_enum_dcmf_feature_mode
    MTK_STEREO_HDR_EV, //MINT32
    MTK_STEREO_DELAY_FRAME_COUNT, //MINT32
    MTK_STEREO_DCMF_DEPTHMAP_SIZE, //MSize
    MTK_STEREO_WITH_CAMSV, //MBOOL
    /* ---- MTK_FRAMESYNC section ---- */
    MTK_FRAMESYNC_ID = MTK_FRAMESYNC_START, //MINT32
    MTK_FRAMESYNC_TOLERANCE, //MINT64
    MTK_FRAMESYNC_FAILHANDLE, //MINT32
    MTK_FRAMESYNC_RESULT, //MINT64
    MTK_FRAMESYNC_TYPE, //MINT32
    MTK_FRAMESYNC_MODE, //MUINT8
    /* ---- MTK_VHDR section ---- */
    MTK_VHDR_LCEI_DATA = MTK_VHDR_START, //Memory
    MTK_VHDR_IMGO_3A_ISP_PROFILE, //MUINT8
    MTK_HDR_FEATURE_HDR_HAL_MODE,
    MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM,
    MTK_VHDR_MULTIFRAME_TIMESTAMP, //MINT64
    MTK_VHDR_MULTIFRAME_EXPOSURE_TIME, //MINT64
    /* ---- MTK_PIPELINE section ---- */
    MTK_PIPELINE_UNIQUE_KEY = MTK_PIPELINE_START, //MINT32
    MTK_PIPELINE_FRAME_NUMBER, //MINT32
    MTK_PIPELINE_REQUEST_NUMBER, //MINT32
    MTK_PIPELINE_EV_VALUE, //MINT32
    MTK_PIPELINE_DUMP_UNIQUE_KEY, //MINT32
    MTK_PIPELINE_DUMP_FRAME_NUMBER, //MINT32
    MTK_PIPELINE_DUMP_REQUEST_NUMBER, //MINT32
    MTK_PIPELINE_VIDEO_RECORD, //MINT32
    /* ---- MTK_NR section ---- */
    MTK_NR_MODE = MTK_NR_START, //MINT32
    MTK_NR_MNR_THRESHOLD_ISO, //MINT32
    MTK_NR_SWNR_THRESHOLD_ISO, //MINT32
    MTK_REAL_LV, //MINT32
    MTK_ANALOG_GAIN, //MUINT32
    MTK_AWB_RGAIN, //MINT32
    MTK_AWB_GGAIN, //MINT32
    MTK_AWB_BGAIN, //MINT32
    /* ---- MTK_PLUGIN section ---- */
    MTK_PLUGIN_MODE = MTK_PLUGIN_START, //MINT64
    MTK_PLUGIN_COMBINATION_KEY, //MINT64
    MTK_PLUGIN_P2_COMBINATION, //MINT64
    MTK_PLUGIN_PROCESSED_FRAME_COUNT, //MINT32
    MTK_PLUGIN_CUSTOM_HINT, //MINT32
    MTK_PLUGIN_DETACT_JOB_SYNC_TOKEN, //MINT64, may be not exists.
    MTK_PLUGIN_UNIQUEKEY,
    /* ---- MTK_DUALZOOM section ---- */
    MTK_DUALZOOM_DROP_REQ = MTK_DUALZOOM_START, //MINT32
    MTK_DUALZOOM_FORCE_ENABLE_P2, //MINT32
    MTK_DUALZOOM_DO_FRAME_SYNC, //MINT32
    MTK_DUALZOOM_ZOOM_FACTOR, //MINT32
    MTK_DUALZOOM_DO_FOV, //MINT32
    MTK_DUALZOOM_FOV_RECT_INFO, //MINT32
    MTK_DUALZOOM_FOV_CALB_INFO, //MINT32
    MTK_DUALZOOM_FOV_MARGIN_PIXEL, //MSize
    MTK_DUALCAM_AF_STATE, //MUINT8
    MTK_DUALCAM_LENS_STATE, //MUINT8
    MTK_DUALCAM_TIMESTAMP, //MINT64
    MTK_DUALZOOM_3DNR_MODE, //MINT32
    MTK_DUALZOOM_ZOOMRATIO, //MINT32
    MTK_DUALZOOM_CENTER_SHIFT, //MINT32
    MTK_DUALZOOM_FOV_RATIO, //MFLOAT
    MTK_DUALZOOM_REAL_MASTER, //MINT32
    MTK_DUALZOOM_FD_TARGET_MASTER, //MINT32
    MTK_DUALZOOM_FD_REAL_MASTER, //MINT32 // maybe not set
    MTK_LMV_SEND_SWITCH_OUT, //MINT32
    MTK_LMV_SWITCH_OUT_RESULT, //MINT32
    MTK_LMV_VALIDITY, //MINT32
    MTK_VSDOF_P1_MAIN1_ISO, //MINT32
    MTK_DUALZOOM_IS_STANDBY, //MBOOL
    MTK_DUALZOOM_CAP_CROP, //MRect
    MTK_DUALZOOM_MASTER_UPDATE_MODE, //MBOOL
    MTK_DUALZOOM_STREAMING_NR, //MINT32
    /* ---- MTK_FEATUREPIPE section ---- */
    MTK_FEATUREPIPE_APP_MODE = MTK_FEATUREPIPE_START, //MINT32
    /* ---- MTK_POSTPROC section ---- */
    MTK_POSTPROC_TYPE = MTK_POSTPROC_START, //MINT32
    /* ---- MTK_FEATURE section ---- */
    MTK_FEATURE_STREAMING = MTK_FEATURE_START, //MINT64
    MTK_FEATURE_CAPTURE, //MINT64
    MTK_FEATURE_CAPTURE_PHYSICAL, //MINT64
    MTK_FEATURE_FREE_MEMORY_MBYTE, //MINT32
    MTK_FEATURE_MFNR_NVRAM_QUERY_INDEX, //MINT32
    MTK_FEATURE_MFNR_NVRAM_DECISION_ISO, //MINT32
    MTK_FEATURE_MFNR_TUNING_INDEX_HINT, //MINT64
    MTK_FEATURE_MFNR_FINAL_EXP, //MINT32
    MTK_FEATURE_MFNR_OPEN_ID, //MINT32
    MTK_FEATURE_AINR_MDLA_MODE, //MINT32
    MTK_ISP_AINR_MDLA_MODE, //MINT32
    MTK_ISP_LTM_BIT_MODE, //MINT32
    MTK_FEATURE_BSS_SELECTED_FRAME_COUNT, //MINT32
    MTK_FEATURE_BSS_FORCE_DROP_NUM, //MINT32
    MTK_FEATURE_BSS_FIXED_LSC_TBL_DATA, //MUINT8
    MTK_FEATURE_BSS_PROCESS, //MINT32
    MTK_FEATURE_BSS_ISGOLDEN, //MBOOL
    MTK_FEATURE_BSS_REORDER, //MBOOL
    MTK_FEATURE_BSS_MANUAL_ORDER, //MUINT8
    MTK_FEATURE_BSS_RRZO_DATA, //MUINT8
    MTK_FEATURE_BSS_DOWNSAMPLE, //MBOOL
    MTK_FEATURE_PACK_RRZO, //MUINT8
    MTK_FEATURE_FACE_RECTANGLES, //MRect array
    MTK_FEATURE_FACE_POSE_ORIENTATIONS, //MINT32[n*3] array, each struct include: xAsix, yAsix, zAsix
    MTK_FEATURE_CAP_YUV_PROCESSING, //MUINT8
    MTK_FEATURE_CAP_PIPE_DCE_CONTROL, //MUINT8
    MTK_FEATURE_MULTIFRAMENODE_BYPASSED, //MUINT8
    MTK_FEATURE_FACE_APPLIED_GAMMA, //MINT32
    MTK_FEATURE_CAP_PQ_USERID, //MINT64
    MTK_FEATURE_FLIP_IN_P2A, //MINT32
    /* ---- MTK_FSC section ---- */
    MTK_FSC_CROP_DATA = MTK_FSC_START, //IMemory
    MTK_FSC_WARP_DATA, //IMemory
    MTK_STAGGER_ME_META, //IMetadata
    MTK_STAGGER_SE_META, //IMetadata
    MTK_STAGGER_BLOB_IMGO_ORDER //MUINT8
} mtk_platform_metadata_tag_t;
/******************************************************************************
*
******************************************************************************/
/**
 * Tag ids for entries stored inside the 3A EXIF metadata blob (the
 * IMetadata carried under MTK_3A_EXIF_METADATA above).
 *
 * Debug-info tags come in KEY/DATA pairs between
 * MTK_3A_EXIF_DEBUGINFO_BEGIN and MTK_3A_EXIF_DEBUGINFO_END: the KEY entry
 * holds an MINT32 identifier and the DATA entry holds a Memory payload.
 *
 * NOTE(review): unlike mtk_platform_metadata_tag, this enum has no
 * "= MTK_3A_EXIF_START" anchor, so values begin at 0.  That is harmless
 * only if these ids are used exclusively as keys of the nested EXIF
 * container -- confirm they are never mixed with the section-anchored tags.
 */
typedef enum mtk_platform_3a_exif_metadata_tag {
    MTK_3A_EXIF_FNUMBER, //MINT32
    MTK_3A_EXIF_FOCAL_LENGTH, //MINT32
    MTK_3A_EXIF_FOCAL_LENGTH_35MM, //MINT32
    MTK_3A_EXIF_SCENE_MODE, //MINT32
    MTK_3A_EXIF_AWB_MODE, //MINT32
    MTK_3A_EXIF_LIGHT_SOURCE, //MINT32
    MTK_3A_EXIF_EXP_PROGRAM, //MINT32
    MTK_3A_EXIF_SCENE_CAP_TYPE, //MINT32
    MTK_3A_EXIF_FLASH_LIGHT_TIME_US, //MINT32
    MTK_3A_EXIF_AE_METER_MODE, //MINT32
    MTK_3A_EXIF_AE_EXP_BIAS, //MINT32
    MTK_3A_EXIF_CAP_EXPOSURE_TIME, //MINT32
    MTK_3A_EXIF_AE_ISO_SPEED, //MINT32
    MTK_3A_EXIF_REAL_ISO_VALUE, //MINT32
    MTK_3A_EXIF_AE_BRIGHTNESS_VALUE, //MINT32
    MTK_3A_EXIF_FLASH_FIRING_STATUS, //MINT32
    MTK_3A_EXIF_FLASH_RETURN_DETECTION, //MINT32
    MTK_3A_EXIF_FLASH_MODE, //MINT32
    MTK_3A_EXIF_FLASH_FUNCTION, //MINT32
    MTK_3A_EXIF_FLASH_REDEYE, //MINT32
    MTK_3A_EXIF_DEBUGINFO_BEGIN, // debug info begin
    // key: MINT32
    MTK_3A_EXIF_DBGINFO_AAA_KEY = MTK_3A_EXIF_DEBUGINFO_BEGIN, //MINT32
    MTK_3A_EXIF_DBGINFO_AAA_DATA,
    MTK_3A_EXIF_DBGINFO_SDINFO_KEY,
    MTK_3A_EXIF_DBGINFO_SDINFO_DATA,
    MTK_3A_EXIF_DBGINFO_ISP_KEY,
    MTK_3A_EXIF_DBGINFO_ISP_DATA,
    //
    MTK_CMN_EXIF_DBGINFO_KEY,
    MTK_CMN_EXIF_DBGINFO_DATA,
    //
    MTK_MF_EXIF_DBGINFO_MF_KEY,
    MTK_MF_EXIF_DBGINFO_MF_DATA,
    //
    MTK_N3D_EXIF_DBGINFO_KEY,
    MTK_N3D_EXIF_DBGINFO_DATA,
    //
    MTK_POSTNR_EXIF_DBGINFO_NR_KEY,
    MTK_POSTNR_EXIF_DBGINFO_NR_DATA,
    //
    MTK_RESVB_EXIF_DBGINFO_KEY,
    MTK_RESVB_EXIF_DBGINFO_DATA,
    //
    MTK_RESVC_EXIF_DBGINFO_KEY,
    MTK_RESVC_EXIF_DBGINFO_DATA,
    // data: Memory
    MTK_3A_EXIF_DEBUGINFO_END, // debug info end
} mtk_platform_3a_exif_metadata_tag_t;
/* Value domains for the AE-related tags declared above.  Each enum below is
 * named after the tag whose payload it describes. */
// MTK_3A_FEATURE_AE_EXPOSURE_LEVEL
typedef enum mtk_camera_metadata_enum_ae_exposure_level {
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NONE = 0,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_SHORT,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NORMAL,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG,
} mtk_camera_metadata_enum_ae_exposure_level_t;
// MTK_3A_FEATURE_AE_TARGET_MODE
typedef enum mtk_camera_metadata_enum_ae_target_mode {
    MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL = 0,
    MTK_3A_FEATURE_AE_TARGET_MODE_IVHDR,
    MTK_3A_FEATURE_AE_TARGET_MODE_MVHDR,
    MTK_3A_FEATURE_AE_TARGET_MODE_ZVHDR,
    MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX,
    MTK_3A_FEATURE_AE_TARGET_MODE_SE_FIX,
    MTK_3A_FEATURE_AE_TARGET_MODE_4CELL_MVHDR,
    MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR,
    MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR_RTO1X,
    MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_2EXP,
    MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP,
} mtk_camera_metadata_enum_ae_target_mode_t;
//MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM
typedef enum mtk_camera_metadata_enum_stagger_valid_exposure_num {
    MTK_STAGGER_VALID_EXPOSURE_NON = 0,
    MTK_STAGGER_VALID_EXPOSURE_1 = 1,
    MTK_STAGGER_VALID_EXPOSURE_2 = 2,
    MTK_STAGGER_VALID_EXPOSURE_3 = 3
} mtk_camera_metadata_enum_stagger_valid_exposure_num_t;
//MTK_3A_ISP_FUS_NUM
typedef enum mtk_camera_metadata_enum_3a_isp_fus_num {
    MTK_3A_ISP_FUS_NUM_NON = 0,
    MTK_3A_ISP_FUS_NUM_1 = 1,
    MTK_3A_ISP_FUS_NUM_2 = 2,
    MTK_3A_ISP_FUS_NUM_3 = 3,
} mtk_camera_metadata_enum_3a_isp_fus_num_t;
/******************************************************************************
 * Value domains for platform tags (noise reduction, plugin selection,
 * dual-zoom, P1 node state, feature-pipe modes, ...).
 ******************************************************************************/
// MTK_NR_MODE
typedef enum mtk_platform_metadata_enum_nr_mode {
    MTK_NR_MODE_OFF = 0,
    MTK_NR_MODE_MNR,
    MTK_NR_MODE_SWNR,
    MTK_NR_MODE_AUTO
} mtk_platform_metadata_enum_nr_mode_t;
// Multi-frame blending mode.
typedef enum mtk_platform_metadata_enum_mfb_mode {
    MTK_MFB_MODE_OFF = 0,
    MTK_MFB_MODE_MFLL,
    MTK_MFB_MODE_AIS,
    MTK_MFB_MODE_NUM,
} mtk_platform_metadata_enum_mfb_mode_t;
// MTK_PLUGIN_CUSTOM_HINT
typedef enum mtk_platform_metadata_enum_custom_hint {
    MTK_CUSTOM_HINT_0 = 0,
    MTK_CUSTOM_HINT_1,
    MTK_CUSTOM_HINT_2,
    MTK_CUSTOM_HINT_3,
    MTK_CUSTOM_HINT_4,
    MTK_CUSTOM_HINT_NUM,
} mtk_platform_metadata_enum_custom_hint_t;
// MTK_PLUGIN_MODE -- bit flags, may be OR-ed together.
typedef enum mtk_platform_metadata_enum_plugin_mode {
    MTK_PLUGIN_MODE_COMBINATION = 1 << 0,
    MTK_PLUGIN_MODE_NR = 1 << 1,
    MTK_PLUGIN_MODE_HDR = 1 << 2,
    MTK_PLUGIN_MODE_MFNR = 1 << 3,
    MTK_PLUGIN_MODE_COPY = 1 << 4,
    MTK_PLUGIN_MODE_TEST_PRV = 1 << 5,
    MTK_PLUGIN_MODE_BMDN = 1 << 6,
    MTK_PLUGIN_MODE_MFHR = 1 << 7,
    MTK_PLUGIN_MODE_BMDN_3rdParty = 1 << 8,
    MTK_PLUGIN_MODE_MFHR_3rdParty = 1 << 9,
    MTK_PLUGIN_MODE_FUSION_3rdParty = 1 << 10,
    MTK_PLUGIN_MODE_VSDOF_3rdParty = 1 << 11,
    MTK_PLUGIN_MODE_COLLECT = 1 << 12,
    MTK_PLUGIN_MODE_HDR_3RD_PARTY = 1 << 13,
    MTK_PLUGIN_MODE_MFNR_3RD_PARTY = 1 << 14,
    MTK_PLUGIN_MODE_BOKEH_3RD_PARTY = 1 << 15,
    MTK_PLUGIN_MODE_DCMF_3RD_PARTY = 1 << 16,
} mtk_platform_metadata_enum_plugin_mode_t;
// MTK_PLUGIN_P2_COMBINATION -- bit flags, may be OR-ed together.
typedef enum mtk_platform_metadata_enum_p2_plugin_combination {
    MTK_P2_RAW_PROCESSOR = 1 << 0,
    MTK_P2_ISP_PROCESSOR = 1 << 1,
    MTK_P2_YUV_PROCESSOR = 1 << 2,
    MTK_P2_MDP_PROCESSOR = 1 << 3,
    MTK_P2_CAPTURE_REQUEST = 1 << 4,
    MTK_P2_PREVIEW_REQUEST = 1 << 5
} mtk_platform_metadata_enum_p2_plugin_combination;
// MTK_ISP_COLOR_SPACE
typedef enum mtk_platform_metadata_enum_isp_color_space {
    MTK_ISP_COLOR_SPACE_SRGB = 0 ,
    MTK_ISP_COLOR_SPACE_DISPLAY_P3 = 1 ,
    MTK_ISP_COLOR_SPACE_CUSTOM_1 = 2
} mtk_platform_metadata_enum_isp_color_space;
// MTK_DUALZOOM_DROP_REQ
typedef enum mtk_platform_metadata_enum_dualzoom_drop_req {
    MTK_DUALZOOM_DROP_NEVER_DROP = 0,
    MTK_DUALZOOM_DROP_NONE = 1,
    MTK_DUALZOOM_DROP_DIRECTLY = 2,
    MTK_DUALZOOM_DROP_NEED_P1,
    MTK_DUALZOOM_DROP_NEED_SYNCMGR,
    MTK_DUALZOOM_DROP_NEED_SYNCMGR_NEED_STREAM_F_PIPE,
} mtk_platform_metadata_enum_dualzoom_drop_req_t;
// MTK_P1NODE_SENSOR_STATUS
typedef enum mtk_platform_metadata_enum_p1_sensor_status {
    MTK_P1_SENSOR_STATUS_NONE = 0,
    MTK_P1_SENSOR_STATUS_STREAMING = 1,
    MTK_P1_SENSOR_STATUS_SW_STANDBY = 2,
    MTK_P1_SENSOR_STATUS_HW_STANDBY = 3,
} mtk_platform_metadata_enum_p1_sensor_status_t;
// MTK_P1NODE_TWIN_SWITCH
typedef enum mtk_platform_metadata_enum_p1_twin_switch {
    MTK_P1_TWIN_SWITCH_NONE = 0,
    MTK_P1_TWIN_SWITCH_ONE_TG = 1,
    MTK_P1_TWIN_SWITCH_TWO_TG = 2
} mtk_platform_metadata_enum_p1_twin_switch_t;
// MTK_P1NODE_TWIN_STATUS
typedef enum mtk_platform_metadata_enum_p1_twin_status {
    MTK_P1_TWIN_STATUS_NONE = 0,
    MTK_P1_TWIN_STATUS_TG_MODE_1 = 1,
    MTK_P1_TWIN_STATUS_TG_MODE_2 = 2,
    MTK_P1_TWIN_STATUS_TG_MODE_3 = 3,
} mtk_platform_metadata_enum_p1_twin_status_t;
// MTK_P1NODE_RESIZE_QUALITY_SWITCH
typedef enum mtk_platform_metadata_enum_p1_resize_quality_switch {
    MTK_P1_RESIZE_QUALITY_SWITCH_NONE = 0,
    MTK_P1_RESIZE_QUALITY_SWITCH_L_L = 1,
    MTK_P1_RESIZE_QUALITY_SWITCH_L_H = 2,
    MTK_P1_RESIZE_QUALITY_SWITCH_H_L = 3,
    MTK_P1_RESIZE_QUALITY_SWITCH_H_H = 4,
} mtk_platform_metadata_enum_p1_resize_quality_switch_t;
// MTK_P1NODE_RESIZE_QUALITY_STATUS
typedef enum mtk_platform_metadata_enum_p1_resize_quality_status {
    MTK_P1_RESIZE_QUALITY_STATUS_NONE = 0,
    MTK_P1_RESIZE_QUALITY_STATUS_ACCEPT = 1,
    MTK_P1_RESIZE_QUALITY_STATUS_IGNORE = 2,
    MTK_P1_RESIZE_QUALITY_STATUS_REJECT = 3,
    MTK_P1_RESIZE_QUALITY_STATUS_ILLEGAL = 4,
} mtk_platform_metadata_enum_p1_resize_quality_status_t;
// MTK_P1NODE_RESIZE_QUALITY_LEVEL
typedef enum mtk_platform_metadata_enum_p1_resize_quality_level {
    MTK_P1_RESIZE_QUALITY_LEVEL_UNKNOWN = 0,
    MTK_P1_RESIZE_QUALITY_LEVEL_L = 1,
    MTK_P1_RESIZE_QUALITY_LEVEL_H = 2,
} mtk_platform_metadata_enum_p1_resize_quality_level_t;
// MTK_LMV_SWITCH_OUT_RESULT
typedef enum mtk_platform_metadata_enum_lmv_result {
    MTK_LMV_RESULT_OK = 0,
    MTK_LMV_RESULT_FAILED,
    MTK_LMV_RESULT_SWITCHING
} mtk_platform_metadata_enum_lmv_result_t;
// MTK_FEATUREPIPE_APP_MODE
typedef enum mtk_platform_metadata_enum_featurepipe_app_mode {
    MTK_FEATUREPIPE_PHOTO_PREVIEW = 0,
    MTK_FEATUREPIPE_VIDEO_PREVIEW = 1,
    MTK_FEATUREPIPE_VIDEO_RECORD = 2,
    MTK_FEATUREPIPE_VIDEO_STOP = 3,
} mtk_platform_metadata_enum_featurepipe_app_mode_t;
// MTK_STEREO_DCMF_FEATURE_MODE
typedef enum mtk_platform_metadata_enum_dcmf_feature_mode {
    MTK_DCMF_FEATURE_BOKEH = 0,
    MTK_DCMF_FEATURE_MFNR_BOKEH = 1,
    MTK_DCMF_FEATURE_HDR_BOKEH = 2,
} mtk_platform_metadata_enum_dcmf_feature_mode_t;
// MTK_HAL_REQUEST_SMVR_FPS
typedef enum mtk_platform_metadata_enum_smvr_fps {
    MTK_SMVR_FPS_30 = 0,
    MTK_SMVR_FPS_120 = 1,
    MTK_SMVR_FPS_240 = 2,
    MTK_SMVR_FPS_480 = 3,
    MTK_SMVR_FPS_960 = 4,
} mtk_platform_metadata_enum_smvr_fps_t;
//MTK_FRAMESYNC_FAILHANDLE
typedef enum mtk_platform_metadata_enum_fremesync_failhandle {
    MTK_FRAMESYNC_FAILHANDLE_CONTINUE,
    MTK_FRAMESYNC_FAILHANDLE_DROP,
} mtk_platform_metadata_enum_fremesync_failhandle_t;
//MTK_FRAMESYNC_RESULT
typedef enum mtk_platform_metadata_enum_fremesync_result {
    MTK_FRAMESYNC_RESULT_PASS,
    MTK_FRAMESYNC_RESULT_FAIL_CONTINUE,
    MTK_FRAMESYNC_RESULT_FAIL_DROP,
} mtk_platform_metadata_enum_fremesync_result_t;
//MTK_FRAMESYNC_MODE
typedef enum mtk_platform_metadata_enum_fremesync_mode {
    MTK_FRAMESYNC_MODE_VSYNC_ALIGNMENT,
    MTK_FRAMESYNC_MODE_READOUT_CENTER_ALIGNMENT,
} mtk_platform_metadata_enum_fremesync_mode_t;
//MTK_FEATURE_MULTIFRAMENODE_BYPASSED
typedef enum mtk_platform_metadata_enum_multiframenode_bypassed {
    MTK_FEATURE_MULTIFRAMENODE_NOT_BYPASSED = 0,
    MTK_FEATURE_MULTIFRAMENODE_TO_BE_BYPASSED = 1
} mtk_platform_metadata_enum_mfllnode_bypassed_t;
//MTK_FEATURE_BSS_PROCESS
typedef enum mtk_platform_metadata_enum_bss_processing {
    MTK_FEATURE_BSS_PROCESS_ENABLE = 0,
    MTK_FEATURE_BSS_PROCESS_DISABLE = 1
} mtk_platform_metadata_enum_bss_processing_t;
//MTK_FEATURE_BSS_MANUAL_ORDER
typedef enum mtk_platform_metadata_enum_bss_manual_order {
    MTK_FEATURE_BSS_MANUAL_ORDER_OFF = 0,
    MTK_FEATURE_BSS_MANUAL_ORDER_GOLDEN = 1
} mtk_platform_metadata_enum_bss_manual_order_t;
//MTK_FEATURE_CAP_YUV_PROCESSING
typedef enum mtk_platform_metadata_enum_cap_yuv_processing {
    MTK_FEATURE_CAP_YUV_PROCESSING_NOT_NEEDED = 0,
    MTK_FEATURE_CAP_YUV_PROCESSING_NEEDED = 1
} mtk_platform_metadata_enum_cap_yuv_processing_t;
//MTK_FEATURE_CAP_PIPE_DCE_CONTROL
// Note: values are intentionally listed 2/1/0 here; the numeric assignments
// are explicit, so the ordering does not affect the ABI.
typedef enum mtk_platform_metadata_enum_cap_pipe_control {
    MTK_FEATURE_CAP_PIPE_DCE_ENABLE_BUT_NOT_APPLY = 2,
    MTK_FEATURE_CAP_PIPE_DCE_MANUAL_DISABLE = 1,
    MTK_FEATURE_CAP_PIPE_DCE_DEFAULT_APPLY = 0
} mtk_platform_metadata_enum_cap_pipe_dce_control_t;
// MTK_FEATURE_AINR_MDLA_MODE, MTK_ISP_AINR_MDLA_MODE
typedef enum mtk_platform_metadata_enum_ainr_mdla_mode {
    MTK_FEATURE_AINR_MDLA_MODE_NONE = 0,
    MTK_FEATURE_AINR_MDLA_MODE_DRCOUT_16BIT = 1,
    MTK_FEATURE_AINR_MDLA_MODE_NNOUT_12BIT = 2,
    MTK_FEATURE_AINR_MDLA_MODE_NNOUT_16BIT = 3,
} mtk_platform_metadata_enum_ainr_mdla_mode_t;
//MTK_ISP_P2_PROCESSED_RAW
typedef enum mtk_platform_metadata_enum_p2_processed_raw {
    MTK_ISP_P2_PROCESSED_RAW_NOT_NEEDED = 0,
    MTK_ISP_P2_PROCESSED_RAW_NEEDED = 1
} mtk_platform_metadata_enum_p2_processed_raw_t;
//MTK_DUALZOOM_STREAMING_NR
typedef enum mtk_platform_metadata_enum_dualzoom_streaming_nr {
    MTK_DUALZOOM_STREAMING_NR_AUTO = 0,
    MTK_DUALZOOM_STREAMING_NR_OFF = 1
} mtk_platform_metadata_enum_dualzoom_streaming_nr_t;
//MTK_STAGGER_BLOB_IMGO_ORDER
typedef enum mtk_platform_metadata_enum_stagger_blob_imgo_order {
    MTK_STAGGER_IMGO_NONE = 0,
    MTK_STAGGER_IMGO_NE = 1,
    MTK_STAGGER_IMGO_ME = 2,
    MTK_STAGGER_IMGO_SE = 3
} mtk_platform_metadata_enum_stagger_blob_imgo_order_t;
//MTK_3A_EXIF_FLASH_FIRING_STATUS
typedef enum mtk_platform_metadata_enum_3a_exif_flash_firing_status_t {
    MTK_3A_EXIF_FLASH_FIRING_STATUS_NOT_FIRED = 0,
    MTK_3A_EXIF_FLASH_FIRING_STATUS_FIRED = 1,
} mtk_platform_metadata_enum_3a_exif_flash_firing_status_t;
//MTK_3A_EXIF_FLASH_RETURN_DETECTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_return_detection_t {
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_NOT_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_RESERVED = 1,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_NOT_DETECTED = 2,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_DETECTED = 3,
} mtk_platform_metadata_enum_3a_exif_flash_return_detection_t;
//MTK_3A_EXIF_FLASH_MODE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_mode_t {
    MTK_3A_EXIF_FLASH_MODE_UNKNOWN = 0,
    MTK_3A_EXIF_FLASH_MODE_COMPULSORY_FIRING = 1,
    MTK_3A_EXIF_FLASH_MODE_COMPULSORY_SUPPRESSION = 2,
    MTK_3A_EXIF_FLASH_MODE_AUTO = 3,
} mtk_platform_metadata_enum_3a_exif_flash_mode_t;
//MTK_3A_EXIF_FLASH_FUNCTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_function_t {
    MTK_3A_EXIF_FLASH_FUNCTION_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_FUNCTION_NOT_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_function_t;
//MTK_3A_EXIF_FLASH_REDEYE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_redeye_t {
    MTK_3A_EXIF_FLASH_REDEYE_NOT_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_REDEYE_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_redeye_t;
//MTK_FEATURE_ABF
typedef enum mtk_platform_metadata_enum_abf_mode {
    MTK_ABF_MODE_OFF = 0,
    MTK_ABF_MODE_ON,
} mtk_platform_metadata_enum_abf_mode_t;
#endif

File diff suppressed because it is too large Load Diff

@ -23,7 +23,6 @@
#include <opencv2/core/core.hpp>
#include "Camera2Helper.h"
#include <mutex>
#include <map>
#include <set>
/**
@ -40,9 +39,6 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
#define WAIT_AF_LOCKED 4
#define PREVIEW_REQUEST_IDX 0
#define CAPTURE_REQUEST_IDX 1
#define DEFAULT_WARMUP_TIME 250 // 250ms
class CameraManager
{
@ -85,11 +81,8 @@ public:
unsigned int orientation:3;
unsigned int zoom : 1;
unsigned int wait3ALocked : 3;
unsigned int burstRawCapture : 3;
unsigned int customHdr : 1;
unsigned int hdrStep : 3;
unsigned int minFps : 4;
unsigned int reserved : 7;
unsigned int burstRawCapture : 2;
unsigned int reserved : 16;
int64_t exposureTime;
unsigned int sensitivity;
int compensation;
@ -165,12 +158,11 @@ public:
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow);
CaptureRequest* CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1);
void DestroyRequest(CaptureRequest* request);
CaptureRequest* CreateRequest(bool isPreviewRequest);
void DestroySession();
virtual bool on_image(cv::Mat rgb);
virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device);
@ -188,7 +180,6 @@ public:
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
void FireBurstCapture();
void FireOneCapture(uint64_t ts);
uint32_t GetLdr() const
{
@ -201,17 +192,10 @@ public:
}
bool IsCameraAvailable(const std::string& cameraId);
int64_t GetTimestamp(const ACameraMetadata* result);
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity);
void Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
void SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
bool SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request);
protected:
std::mutex m_locker;
std::set<std::string> m_availableCameras;
@ -248,12 +232,9 @@ protected:
int32_t maxRegions[3];
bool mCaptureTriggered;
bool mFocusTriggered;
bool mCaptureDispatched;
uint32_t mStableFrameCount;
CAPTURE_RESULT mResult;
uint64_t m_startTime;
unsigned long long m_startTime;
protected:
@ -274,24 +255,32 @@ protected:
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
AImageReader* mImageReader2;
ANativeWindow* mImageWindow2;
ACameraOutputTarget* mOutputTarget2;
ACaptureSessionOutput* mSessionOutput2;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
// cv::Mat mOneFrame;
std::vector<std::pair<int64_t, cv::Mat> > mOneFrame;
cv::Mat mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;
// ACameraOutputTarget* image_reader_target;
// ACaptureRequest* capture_request;
// ACaptureSessionOutput* capture_session_output;
};

@ -50,12 +50,9 @@ public:
Halide::Runtime::Buffer<uint16_t> imgs = burst.ToBuffer();
if (imgs.dimensions() != 3 || imgs.extent(2) < 2) {
return output_img;
#if 0
throw std::invalid_argument(
"The input of HDRPlus must be a 3-dimensional buffer with at least "
"two channels.");
#endif
}
const int cfa_pattern = static_cast<int>(burst.GetCfaPattern());

@ -136,9 +136,7 @@ CfaPattern RawImage::GetCfaPattern() const {
} else if (cfa_pattern == std::string{1, 2, 0, 1}) {
return CfaPattern::CFA_GBRG;
}
#if 0
throw std::invalid_argument("Unsupported CFA pattern: " + cfa_pattern);
#endif
return CfaPattern::CFA_UNKNOWN;
}

@ -1,428 +0,0 @@
//
// Created by Matthew on 2025/3/1.
//
#include "RTSPRecorder.h"
#include <chrono>
#include <thread>
#include <android/log.h>
#include <errno.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
#define LOG_TAG "libcurl"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#include <libavutil/log.h>
#include <android/log.h>
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
// Map FFmpeg log levels to Android log levels
int android_log_level;
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
android_log_level = ANDROID_LOG_FATAL;
break;
case AV_LOG_ERROR:
android_log_level = ANDROID_LOG_ERROR;
break;
case AV_LOG_WARNING:
android_log_level = ANDROID_LOG_WARN;
break;
case AV_LOG_INFO:
android_log_level = ANDROID_LOG_INFO;
break;
case AV_LOG_VERBOSE:
android_log_level = ANDROID_LOG_VERBOSE;
break;
case AV_LOG_DEBUG:
case AV_LOG_TRACE:
android_log_level = ANDROID_LOG_DEBUG;
break;
default:
android_log_level = ANDROID_LOG_INFO;
break;
}
// Format the log message
char log_message[1024];
vsnprintf(log_message, sizeof(log_message), fmt, vl);
// Send the log message to logcat
__android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
/**
 * Mirror every stream of input_ctx into output_ctx for remuxing.
 *
 * For each input stream a new output stream is created, its codec
 * parameters are copied, its time base is carried over, and the codec tag
 * is cleared so the output muxer can choose its own.
 *
 * @param input_ctx  demuxer context whose streams are copied.
 * @param output_ctx muxer context receiving the new streams.
 * @return 0 on success, a negative AVERROR code on failure.
 */
int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) {
    for (unsigned int idx = 0; idx < input_ctx->nb_streams; ++idx) {
        AVStream *src = input_ctx->streams[idx];
        AVStream *dst = avformat_new_stream(output_ctx, NULL);
        if (dst == NULL) {
            return AVERROR_UNKNOWN;
        }
        int rc = avcodec_parameters_copy(dst->codecpar, src->codecpar);
        if (rc < 0) {
            return rc;
        }
        // Carry the source timing resolution over to the output stream.
        dst->time_base = src->time_base;
        // Reset the tag so the target container picks a compatible one.
        dst->codecpar->codec_tag = 0;
    }
    return 0;
}
// Write the MP4 container header with options suited for streaming playback:
// "faststart" places the moov atom at the front, "frag_keyframe" fragments
// the file at keyframes, and the brand is forced to "mp42".
// Returns the avformat_write_header() result (0/positive on success,
// negative AVERROR on failure).
int write_mp4_header(AVFormatContext *output_ctx) {
    AVDictionary *opts = NULL;
    // MP4 specific options
    av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
    av_dict_set(&opts, "brand", "mp42", 0);
    // Write header
    int ret = avformat_write_header(output_ctx, &opts);
    if (ret < 0) {
        char errbuf[AV_ERROR_MAX_STRING_SIZE];
        av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
        // Use the file's Android logging macro: stderr output is discarded
        // on Android, so the previous fprintf() was never visible.
        LOGE("Header write failed: %s (code: %d)\n", errbuf, ret);
    }
    av_dict_free(&opts);
    return ret;
}
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
av_packet_unref(&packet);
}
stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_WARNING);
#endif
std::string url = rtspUrl;
AVDictionary* options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "5000000", 0);
if (!userName.empty())
{
av_dict_set(&options, "username", userName.c_str(), 0); // Replace with actual username
av_dict_set(&options, "password", password.c_str(), 0); // Replace with actual password
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", userName.c_str(), password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Open input RTSP stream
int res = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &options);
av_dict_free(&options);
if (res != 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open input: %s (error code: %d)\n", errbuf, res);
// fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
// fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip audio streams
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
continue;
}
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
// Copy video stream as-is
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *out_stream = avformat_new_stream(outputFormatContext, codec);
if (!out_stream) {
return;
}
avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
out_stream->codecpar->codec_tag = 0;
out_stream->time_base = (AVRational){1, 90000};
out_stream->avg_frame_rate = inStream->avg_frame_rate;
}
else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
// Setup AAC audio stream
const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_encoder) {
fprintf(stderr, "AAC encoder not found\n");
return;
}
AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder);
if (!out_stream) {
return;
}
// Set AAC parameters
out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
out_stream->codecpar->codec_id = AV_CODEC_ID_AAC;
out_stream->codecpar->sample_rate = in_codecpar->sample_rate;
out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP;
out_stream->codecpar->channels = in_codecpar->channels;
out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels);
out_stream->codecpar->bit_rate = 128000;
out_stream->codecpar->frame_size = 1024; // AAC frame size
out_stream->time_base = (AVRational){1, in_codecpar->sample_rate};
}
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
AVDictionary *opts = NULL;
// Set output format options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write output file header
res = avformat_write_header(outputFormatContext, &opts);
av_dict_free(&opts);
if (res < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
#if 0
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
#endif
uint32_t framesToSkip = 16;
uint32_t framesSkipped = 0;
// Skip initial frames
while (framesSkipped < framesToSkip) {
if (av_read_frame(inputFormatContext, &packet) < 0)
break;
if (packet.stream_index == 0) { // Video stream
framesSkipped++;
}
av_packet_unref(&packet);
}
auto startTime = av_gettime();
// int64_t durationNs = (int64_t)duration * 1000000;
int64_t durationNs = (int64_t)(duration + 32) * 1000;
// Read packets from input and write them to output
while (1) {
if ((av_gettime() - startTime) >= durationNs) {
// printf("Duration limit reached (%d seconds)\n", ctx->duration_secs);
break;
}
#if 0
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
#endif
if (av_read_frame(inputFormatContext, &packet) < 0) break;
// Skip audio packets
if (inputFormatContext->streams[packet.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
av_packet_unref(&packet);
continue;
}
// Adjust packet timebase
AVStream *in_stream = inputFormatContext->streams[packet.stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
packet.pos = -1;
res = av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (res < 0)
{
break;
}
}
// stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}

@ -1,20 +0,0 @@
//
// Created by Matthew on 2025/3/1.
//
// Free-function helpers that record a network stream (RTMP or RTSP) into a
// local MP4 file using FFmpeg. Both calls block until the requested
// duration has elapsed or the stream ends.
#ifndef MICROPHOTO_RTSPRECORDER_H
#define MICROPHOTO_RTSPRECORDER_H
#include <string>
#include <android/multinetwork.h>
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
// Record `duration` milliseconds of the RTMP stream into outputPath.
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
// Record `duration` milliseconds of the RTSP stream into outputPath,
// authenticating with userName/password when userName is non-empty.
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle);
// Placeholder class; currently carries no state or behavior.
class RTSPRecorder {
};
#endif //MICROPHOTO_RTSPRECORDER_H

@ -1,186 +0,0 @@
//
// Created by Matthew on 2025/2/28.
//
#include "RTSPToMP4.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <limits>
// Query AMEDIAFORMAT_KEY_MAX_INPUT_SIZE for the given track.
// Returns the advertised maximum sample size in bytes, or 0 when the track
// format does not carry the key.
int32_t getMaxInputSize(AMediaExtractor* extractor, size_t trackIndex)
{
    AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(extractor, trackIndex);
    int32_t maxSize = 0;
    // When the key is absent maxSize keeps its zero initializer.
    AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &maxSize);
    AMediaFormat_delete(trackFormat);
    return maxSize;
}
// Build the RTSP -> MP4 recording pipeline: attach an extractor to the RTSP
// source, create an H.264 ("video/avc") decoder, and prepare an MP4 muxer
// writing to outputPath. durationInMs == 0 means "no time limit".
// NOTE(review): the init helpers cannot report failure from a constructor;
// callers have no way to tell whether setup actually succeeded.
RTSPToMP4::RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs/* = 0*/)
        : fd(-1), codec(nullptr), extractor(nullptr), muxer(nullptr), videoTrackIndex(-1), durationInMs(durationInMs), running(false) {
    initExtractor(rtspUrl);
    initCodec("video/avc");
    initMuxer(outputPath);
}
// Release the codec, extractor and muxer, then flush and close the output
// file descriptor.
// NOTE(review): if an init helper already deleted extractor/muxer on a
// failure path without nulling the member, these deletes would double-free;
// confirm the object lifetimes.
RTSPToMP4::~RTSPToMP4() {
    if (codec) AMediaCodec_delete(codec);
    if (extractor) AMediaExtractor_delete(extractor);
    if (muxer) AMediaMuxer_delete(muxer);
    if (fd != -1)
    {
        fdatasync(fd);   // make sure the MP4 data reaches storage
        close(fd);
        fd = -1;
    }
}
// Create and configure a decoder for the given MIME type (e.g. "video/avc").
// No output surface is attached, so decoded buffers are returned to the app.
// NOTE(review): the results of AMediaCodec_createDecoderByType and
// AMediaCodec_configure are not checked; `codec` may be null afterwards.
void RTSPToMP4::initCodec(const char* mime) {
    codec = AMediaCodec_createDecoderByType(mime);
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
    // Set other format parameters as needed
    // ...
    AMediaCodec_configure(codec, format, nullptr, nullptr, 0);
    AMediaFormat_delete(format);
}
// Point a freshly created media extractor at the RTSP URL.
// NOTE(review): a failing AMediaExtractor_setDataSource is detected but
// silently ignored -- the extractor is left in an unusable state on failure.
void RTSPToMP4::initExtractor(const char* rtspUrl) {
    extractor = AMediaExtractor_new();
    media_status_t status = AMediaExtractor_setDataSource(extractor, rtspUrl);
    if (status != AMEDIA_OK) {
        // Handle error
        // ...
    }
}
// Open the output file and set up an MP4 muxer with the first video track
// found by the extractor. Also sizes the scratch sample buffer from the
// track's advertised max input size (fallback: one raw 1080p RGBA frame).
void RTSPToMP4::initMuxer(const char* outputPath) {
    fd = open(outputPath, O_CREAT | O_WRONLY, 0644);
    // BUGFIX: open() was unchecked; a failed open handed fd == -1 to
    // AMediaMuxer_new.
    if (fd == -1) {
        return;
    }
    muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
    int numTracks = AMediaExtractor_getTrackCount(extractor);
    if (numTracks <= 0) {
        // LOGE("No tracks found in RTSP stream");
        AMediaExtractor_delete(extractor);
        // BUGFIX: null the members after deleting so the destructor does not
        // double-free them.
        extractor = nullptr;
        AMediaMuxer_delete(muxer);
        muxer = nullptr;
        return;
    }
    // Register every video track with the muxer and select it for reading.
    for (int i = 0; i < numTracks; ++i) {
        AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, i);
        const char* mime;
        if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime) && strncmp(mime, "video/", 6) == 0) {
            videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
            AMediaExtractor_selectTrack(extractor, i);
        }
        AMediaFormat_delete(format);
    }
    if (videoTrackIndex == -1) {
        // LOGE("No video track found in RTSP stream");
        AMediaExtractor_delete(extractor);
        extractor = nullptr;   // BUGFIX: see above
        AMediaMuxer_delete(muxer);
        muxer = nullptr;       // BUGFIX: see above
        return;
    }
    int32_t maxInputSize = getMaxInputSize(extractor, videoTrackIndex);
    if (maxInputSize <= 0) {
        // Unknown maximum sample size: fall back to a 1080p RGBA frame.
        sampleData.resize(1920 * 1080 * 4, 0);
        return;
    }
    sampleData.resize(maxInputSize, 0);
}
void RTSPToMP4::startDecodingAndMuxing() {
AMediaCodec_start(codec);
size_t bufferSize = sampleData.size();
uint8_t* buffer = &sampleData[0];
int64_t sampleTime = 0;
int64_t startTime = 0;
bool firstSampleData = true;
int64_t durationTime = (durationInMs == 0) ? std::numeric_limits<int64_t>::max() : (int64_t)durationInMs * 1000;
while (running) {
// Extract data from RTSP stream
ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
if (sampleSize < 0) {
break; // End of stream
}
sampleTime = AMediaExtractor_getSampleTime(extractor);
if (firstSampleData)
{
startTime = sampleTime;
firstSampleData = false;
}
sampleTime -= startTime;
// Feed data to codec
size_t inputBufferIndex;
uint8_t* inputBuffer = AMediaCodec_getInputBuffer(codec, inputBufferIndex, &bufferSize);
memcpy(inputBuffer, buffer, sampleSize);
AMediaCodec_queueInputBuffer(codec, inputBufferIndex, 0, sampleSize, sampleTime, 0);
// Retrieve decoded frames and write to muxer
AMediaCodecBufferInfo bufferInfo;
ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &bufferInfo, 0);
if (outputBufferIndex >= 0) {
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.presentationTimeUs = sampleTime;
bufferInfo.flags = AMediaExtractor_getSampleFlags(extractor);
uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(codec, outputBufferIndex, &bufferSize);
AMediaMuxer_writeSampleData(muxer, videoTrackIndex, outputBuffer, &bufferInfo);
AMediaCodec_releaseOutputBuffer(codec, outputBufferIndex, false);
}
AMediaExtractor_advance(extractor);
if (sampleTime > durationTime)
{
break;
}
}
AMediaCodec_stop(codec);
AMediaMuxer_stop(muxer);
if (fd != -1)
{
fdatasync(fd);
close(fd);
fd = -1;
}
}
void RTSPToMP4::start() {
// Add video track to muxer
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, 0);
videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
running = true;
AMediaMuxer_start(muxer);
startDecodingAndMuxing();
}
// Ask the decode/mux loop in startDecodingAndMuxing() to exit; the loop
// checks `running` once per sample, so shutdown is not immediate.
void RTSPToMP4::stop() {
    running = false;
}

@ -1,38 +0,0 @@
//
// Created by Matthew on 2025/2/28.
//
#ifndef MICROPHOTO_RTSPTOMP4_H
#define MICROPHOTO_RTSPTOMP4_H
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaExtractor.h>
#include <media/NdkMediaMuxer.h>
#include <vector>
// Records an RTSP stream into an MP4 file using the Android NDK media APIs
// (AMediaExtractor -> AMediaCodec -> AMediaMuxer).
// Construct with a source URL, output path and optional duration limit,
// then call start(); stop() may be called from another thread.
class RTSPToMP4 {
public:
    RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs = 0);
    ~RTSPToMP4();
    void start();   // blocking: runs the decode/mux loop until done
    void stop();    // asynchronous stop request
private:
    void initCodec(const char* mime);        // create/configure the decoder
    void initExtractor(const char* rtspUrl); // attach the extractor to the URL
    void initMuxer(const char* outputPath);  // open the output fd and MP4 muxer
    void startDecodingAndMuxing();           // the main worker loop
    int fd;                   // output file descriptor (-1 when closed)
    AMediaCodec* codec;
    AMediaExtractor* extractor;
    AMediaMuxer* muxer;
    int videoTrackIndex;      // muxer index of the video track (-1 = none)
    uint64_t durationInMs;    // 0 = no time limit
    bool running;             // loop flag; cleared by stop()
    std::vector<uint8_t> sampleData;   // scratch buffer for extracted samples
};
#endif //MICROPHOTO_RTSPTOMP4_H

@ -1,547 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#include "Streaming.h"
#include <iostream>
#include <string>
#include <thread>
#include <atomic>
#include <android/api-level.h>
#include <android/log.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
// ---------------------------------------------------------------------------
// Disabled legacy StreamForwarder implementation.
// NOTE(review): relies on AVStream::codec and avcodec_copy_context(), which
// were deprecated and removed in modern FFmpeg -- this block would not build
// against FFmpeg 5+. Kept only for reference; consider deleting.
// ---------------------------------------------------------------------------
#if 0
StreamForwarder::~StreamForwarder() {
    stop();
    if (inputCtx) {
        avformat_close_input(&inputCtx);
    }
    if (outputCtx) {
        if (outputCtx->pb) {
            avio_closep(&outputCtx->pb);
        }
        avformat_free_context(outputCtx);
    }
}
// Open both endpoints; returns false on the first failure.
bool StreamForwarder::initialize(const std::string& inputUrl, const std::string& outputUrl) {
    if (!openInput(inputUrl)) {
        return false;
    }
    if (!openOutput(outputUrl)) {
        return false;
    }
    return true;
}
bool StreamForwarder::openInput(const std::string& inputUrl) {
    inputCtx = avformat_alloc_context();
    if (!inputCtx) {
        return false;
    }
    if (avformat_open_input(&inputCtx, inputUrl.c_str(), nullptr, nullptr) < 0) {
        return false;
    }
    if (avformat_find_stream_info(inputCtx, nullptr) < 0) {
        return false;
    }
    return true;
}
// Create an FLV output mirroring the input streams and write the header.
bool StreamForwarder::openOutput(const std::string& outputUrl) {
    int ret = avformat_alloc_output_context2(&outputCtx, nullptr, "flv", outputUrl.c_str());
    if (ret < 0) {
        return false;
    }
    // Copy streams from input to output
    for (unsigned int i = 0; i < inputCtx->nb_streams; i++) {
        AVStream* inStream = inputCtx->streams[i];
        AVStream* outStream = avformat_new_stream(outputCtx, inStream->codec->codec);
        if (!outStream) {
            return false;
        }
        ret = avcodec_copy_context(outStream->codec, inStream->codec);
        if (ret < 0) {
            return false;
        }
    }
    // Open output file
    if (!(outputCtx->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&outputCtx->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0) {
            return false;
        }
    }
    // Write header
    ret = avformat_write_header(outputCtx, nullptr);
    if (ret < 0) {
        return false;
    }
    return true;
}
void StreamForwarder::setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback) {
    frameCallback = callback;
}
void StreamForwarder::start() {
    isRunning = true;
    forwardPackets();
}
void StreamForwarder::stop() {
    isRunning = false;
}
// Pump packets from input to output, optionally decoding video frames so
// the frame callback can observe them.
void StreamForwarder::forwardPackets() {
    AVPacket packet;
    AVFrame* frame = av_frame_alloc();
    while (isRunning) {
        if (av_read_frame(inputCtx, &packet) < 0) {
            break;
        }
        // Process video frames if callback is set
        if (frameCallback && packet.stream_index == 0) { // Assuming video is stream 0
            AVCodecContext* codecCtx = inputCtx->streams[packet.stream_index]->codec;
            int ret = avcodec_send_packet(codecCtx, &packet);
            if (ret < 0) {
                continue;
            }
            while (ret >= 0) {
                ret = avcodec_receive_frame(codecCtx, frame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;
                } else if (ret < 0) {
                    goto end;
                }
                processFrame(frame);
            }
        }
        // Forward packet
        av_packet_rescale_ts(&packet,
                             inputCtx->streams[packet.stream_index]->time_base,
                             outputCtx->streams[packet.stream_index]->time_base);
        int ret = av_interleaved_write_frame(outputCtx, &packet);
        if (ret < 0) {
            break;
        }
        av_packet_unref(&packet);
    }
end:
    av_frame_free(&frame);
    av_write_trailer(outputCtx);
}
void StreamForwarder::processFrame(AVFrame* frame) {
    if (frameCallback) {
        frameCallback(frame->data[0], frame->linesize[0],
                      frame->width, frame->height);
    }
}
#endif
// Capture the source and destination URLs; forwarding begins only when
// start() is called.
RtspForwarder::RtspForwarder(const std::string& input, const std::string& output)
    : inputUrl(input), outputUrl(output), isRunning(false)
{
}
// True while run() is executing its forwarding loop.
bool RtspForwarder::isStreaming() const
{
    return isRunning;
}
// Run the forwarding loop on the calling thread.
// NOTE(review): this blocks until run() returns and always reports true --
// the run() status code is discarded.
bool RtspForwarder::start()
{
    run();
    return true;
}
// Ask the run() loop to exit after the packet it is currently processing.
bool RtspForwarder::stop()
{
    isRunning = false;
    return true;
}
// Forward the RTSP stream at inputUrl to outputUrl (RTSP re-publish) without
// re-encoding: video packets are copied, H.264 is converted from MP4/AVCC to
// Annex-B via a bitstream filter, audio/data streams are dropped.
// Blocks until the input ends, an unrecoverable write error occurs, or
// stop() clears isRunning. Returns 0/positive on success, negative AVERROR
// on failure (but see the NOTE(review) comments about `return false`).
int RtspForwarder::run()
{
#ifndef NDEBUG
    // Set the custom log callback
    av_log_set_callback(ffmpeg_log_callback);
    av_log_set_level(AV_LOG_DEBUG);
#endif
    isRunning = true;
    AVFormatContext* inputFormatContext = nullptr;
    AVFormatContext* outputFormatContext = nullptr;
    int ret;
    int videoStreamIndex = -1;
    int64_t startTime = AV_NOPTS_VALUE;   // wall-clock base for missing PTS
    AVBSFContext* bsf_ctx = nullptr;      // h264_mp4toannexb filter, if used
    std::string url = inputUrl;
    if (!m_userName.empty())
    {
        // Embed "user:pass@" after the scheme.
        // NOTE(review): assumes a 7-character "rtsp://" prefix.
        char auth[512] = { 0 };
        snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str());
        url.insert(url.begin() + 7, auth, auth + strlen(auth));
    }
    // Input options
    AVDictionary* inputOptions = nullptr;
    av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0);
    av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout
    // av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer
    std::cout << "Opening input: " << url << std::endl;
    // Open input
    ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions);
    // NOTE(review): this sets inputOptions to NULL, so the reconnect path
    // below reopens the input WITHOUT the tcp/timeout options.
    av_dict_free(&inputOptions);
    if (ret < 0) {
        std::cerr << "Could not open input: " << av_err2str(ret) << std::endl;
        return ret;
    }
    // Get stream info
    ret = avformat_find_stream_info(inputFormatContext, nullptr);
    if (ret < 0) {
        // std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl;
        avformat_close_input(&inputFormatContext);
        return ret;
    }
    // Find video stream
    for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
        if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        // std::cerr << "No video stream found" << std::endl;
        avformat_close_input(&inputFormatContext);
        return -1;
    }
    // Create stream mapping: input stream index -> output index, -1 = dropped.
    std::vector<int> streamMapping(inputFormatContext->nb_streams, -1);
    int outputStreamIdx = 0;
    // Allocate output context
    ret = avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str());
    if ((ret < 0) || !outputFormatContext) {
        std::cerr << "Could not create output context" << std::endl;
        avformat_close_input(&inputFormatContext);
        // NOTE(review): returns 0 (false) from an int function -- callers
        // cannot distinguish this failure from success.
        return false;
    }
    // FIXED VERSION - remove the redundant stream creation
    for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
        AVStream* inStream = inputFormatContext->streams[i];
        const AVCodecParameters *in_codecpar = inStream->codecpar;
        // Skip non-video streams if needed
        if (in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO) {
            streamMapping[i] = -1;
            continue;
        }
        // Create only ONE stream per input stream
        const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
        AVStream *outStream = avformat_new_stream(outputFormatContext, codec);
        if (!outStream) {
            // NOTE(review): leaks both contexts and returns false (0).
            return false;
        }
        ret = avcodec_parameters_copy(outStream->codecpar, in_codecpar);
        outStream->codecpar->codec_tag = 0;
        outStream->time_base = (AVRational){1, 90000};   // 90 kHz RTP clock
        outStream->avg_frame_rate = inStream->avg_frame_rate;
        // Map input stream to output stream
        streamMapping[i] = outputStreamIdx++;
    }
    // Convert H.264 from MP4 (AVCC) to Annex-B for RTSP transport.
    const AVBitStreamFilter* filter = av_bsf_get_by_name("h264_mp4toannexb");
    if (filter)
    {
        for (unsigned i = 0; i < outputFormatContext->nb_streams; i++) {
            AVStream* stream = outputFormatContext->streams[i];
            if (stream->codecpar->codec_id == AV_CODEC_ID_H264) {
                ret = av_bsf_alloc(filter, &bsf_ctx);
                if (ret < 0) {
                    std::cerr << "Failed to allocate bitstream filter context: " << av_err2str(ret) << std::endl;
                    return false;
                }
                // Copy parameters from input to bsf
                ret = avcodec_parameters_copy(bsf_ctx->par_in, stream->codecpar);
                if (ret < 0) {
                    std::cerr << "Failed to copy parameters to bsf: " << av_err2str(ret) << std::endl;
                    return false;
                }
                // Initialize the bsf context
                ret = av_bsf_init(bsf_ctx);
                if (ret < 0) {
                    std::cerr << "Failed to initialize bitstream filter: " << av_err2str(ret) << std::endl;
                    return false;
                }
                // Update output parameters
                ret = avcodec_parameters_copy(stream->codecpar, bsf_ctx->par_out);
                if (ret < 0) {
                    std::cerr << "Failed to copy parameters from bsf: " << av_err2str(ret) << std::endl;
                    return false;
                }
                break; // Only apply to the first H.264 stream
            }
        }
    }
    AVDictionary* outputOptions = nullptr;
    av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0);
    av_dict_set(&outputOptions, "rtsp_flags", "filter_src", 0);
    av_dict_set(&outputOptions, "timeout", "5000000", 0);
    av_dict_set(&outputOptions, "allowed_media_types", "video", 0);
    av_dict_set(&outputOptions, "buffer_size", "1024000", 0); // 1MB buffer
    av_dict_set(&outputOptions, "fflags", "nobuffer", 0); // Reduce latency
    av_dict_set(&outputOptions, "muxdelay", "0.1", 0); // Reduce delay
    av_dict_set(&outputOptions, "max_delay", "500000", 0);
    av_dict_set(&outputOptions, "preset", "ultrafast", 0);
    av_dict_set(&outputOptions, "tune", "zerolatency", 0);
    // NOTE(review): "rtsp_flags" is set a second time, overwriting
    // "filter_src" with "prefer_tcp".
    av_dict_set(&outputOptions, "rtsp_flags", "prefer_tcp", 0);
    // Open output
    if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
        // Output options
        // ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
        ret = avio_open2(&outputFormatContext->pb, outputFormatContext->url, AVIO_FLAG_WRITE, NULL, &outputOptions);
        if (ret < 0) {
            char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
            av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
            std::cerr << "Could not open output URL: " << errbuf << std::endl;
            avformat_close_input(&inputFormatContext);
            avformat_free_context(outputFormatContext);
            av_dict_free(&outputOptions);
            return ret;
        }
    }
    // Write header
    ret = avformat_write_header(outputFormatContext, &outputOptions);
    av_dict_free(&outputOptions);
    if (ret < 0) {
        char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
        av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
        std::cerr << "Error writing header: " << errbuf << std::endl;
        avformat_close_input(&inputFormatContext);
        if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE))
            avio_closep(&outputFormatContext->pb);
        avformat_free_context(outputFormatContext);
        return ret;
    }
    // Main loop - read and write packets
    AVPacket packet;
    AVMediaType medaiType;   // (sic: typo kept; means "mediaType")
    while (isRunning) {
        ret = av_read_frame(inputFormatContext, &packet);
        if (ret < 0) {
            if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
                std::cerr << "End of stream or timeout, reconnecting in "
                          << reconnectDelayMs << "ms" << std::endl;
                std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs));
                avformat_close_input(&inputFormatContext);
                // NOTE(review): inputOptions is NULL here (freed above), so
                // the reconnect loses the tcp/timeout settings, and
                // streamMapping is NOT rebuilt for the reopened input.
                ret = avformat_open_input(&inputFormatContext, inputUrl.c_str(), nullptr, &inputOptions);
                if (ret < 0) continue;
                ret = avformat_find_stream_info(inputFormatContext, nullptr);
                if (ret < 0) continue;
                continue;
            }
            break;
        }
        // Later when writing packets:
        int original_stream_index = packet.stream_index;
        if (streamMapping[original_stream_index] >= 0) {
            packet.stream_index = streamMapping[original_stream_index];
            // Write packet...
        } else {
            // Skip this packet
            av_packet_unref(&packet);
            continue;
        }
        // Skip audio packets
        medaiType = inputFormatContext->streams[original_stream_index]->codecpar->codec_type;
        if (medaiType == AVMEDIA_TYPE_AUDIO || medaiType == AVMEDIA_TYPE_DATA)
        {
            av_packet_unref(&packet);
            continue;
        }
#if 0
        // Fix timestamps if enabled
        if (fixTimestamps) {
            // Handle timestamp issues similar to FFmpeg warning
            AVStream* inStream = inputFormatContext->streams[packet.stream_index];
            AVStream* outStream = outputFormatContext->streams[packet.stream_index];
            if (packet.pts == AV_NOPTS_VALUE) {
                // Generate PTS if missing
                if (startTime == AV_NOPTS_VALUE) {
                    startTime = av_gettime();
                }
                packet.pts = av_rescale_q(av_gettime() - startTime,
                                          AV_TIME_BASE_Q,
                                          inStream->time_base);
                packet.dts = packet.pts;
            }
            // Rescale timestamps to output timebase
            packet.pts = av_rescale_q_rnd(packet.pts,
                                          inStream->time_base,
                                          outStream->time_base,
                                          static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
            packet.dts = av_rescale_q_rnd(packet.dts,
                                          inStream->time_base,
                                          outStream->time_base,
                                          static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
            packet.duration = av_rescale_q(packet.duration,
                                           inStream->time_base,
                                           outStream->time_base);
        }
        // Write packet to output
        ret = av_interleaved_write_frame(outputFormatContext, &packet);
        av_packet_unref(&packet);
        if (ret < 0) {
            std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl;
            break;
        }
#endif
        // Rescale timestamps from the input stream to the mapped output stream.
        AVStream *in_stream = inputFormatContext->streams[original_stream_index];
        AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
        av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
        // CRITICAL: Fix timestamp issues
        if (packet.dts != AV_NOPTS_VALUE && packet.pts != AV_NOPTS_VALUE && packet.dts > packet.pts) {
            packet.dts = packet.pts;
        }
        // Handle missing timestamps by synthesizing from the wall clock.
        if (packet.pts == AV_NOPTS_VALUE) {
            if (startTime == AV_NOPTS_VALUE) {
                startTime = av_gettime();
            }
            packet.pts = av_rescale_q(av_gettime() - startTime,
                                      AV_TIME_BASE_Q,
                                      out_stream->time_base);
            packet.dts = packet.pts;
        }
        packet.pos = -1;
        // Apply bitstream filter if it's H.264
        if (bsf_ctx && out_stream->codecpar->codec_id == AV_CODEC_ID_H264) {
            ret = av_bsf_send_packet(bsf_ctx, &packet);
            if (ret < 0) {
                std::cerr << "Error sending packet to bitstream filter: " << av_err2str(ret) << std::endl;
                break;
            }
            while (ret >= 0) {
                ret = av_bsf_receive_packet(bsf_ctx, &packet);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    // Need more input or end of file
                    break;
                } else if (ret < 0) {
                    std::cerr << "Error receiving packet from bitstream filter: " << av_err2str(ret) << std::endl;
                    break;
                }
                // Write the filtered packet
                ret = av_interleaved_write_frame(outputFormatContext, &packet);
                av_packet_unref(&packet);
                if (ret < 0) {
                    char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
                    av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
                    std::cerr << "Error writing frame: " << errbuf << std::endl;
                    break;
                }
            }
        } else {
            // Write the packet without filtering
            ret = av_interleaved_write_frame(outputFormatContext, &packet);
            av_packet_unref(&packet);
            if (ret < 0) {
                char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
                av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
                std::cerr << "Error writing frame: " << errbuf << std::endl;
                break;
            }
        }
    }
// NOTE(review): this label is never targeted by a goto (unused-label warning).
cleanup:
    // Free the bitstream filter context
    if (bsf_ctx) {
        av_bsf_free(&bsf_ctx);
    }
    // Write trailer
    av_write_trailer(outputFormatContext);
    // Cleanup
    avformat_close_input(&inputFormatContext);
    if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
        avio_closep(&outputFormatContext->pb);
    avformat_free_context(outputFormatContext);
    return ret;
}

@ -1,90 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#ifndef MICROPHOTO_STREAMING_H
#define MICROPHOTO_STREAMING_H
#include <string>
#include <memory>
#include <functional>
#include <iostream>
#include <thread>
#include <atomic>
#include <android/multinetwork.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
}
// Abstract base for stream-forwarding strategies. Subclasses implement
// start()/stop()/isStreaming(); the defaults are inert no-ops that report
// failure/not-streaming.
class Streaming
{
public:
    virtual ~Streaming() {}
    virtual bool start() { return false; }
    virtual bool stop() { return false; }
    virtual bool isStreaming() const { return false; }
    // Store credentials used to authenticate against the source stream.
    void setAuth(const std::string& userName, const std::string& password)
    {
        m_userName = userName;
        m_password = password;
    }
protected:
    std::string m_userName;   // source username (empty = no auth)
    std::string m_password;   // source password
};
#if 0 // disabled: generic libavformat-based forwarder, kept for reference
// Pulls packets from an input AVFormatContext and pushes them to an output one.
class StreamForwarder : public Streaming
{
private:
    AVFormatContext* inputCtx = nullptr;   // demuxer context
    AVFormatContext* outputCtx = nullptr;  // muxer context
    bool isRunning = false;                // forwarding loop active flag
public:
    StreamForwarder() = default;
    virtual ~StreamForwarder();
    // Open both input and output; returns true when both succeed.
    bool initialize(const std::string& inputUrl, const std::string& outputUrl);
    virtual void start();
    virtual void stop();
private:
    bool openInput(const std::string& inputUrl);
    bool openOutput(const std::string& outputUrl);
    // Main packet-copy loop.
    void forwardPackets();
    // NOTE(review): callback parameter semantics (buffer, w, h, ?) are not
    // visible here - verify against the implementation if re-enabled.
    void setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback);
};
#endif
// Forwards an RTSP stream from inputUrl to outputUrl (re-publishing a camera
// feed to another endpoint).
class RtspForwarder : public Streaming {
private:
    std::string inputUrl;        // source RTSP address
    std::string outputUrl;       // destination address
    std::atomic<bool> isRunning; // set while the forwarding loop runs
    // Options
    int reconnectDelayMs = 5000; // delay between reconnect attempts
    bool fixTimestamps = true;   // presumably repairs PTS/DTS while forwarding - confirm in .cpp
public:
    RtspForwarder(const std::string& input, const std::string& output);
    virtual bool start();
    virtual bool stop();
    virtual bool isStreaming() const;
    // Blocking forwarding loop; returns a status/error code.
    int run();
};
#endif //MICROPHOTO_STREAMING_H

@ -1,330 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HangYuCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
// Destructor; nothing to release beyond the base class.
HangYuCtrl::~HangYuCtrl()
{
}
// Change the capture resolution by editing the stream-config XML in place.
// Stream type ranges 1-4; 1 (hard-coded in the URL) is the main stream.
// Returns true only when both the GET and the PUT succeed.
// NOTE(review): streamID is unused - confirm stream 1 is always intended.
// NOTE: "<ResolutionHeigth>" is intentionally misspelled; the device uses that tag.
bool HangYuCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
    if (res != 0 || resData.empty())
    {
        return false; // BUG FIX: was `return 0` literals in a bool function (same value, clearer intent)
    }
    std::string xmlString(resData.begin(), resData.end());
    size_t widthStart = xmlString.find("<ResolutionWidth>");
    size_t widthEnd = xmlString.find("</ResolutionWidth>");
    if (widthStart != std::string::npos && widthEnd != std::string::npos)
    {
        widthStart += std::string("<ResolutionWidth>").length();
        // BUG FIX: guard against a malformed document where the closing tag
        // precedes the opening tag (std::string::replace would throw).
        if (widthEnd >= widthStart)
            xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
    }
    size_t heightStart = xmlString.find("<ResolutionHeigth>");
    size_t heightEnd = xmlString.find("</ResolutionHeigth>");
    if (heightStart != std::string::npos && heightEnd != std::string::npos)
    {
        heightStart += std::string("<ResolutionHeigth>").length();
        if (heightEnd >= heightStart)
            xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
    }
    res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
    return res == 0;
}
// Configure the (single) OSD text overlay via /Pictures/<ch>/MultiOSDV2.
// The placeholder $$DATETIME$$ is stripped from the text and instead enables
// the device's own time overlay. Returns true on success.
// NOTE(review): 'pos' is ignored - the overlay position is fixed in the XML.
bool HangYuCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
    size_t posi = osdstring.find("$$DATETIME$$");
    bool hasDateTime = (posi != std::string::npos);
    if (posi != std::string::npos) {
        // Remove the token plus any trailing spaces/newlines.
        size_t endPos = posi + 12;
        while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
            endPos++;
        }
        osdstring.erase(posi, endPos - posi);
    }
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    // The device uses '^' as the line separator inside <Text>.
    std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
    string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></MultiLineOSD>";
    int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
    // BUG FIX: DoPutRequest returns 0 on success; the old code returned it
    // directly, reporting false on success and true on failure.
    return res == 0;
}
// HangYu devices expose only the time overlay plus one OSD; this fetches the
// MultiOSDV2 document, flips every <Enable> element to the requested state,
// and writes the document back. Failures are silently ignored.
void HangYuCtrl::EnableOsd(bool enable, uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> body;
    if (DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body) != 0 || body.empty())
    {
        return;
    }
    std::string xml(body.begin(), body.end());
    const std::string openTag = "<Enable>";
    const std::string closeTag = "</Enable>";
    const std::string newValue = enable ? "true" : "false";
    size_t cursor = 0;
    for (;;)
    {
        cursor = xml.find(openTag, cursor);
        if (cursor == std::string::npos)
        {
            break;
        }
        size_t valueBegin = cursor + openTag.length();
        size_t valueEnd = xml.find(closeTag, valueBegin);
        if (valueEnd == std::string::npos)
        {
            break;
        }
        xml.replace(valueBegin, valueEnd - valueBegin, newValue);
        cursor = valueEnd + closeTag.length();
    }
    DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xml.c_str(), body);
}
// Query /Streams/<ch>/1/Transport and extract the <RTSPURI> element from the
// XML reply. Returns an empty string on any failure.
std::string HangYuCtrl::GetStreamingUrl(uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> body;
    int rc = 0;
    // The device occasionally fails transiently; retry up to 10 times.
    for (int attempt = 0; attempt < 10; attempt++)
    {
        rc = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body);
        if (rc == 0 && !body.empty())
        {
            break;
        }
    }
    if (rc != 0 || body.empty())
    {
        return "";
    }
    body.push_back(0); // NUL-terminate so strstr can scan the buffer
    const char* open = strstr((const char*)&body[0], "<RTSPURI>");
    if (open == NULL)
    {
        return "";
    }
    open += 9; // skip past "<RTSPURI>"
    const char* close = strstr(open, "</RTSPURI>");
    if (close == NULL)
    {
        return "";
    }
    return std::string(open, close);
}
// PUT /System/Time with the current local time as YYYYMMDDThhmmss+08
// (the device expects a UTC+8 offset). Returns true on success.
// NOTE(review): the ts argument is ignored - FormatLocalDateTime reads the
// system clock; confirm this is intentional.
bool HangYuCtrl::UpdateTime(time_t ts)
{
    std::string body = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
        + FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
    std::string url = "http://" + m_ip + "/System/Time";
    std::vector<uint8_t> response;
    return DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body.c_str(), response) == 0;
}
// Capture a JPEG snapshot into img. If the device returns a suspiciously
// small payload (< 1000 bytes), temporarily lower the snapshot quality,
// retry once, then restore the original configuration. Any bytes before the
// JPEG SOI/APP0 header are stripped from the result. Returns true on success.
bool HangYuCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", m_ip.c_str(), (uint32_t)streamID);
    int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
    if (0 == nRet)
    {
        if (img.size() < 1000)
        {
            std::string originalConfig;
            bool qualityDowngraded = DowngradeQuality(originalConfig);
            XYLOG(XYLOG_SEVERITY_INFO,"Reduce Img Quality");
            if (qualityDowngraded)
            {
                // BUG FIX: the old code retried unconditionally (even when the
                // downgrade failed, and even when the first capture was fine)
                // and without clearing img, so the second snapshot was appended
                // onto the first. Retry only after a successful downgrade, on a
                // clean buffer.
                img.clear();
                nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
            }
            if (!originalConfig.empty())
            {
                UpdateQuality(originalConfig); // restore the previous quality setting
            }
        }
        // Some firmwares prepend junk before the image; drop everything up to
        // the JPEG header (FF D8 FF E0).
        std::vector<uint8_t> header = {0xFF, 0xD8, 0xFF, 0xE0}; // JPEG
        std::vector<uint8_t>::iterator it = std::search(img.begin(), img.end(), header.begin(), header.end());
        if (it != img.end() && it != img.begin())
        {
            img.erase(img.begin(), it);
        }
    }
    return nRet == 0;
}
// Lower the snapshot <Quality> one step (middle->low, else high->middle) and
// PUT the modified config back. originalConfig receives the pre-change
// document so the caller can restore it. Returns true only when a
// substitution was made and the PUT succeeded.
bool HangYuCtrl::DowngradeQuality(std::string& originalConfig)
{
    char url[64] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
    std::vector<uint8_t> data;
    int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
    // BUG FIX: also require a non-empty body - the old code dereferenced
    // &data[0] without checking (undefined behavior on an empty vector).
    if (0 != nRet || data.empty())
    {
        return false;
    }
    std::string str = ByteArrayToString(&data[0], data.size());
    originalConfig = str;
    // Try middle->low first; only when nothing matched, try high->middle
    // (short-circuit mirrors the original two-step logic).
    bool changed = (replaceAll(str, "<Quality>middle</Quality>", "<Quality>low</Quality>") != 0)
        || (replaceAll(str, "<Quality>high</Quality>", "<Quality>middle</Quality>") != 0);
    if (!changed)
    {
        return false; // simplified: the old `if (!res) ... if (res)` pair was redundant
    }
    data.clear();
    nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
    return 0 == nRet;
}
// PUT a previously-saved snapshot configuration back to the device.
// Returns true when the request succeeds.
bool HangYuCtrl::UpdateQuality(const std::string& originalConfig)
{
    char url[64] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
    std::vector<uint8_t> response;
    return DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, originalConfig.c_str(), response) == 0;
}
// Raise the snapshot <Quality> one step (low->middle, else middle->high) and
// PUT the modified config back. Returns true only when a substitution was
// made and the PUT succeeded.
bool HangYuCtrl::UpgradeQuality()
{
    char url[64] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
    std::vector<uint8_t> data;
    int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
    // BUG FIX: also require a non-empty body - the old code dereferenced
    // &data[0] without checking (undefined behavior on an empty vector).
    if (0 != nRet || data.empty())
    {
        return false;
    }
    std::string str = ByteArrayToString(&data[0], data.size());
    // Try low->middle first; only when nothing matched, try middle->high
    // (short-circuit mirrors the original two-step logic).
    bool changed = (replaceAll(str, "<Quality>low</Quality>", "<Quality>middle</Quality>") != 0)
        || (replaceAll(str, "<Quality>middle</Quality>", "<Quality>high</Quality>") != 0);
    if (!changed)
    {
        return false; // simplified: the old `if (!res) ... if (res)` pair was redundant
    }
    data.clear();
    nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
    return 0 == nRet;
}
// Read the snapshot configuration XML into qualityContents.
// Returns true when the request succeeds (qualityContents is set only when
// the body is non-empty).
bool HangYuCtrl::QueryQuality(std::string& qualityContents)
{
    char url[64] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
    std::vector<uint8_t> body;
    int rc = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body);
    if (rc == 0 && !body.empty())
    {
        qualityContents = ByteArrayToString(&body[0], body.size());
    }
    return rc == 0;
}
// Video capture is not implemented for this vendor; always reports failure.
bool HangYuCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
    return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HANGYUCTRL_H__
#define __MICROPHOTO_HANGYUCTRL_H__
#include "VendorCtrl.h"
// HTTP control driver for HangYu network cameras. Operations use the
// camera's XML-over-HTTP interface with basic authentication (credentials
// and network handle are held by VendorCtrl).
class HangYuCtrl : public VendorCtrl
{
public:
    using VendorCtrl::VendorCtrl; // inherit VendorCtrl(ip, user, password, channel, netHandle, syncTime)
    virtual ~HangYuCtrl();
    // Set the single OSD text overlay; $$DATETIME$$ in osd enables the time overlay.
    virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
    // Toggle all OSD overlays on/off for the channel.
    virtual void EnableOsd(bool enable, uint8_t channel);
    // Resolve the camera's RTSP URL for the given channel.
    virtual std::string GetStreamingUrl(uint8_t channel);
    // Push the local time to the device.
    virtual bool UpdateTime(time_t ts);
    // Capture a JPEG snapshot into img.
    virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
    // Not implemented; always returns false.
    virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
    // RTSP streaming requires credentials on this vendor.
    virtual bool HasAuthOnStreaming() const { return true; }
    // Change the stream resolution (width x height).
    virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
    // Snapshot-quality helpers used to recover from undersized captures.
    bool QueryQuality(std::string& qualityContents);
    bool DowngradeQuality(std::string& originalConfig);
    bool UpdateQuality(const std::string& originalConfig);
    bool UpgradeQuality();
};
#endif //__MICROPHOTO_HANGYUCTRL_H__

@ -1,204 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HikonCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
// Destructor; nothing to release beyond the base class.
HikonCtrl::~HikonCtrl()
{
}
// Change the capture resolution by editing the stream-config XML in place.
// Stream type ranges 1-4; 1 (hard-coded in the URL) is the main stream.
// Returns true only when both the GET and the PUT succeed.
// NOTE(review): streamID is unused - confirm stream 1 is always intended.
// NOTE: "<ResolutionHeigth>" is intentionally misspelled; the device uses that tag.
bool HikonCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
    if (res != 0 || resData.empty())
    {
        return false; // BUG FIX: was `return 0` literals in a bool function (same value, clearer intent)
    }
    std::string xmlString(resData.begin(), resData.end());
    size_t widthStart = xmlString.find("<ResolutionWidth>");
    size_t widthEnd = xmlString.find("</ResolutionWidth>");
    if (widthStart != std::string::npos && widthEnd != std::string::npos)
    {
        widthStart += std::string("<ResolutionWidth>").length();
        // BUG FIX: guard against a malformed document where the closing tag
        // precedes the opening tag (std::string::replace would throw).
        if (widthEnd >= widthStart)
            xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
    }
    size_t heightStart = xmlString.find("<ResolutionHeigth>");
    size_t heightEnd = xmlString.find("</ResolutionHeigth>");
    if (heightStart != std::string::npos && heightEnd != std::string::npos)
    {
        heightStart += std::string("<ResolutionHeigth>").length();
        if (heightEnd >= heightStart)
            xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
    }
    res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
    return res == 0;
}
// Configure the (single) OSD text overlay via /Pictures/<ch>/MultiOSDV2.
// The placeholder $$DATETIME$$ is stripped from the text and instead enables
// the device's own time overlay. Returns true on success.
// NOTE(review): 'pos' is ignored - the overlay position is fixed in the XML.
bool HikonCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
    size_t posi = osdstring.find("$$DATETIME$$");
    bool hasDateTime = (posi != std::string::npos);
    if (posi != std::string::npos) {
        // Remove the token plus any trailing spaces/newlines.
        size_t endPos = posi + 12;
        while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
            endPos++;
        }
        osdstring.erase(posi, endPos - posi);
    }
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    // The device uses '^' as the line separator inside <Text>.
    std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
    string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></MultiLineOSD>";
    int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
    // BUG FIX: DoPutRequest returns 0 on success; the old code returned it
    // directly, reporting false on success and true on failure.
    return res == 0;
}
// Fetch the MultiOSDV2 document, flip every <Enable> element to the requested
// state, and write the document back. (This device family supports only the
// time overlay plus one OSD.) Failures are silently ignored.
void HikonCtrl::EnableOsd(bool enable, uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> body;
    if (DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body) != 0 || body.empty())
    {
        return;
    }
    std::string xml(body.begin(), body.end());
    const std::string openTag = "<Enable>";
    const std::string closeTag = "</Enable>";
    const std::string newValue = enable ? "true" : "false";
    size_t searchFrom = 0;
    while ((searchFrom = xml.find(openTag, searchFrom)) != std::string::npos)
    {
        size_t valueBegin = searchFrom + openTag.length();
        size_t valueEnd = xml.find(closeTag, valueBegin);
        if (valueEnd == std::string::npos)
        {
            break;
        }
        xml.replace(valueBegin, valueEnd - valueBegin, newValue);
        searchFrom = valueEnd + closeTag.length();
    }
    DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xml.c_str(), body);
}
// Query /Streams/<ch>/1/Transport and extract the <RTSPURI> element from the
// XML reply. Returns an empty string on any failure.
std::string HikonCtrl::GetStreamingUrl(uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> body;
    int rc = 0;
    // The device occasionally fails transiently; retry up to 10 times.
    for (int attempt = 0; attempt < 10; attempt++)
    {
        rc = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body);
        if (rc == 0 && !body.empty())
        {
            break;
        }
    }
    if (rc != 0 || body.empty())
    {
        return "";
    }
    body.push_back(0); // NUL-terminate so strstr can scan the buffer
    const char* open = strstr((const char*)&body[0], "<RTSPURI>");
    if (open == NULL)
    {
        return "";
    }
    open += 9; // skip past "<RTSPURI>"
    const char* close = strstr(open, "</RTSPURI>");
    if (close == NULL)
    {
        return "";
    }
    return std::string(open, close);
}
// PUT /System/Time with the current local time as YYYYMMDDThhmmss+08
// (the device expects a UTC+8 offset). Returns true on success.
// NOTE(review): the ts argument is ignored - FormatLocalDateTime reads the
// system clock; confirm this is intentional.
bool HikonCtrl::UpdateTime(time_t ts)
{
    std::string body = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
        + FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
    std::string url = "http://" + m_ip + "/System/Time";
    std::vector<uint8_t> response;
    return DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, body.c_str(), response) == 0;
}
// Grab a JPEG snapshot via the ISAPI picture endpoint using digest auth.
// Returns true on success; the last curl error code is stored in
// m_lastErrorCode for IsTimeout()/GetLastError().
// NOTE(review): streamID is unused; channel 1 is hard-coded - confirm.
bool HikonCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/ISAPI/Streaming/channels/1/picture?", m_ip.c_str());
    return DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode) == 0;
}
// Video capture is not implemented for this vendor; always reports failure.
bool HikonCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
    return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HIKONCTRL_H__
#define __MICROPHOTO_HIKONCTRL_H__
#include "VendorCtrl.h"
// HTTP control driver for Hikon network cameras. Snapshots use the
// digest-authenticated ISAPI endpoint; OSD/resolution/time use the
// XML-over-HTTP interface with basic authentication.
class HikonCtrl : public VendorCtrl
{
public:
    using VendorCtrl::VendorCtrl; // inherit VendorCtrl(ip, user, password, channel, netHandle, syncTime)
    virtual ~HikonCtrl();
    // Set the single OSD text overlay; $$DATETIME$$ in osd enables the time overlay.
    virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
    // Toggle all OSD overlays on/off for the channel.
    virtual void EnableOsd(bool enable, uint8_t channel);
    // Resolve the camera's RTSP URL for the given channel.
    virtual std::string GetStreamingUrl(uint8_t channel);
    // Push the local time to the device.
    virtual bool UpdateTime(time_t ts);
    // Capture a JPEG snapshot into img.
    virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
    // Not implemented; always returns false.
    virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
    // RTSP streaming requires credentials on this vendor.
    virtual bool HasAuthOnStreaming() const { return true; }
    // Change the stream resolution (width x height).
    virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
    // NOTE(review): these quality helpers are declared but no definitions are
    // visible in HikonCtrl.cpp (unlike HangYuCtrl) - confirm they exist or remove.
    bool QueryQuality(std::string& qualityContents);
    bool DowngradeQuality(std::string& originalConfig);
    bool UpdateQuality(const std::string& originalConfig);
    bool UpgradeQuality();
};

@ -1,27 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "VendorCtrl.h"
#include <curl/curl.h>
// Store connection parameters for a vendor driver.
// NOTE(review): syncTime is currently unused - confirm whether initial time
// sync was meant to happen here.
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) :
    m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle),
    // BUG FIX: m_lastErrorCode was never initialized, so GetLastError() and
    // IsTimeout() read an indeterminate value before the first request.
    m_lastErrorCode(0)
{
}
// Serialize a JSON value to a string. Debug builds pretty-print with tabs and
// emit raw UTF-8 for readable logs; release builds produce compact output.
std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
{
    Json::StreamWriterBuilder builder;
#ifndef NDEBUG
    builder["indentation"] = "\t"; // assume default for comments is None
    builder["emitUTF8"] = true;
#else
    builder["indentation"] = "";
#endif
    return Json::writeString(builder, data);
}
// True when the most recent curl operation recorded in m_lastErrorCode
// failed with a timeout.
bool VendorCtrl::IsTimeout() const
{
    return m_lastErrorCode == CURLE_OPERATION_TIMEDOUT;
}

@ -1,50 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_VENDORCTRL_H
#define MICROPHOTO_VENDORCTRL_H
#include <string>
#include <json/json.h>
#include <android/multinetwork.h>
#define LEFT_TOP 0
#define RIGHT_TOP 1
#define LEFT_BOTTOM 2
#define RIGHT_BOTTOM 3
// Abstract base for vendor-specific network-camera control drivers
// (HangYu, Hikon, YuShi). Holds connection parameters and the last curl
// error code; concrete vendors implement the pure-virtual operations.
class VendorCtrl {
public:
    VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true);
    virtual ~VendorCtrl() {}
    // Write an OSD text overlay; pos is one of LEFT_TOP..RIGHT_BOTTOM.
    virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos) = 0;
    // Toggle OSD overlays on/off.
    virtual void EnableOsd(bool enable, uint8_t channel) = 0;
    // Resolve the RTSP/streaming URL for a channel.
    virtual std::string GetStreamingUrl(uint8_t channel) = 0;
    // Push the given/current time to the device.
    virtual bool UpdateTime(time_t ts) = 0;
    // Capture a still image into img.
    virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img) = 0;
    // Record video to path for 'duration'.
    virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path) = 0;
    // Whether the vendor's stream URL requires embedded credentials.
    virtual bool HasAuthOnStreaming() const { return false; }
    // Change stream resolution.
    virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) = 0;
    void UpdateNetHandle(net_handle_t netHandle) { m_netHandle = netHandle; }
    // Raw CURLcode of the last request that reported one.
    int GetLastError() const { return m_lastErrorCode; }
    bool IsTimeout() const;
protected:
    std::string CvtJSONToString(const Json::Value& data);
protected:
    std::string m_ip;
    std::string m_userName;
    std::string m_password;
    uint8_t m_channel;
    net_handle_t m_netHandle;
    // BUG FIX: default-initialize - the constructor never set this, so
    // GetLastError()/IsTimeout() could read an indeterminate value before
    // the first request completed.
    int m_lastErrorCode = 0;
};
#endif //MICROPHOTO_VENDORCTRL_H

@ -1,237 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "YuShiCtrl.h"
#include "httpclient.h"
#include "netcamera.h"
#include <json/json.h>
// Destructor; nothing to release beyond the base class.
YuShiCtrl::~YuShiCtrl()
{
}
// Resolution changes are not implemented for this vendor; always reports failure.
bool YuShiCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
    return false;
}
// Write an OSD overlay via /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents.
// 'pos' selects one of the four corners; the device time is shown only when
// the overlay sits at the top-left. Returns true on success.
bool YuShiCtrl::SetOsd(uint8_t channel, std::string osd, uint8_t pos)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    string jsonstring;
    switch (pos) {
    case LEFT_TOP:
        OSDJson(0, 1, osd, 0, 0, true, jsonstring); // time overlay only at top-left
        break;
    case RIGHT_TOP:
        OSDJson(1, 1, osd, 9900, 0, false, jsonstring);
        break;
    case LEFT_BOTTOM:
        OSDJson(2, 1, osd, 0, 9900, false, jsonstring);
        break;
    case RIGHT_BOTTOM:
        OSDJson(3, 1, osd, 9900, 9900, false, jsonstring);
        break;
    }
    int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, jsonstring.c_str(), resData);
    // BUG FIX: DoPutRequest returns 0 on success; the old code returned it
    // directly, reporting false on success and true on failure.
    return res == 0;
}
// Toggle the Enabled flag on every OSD content entry of the channel:
// GET the current contents, rewrite ContentList[*].Enabled, PUT them back.
void YuShiCtrl::EnableOsd(bool enable, uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
    // BUG FIX: the old code ignored the request result and handed a possibly
    // empty buffer to the JSON parser.
    if (res != 0 || resData.empty())
    {
        XYLOG(XYLOG_SEVERITY_ERROR, "EnableOsd: failed to fetch OSD contents");
        return;
    }
    std::string jsonString(resData.begin(), resData.end());
    Json::CharReaderBuilder reader;
    Json::Value root;
    std::string errors;
    std::istringstream s(jsonString);
    if (!Json::parseFromStream(reader, s, &root, &errors)) {
        XYLOG(XYLOG_SEVERITY_ERROR, "Failed to parse JSON:%s", errors.c_str());
        return;
    }
    Json::Value& data = root["Response"]["Data"];
    if (data.isNull()) {
        XYLOG(XYLOG_SEVERITY_ERROR,"Data not found in JSON");
        return;
    }
    Json::Value& contentList = data["ContentList"];
    for (auto& content : contentList) {
        content["Enabled"] = enable ? 1 : 0;
    }
    Json::StreamWriterBuilder writer;
    // NOTE(review): only the Data subtree is PUT back, not the full response
    // document - confirm the device accepts this shape.
    std::string putJsonString = Json::writeString(writer, data);
    resData.clear(); // reuse the buffer for the PUT response
    DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, putJsonString.c_str(), resData);
}
// GET .../Media/Video/Streams/0/LiveStreamURL and pull Response.Data.URL out
// of the JSON reply. Returns an empty string on any failure.
std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
{
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel);
    std::vector<uint8_t> resData;
    if (DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData) != 0 || resData.empty())
    {
        return "";
    }
    resData.push_back(0); // NUL terminator so the buffer bounds are clear
    Json::CharReaderBuilder builder;
    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
    Json::Value json;
    const char* doc = (const char*)&(resData[0]);
    // Guard-clause restructure of the original nested isMember() checks.
    if (!reader->parse(doc, doc + resData.size() - 1, &json, NULL))
    {
        return "";
    }
    if (!json.isMember("Response"))
    {
        return "";
    }
    Json::Value& jsonRes = json["Response"];
    if (!jsonRes.isMember("Data"))
    {
        return "";
    }
    Json::Value& jsonData = jsonRes["Data"];
    if (!jsonData.isMember("URL"))
    {
        return "";
    }
    return std::string(jsonData["URL"].asCString());
}
// PUT /LAPI/V1.0/System/Time with GMT+8 zone, YYYY-MM-DD date format,
// 24-hour clock and the supplied epoch seconds as DeviceTime.
// Returns true on success.
bool YuShiCtrl::UpdateTime(time_t ts)
{
    std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}";
    std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time";
    std::vector<uint8_t> resData;
    return DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData) == 0;
}
// Fetch a JPEG snapshot from the Uniview (Yu Shi) LAPI snapshot endpoint
// using digest auth; the last curl code lands in m_lastErrorCode.
bool YuShiCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
    const int streamSid = 0; // sub-stream index; should come from config
    char url[128] = { 0 };
    snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", m_ip.c_str(), (uint32_t)streamID, streamSid);
    return DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode) == 0;
}
// Video capture is not implemented for this vendor; always reports failure.
bool YuShiCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) {
    return false;
}
// Build the LAPI JSON body for one multi-line OSD entry.
//   id         - OSD slot ID (0-3, one per corner as used by SetOsd)
//   enabled    - whether the overlay is shown
//   osdString  - overlay text; '\n' separates display rows
//   x, y       - top-left anchor (device-relative units; 9900 pins to far edge)
//   timeOn     - prepend a ContentType=2 (device time) entry before the text rows
//   jsonString - [out] serialized request body
void YuShiCtrl::OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString)
{
    Json::Value root;
    root["Num"] = 1; // one OSD entry in ContentList
    Json::Value contentList(Json::arrayValue);
    Json::Value content;
    content["ID"] = id;
    content["Enabled"] = enabled;
    // Count display rows: one per newline plus the final line.
    int row = 1;
    for (char ch : osdString) {
        if (ch == '\n') {
            row++;
        }
    }
    // NOTE(review): when timeOn is true, ContentInfo holds row+1 entries but
    // "Num" stays at row - confirm the device expects Num to count text rows only.
    content["Num"] = row;
    Json::Value contentInfo(Json::arrayValue);
    size_t start = 0;
    size_t end = osdString.find('\n');
    if(timeOn)
    {
        // Show the device time in this slot (ContentType 2 = time overlay).
        Json::Value info;
        info["ContentType"] = 2;
        info["Value"] = "";
        contentInfo.append(info);
    }
    // Split osdString on '\n' into one ContentType=1 (plain text) entry per row.
    for (int i = 0; i < row; i++)
    {
        std::string line;
        if (end == std::string::npos) {
            line = osdString.substr(start);
        } else {
            line = osdString.substr(start, end - start);
            start = end + 1;
            end = osdString.find('\n', start);
        }
        Json::Value info;
        info["ContentType"] = 1;
        info["Value"] = line;
        contentInfo.append(info);
    }
    content["ContentInfo"] = contentInfo;
    Json::Value area;
    Json::Value topLeft;
    topLeft["X"] = x; //9900
    topLeft["Y"] = y;
    area["TopLeft"] = topLeft;
    content["Area"] = area;
    contentList.append(content);
    root["ContentList"] = contentList;
    Json::StreamWriterBuilder writer;
    jsonString = Json::writeString(writer, root);
}

@ -1,30 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_YUSHICTRL_H
#define MICROPHOTO_YUSHICTRL_H
#include "VendorCtrl.h"
// HTTP control driver for Uniview ("Yu Shi") cameras using the JSON LAPI
// interface (digest auth for snapshots/time, basic auth for OSD).
class YuShiCtrl : public VendorCtrl
{
public:
    using VendorCtrl::VendorCtrl; // inherit VendorCtrl(ip, user, password, channel, netHandle, syncTime)
    virtual ~YuShiCtrl();
    // Write an OSD overlay; 'pos' selects one of the four corners.
    virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
    // Toggle all OSD content entries on/off for the channel.
    virtual void EnableOsd(bool enable, uint8_t channel);
    // NOTE(review): parameter named streamID here but 'channel' in the base
    // class and the .cpp - same value, inconsistent naming.
    virtual std::string GetStreamingUrl(uint8_t streamID);
    // Push the given epoch time to the device (GMT+8).
    virtual bool UpdateTime(time_t ts);
    // Capture a JPEG snapshot into img.
    virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
    // Not implemented; always returns false.
    virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
    // Not implemented; always returns false.
    virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
    // Build the LAPI OSD JSON body (see YuShiCtrl.cpp for the field layout).
    void OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString);
};
#endif //MICROPHOTO_YUSHICTRL_H

@ -1,350 +0,0 @@
#include "httpclient.h"
#include "netcamera.h"
#include <LogThread.h>
#include <errno.h>
// libcurl CURLOPT_WRITEFUNCTION callback: append the received chunk to the
// std::vector<uint8_t> passed via CURLOPT_WRITEDATA. libcurl requires the
// return value to be the number of BYTES consumed (size * nmemb); any other
// value aborts the transfer with CURLE_WRITE_ERROR.
static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid)
{
    std::vector<uint8_t>* data = (std::vector<uint8_t>*)lpVoid;
    if( NULL == data || NULL == buffer )
    {
        XYLOG(XYLOG_SEVERITY_ERROR,"OnWriteData callback -1");
        return 0; // any value != size*nmemb signals failure to curl
    }
    uint8_t* begin = (uint8_t *)buffer;
    size_t bytes = size * nmemb;
    data->insert(data->end(), begin, begin + bytes);
    // BUG FIX: previously returned nmemb; curl requires size * nmemb, so any
    // delivery where size != 1 was treated as a short write and aborted.
    return bytes;
}
// libcurl CURLOPT_SOCKOPTFUNCTION callback: bind the freshly created socket
// to a specific Android network (clientp points at a net_handle_t).
// Returning CURL_SOCKOPT_ERROR makes curl abort the transfer when binding fails.
static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype purpose)
{
    net_handle_t netHandle = *((net_handle_t *)clientp);
    int res = android_setsocknetwork(netHandle, curlfd);
    if (res == -1)
    {
        // Preserve errno before any other call can clobber it.
        int errcode = errno;
        printf("android_setsocknetwork errno=%d", errcode);
        XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
    }
    return res == 0 ? CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR;
}
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/)
{
CURLcode nRet;
std::string auth;
CURL *curl = curl_easy_init();
curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "GET");
curl_easy_setopt(curl, CURLOPT_URL, url);
if (authType != HTTP_AUTH_TYPE_NONE)
{
if (userName != NULL && password != NULL && strlen(userName) > 0)
{
auth = userName;
auth += ":";
auth += password;
curl_easy_setopt(curl, CURLOPT_USERPWD, auth.c_str());
// DIGEST Auth
if (authType == HTTP_AUTH_TYPE_BASIC)
{
curl_easy_setopt(curl, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
}
else if (authType == HTTP_AUTH_TYPE_DIGEST)
{
curl_easy_setopt(curl, CURLOPT_HTTPAUTH, CURLAUTH_DIGEST);
}
}
}
if (netHandle != NETWORK_UNSPECIFIED)
{
#if 0
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
}
curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
curl_easy_setopt(curl, CURLOPT_USERAGENT , "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.94 Safari/537.36");
//
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, OnWriteData);
// 设置回调函数的参数,获取反馈信息
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &data);
// 接收数据时超时设置如果5秒内数据未接收完直接退出
#ifndef NDEBUG
curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60);
#else
curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60);
#endif
// 设置重定向次数,防止重定向次数太多
curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 4);
// 连接超时,这个数值如果设置太短可能导致数据请求不到就断开了
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
nRet = curl_easy_perform(curl);
if (curlResVal != NULL)
{
*curlResVal = nRet;
}
long responseCode = 0;
if (CURLE_OK == nRet)
{
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &responseCode);
if (responseCode != 200)
{
// #ifdef _DEBUG
char * log = new char[data.size() + 16];
memset(&log[0], 0, data.size());
snprintf(log, data.size() + 16, "%d", (int)responseCode);
if (!data.empty())
{
strcat(log, " ");
memcpy(&log[strlen(log)], &data[0], data.size());
}
// printf("%s", log);
XYLOG(XYLOG_SEVERITY_ERROR, log);
delete[] log;
// #endif
}
}
else
{
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &responseCode);
XYLOG(XYLOG_SEVERITY_WARNING, "Net Photo failure, nRet=%d, code=%d", (int)nRet, (int)responseCode);
// printf("GET err=%d", nRet);
}
curl_easy_cleanup(curl);
return ((0 == nRet) && (responseCode == 200)) ? 0 : 1;
}
// Perform an HTTP PUT with optional basic/digest auth. 'contents' (may be
// NULL) is sent as the request body; the response body is appended to 'data'.
// Returns 0 when curl reports success, 1 otherwise; curlResVal (if non-NULL)
// receives the raw CURLcode.
// NOTE(review): unlike DoGetRequest this does not inspect the HTTP status
// code, so a 4xx/5xx reply still counts as success - confirm callers expect that.
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/)
{
    std::string auth;
    CURL *curl = curl_easy_init();
    curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "PUT");
    curl_easy_setopt(curl, CURLOPT_URL, url);
    if (authType != HTTP_AUTH_TYPE_NONE)
    {
        if (userName != NULL && password != NULL && strlen(userName) > 0)
        {
            auth = userName;
            auth += ":";
            auth += password;
            curl_easy_setopt(curl, CURLOPT_USERPWD, auth.c_str());
            // Select the auth scheme curl is allowed to use.
            if (authType == HTTP_AUTH_TYPE_BASIC)
            {
                curl_easy_setopt(curl, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
            }
            else if (authType == HTTP_AUTH_TYPE_DIGEST)
            {
                curl_easy_setopt(curl, CURLOPT_HTTPAUTH, CURLAUTH_DIGEST);
            }
        }
    }
    if (netHandle != NETWORK_UNSPECIFIED)
    {
#if 0
        // Binding the socket to a specific Android network is currently disabled.
        curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
        curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
    }
    if(contents != NULL)
    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, contents);
    // Response bytes are appended to 'data' by OnWriteData.
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, OnWriteData);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &data);
    // Overall transfer timeout in seconds.
    curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60);
    // Cap redirects so a misbehaving server cannot loop us.
    curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 4);
    // Connection timeout; too small and slow devices never get to answer.
    curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
    CURLcode nRet = curl_easy_perform(curl);
    if (curlResVal != NULL)
    {
        *curlResVal = nRet;
    }
    if (CURLE_OK != nRet)
    {
        printf("GET err=%d", nRet);
    }
    curl_easy_cleanup(curl);
    return (0 == nRet) ? 0 : 1;
}
// Fetch a snapshot over HTTP and persist it to photoInfo.outputPath.
// Returns true only when the request succeeded, data arrived, and the file
// was opened and written.
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo)
{
    const char* userName = NULL;
    const char* password = NULL;
    if (photoInfo.authType != 0)
    {
        userName = photoInfo.userName;
        password = photoInfo.password;
    }
    std::string url = std::string("http://") + photoInfo.ip + photoInfo.url;
    std::vector<uint8_t> data;
    if (DoGetRequest(url.c_str(), photoInfo.authType, userName, password, photoInfo.netHandle, data) != 0 || data.empty())
    {
        return false;
    }
    FILE* fp = fopen(photoInfo.outputPath, "wb");
    if (fp == NULL)
    {
        return false;
    }
    fwrite(&data[0], data.size(), 1, fp);
    fdatasync(fileno(fp)); // force the image onto storage before reporting success
    fclose(fp);
    return true;
}
// Fetches a snapshot from the camera described by photoInfo and returns the
// raw image bytes in `img`. channel/preset are currently unused.
// Credentials are always forwarded (the auth-type gate was deliberately
// disabled in a previous revision).
// Returns true if the HTTP GET succeeded.
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img)
{
    const char* user = photoInfo.userName;
    const char* pwd = photoInfo.password;
    std::string requestUrl("http://");
    requestUrl.append(photoInfo.ip).append(photoInfo.url);
    int status = DoGetRequest(requestUrl.c_str(), photoInfo.authType, user, pwd, photoInfo.netHandle, img);
    return status == 0;
}
int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned int cmd)
{
std::string path = "/LAPI/V1.0/Channels/" + std::to_string(channel) + "/Media/Capture";
Json::Value outdata; // 创建 Json::Value 对象来存储 JSON 数据
uniview_resolution_jsoncpp_file_info(outdata, cmd);
Json::StreamWriterBuilder writer;
std::string sendbuf = Json::writeString(writer, outdata);
std::vector<uint8_t> respContent;
DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent);
// respContent.push_back(0);
// XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), (const char*)&respContent[0]);
return 0;
}
// Fills `outdata` with the JSON body for a Uniview resolution-set request:
// { "Enable": 1, "Resolution": {"Width": w, "Height": h}, "Picturesize": 900 }
// cmd is a 1-based index into the table; invalid values default to 5 (1920x1080).
// Always returns 0.
int uniview_resolution_jsoncpp_file_info(Json::Value &outdata, unsigned int cmd)
{
    static const PIC_RESOLUTION pic_resol[] = { {352,288},{640,360},{720,576},{1280,720},{1920,1080},{2688,1520},{3072,2048},{3840,2160},{2560,1440},{704,288} };
    if ((cmd < 1) || (cmd > 10))
    {
        cmd = 5;
    }
    outdata["Enable"] = 1;
    // Populate the sub-object BEFORE attaching it: Json::Value::operator=
    // copies by value, so the previous code stored an *empty* "Resolution"
    // object and the Width/Height written afterwards were silently lost.
    Json::Value Resolution;
    Resolution["Width"] = pic_resol[cmd-1].width;
    Resolution["Height"] = pic_resol[cmd-1].height;
    outdata["Resolution"] = Resolution;
    outdata["Picturesize"] = 900;
    return 0;
}
namespace nc_hk
{
// Captures a snapshot from a Hikvision camera over HTTP GET; image bytes are
// returned in `img`. channel/preset are currently unused.
// Returns true if the HTTP request succeeded.
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img)
{
    const char* userName = NULL;
    const char* password = NULL;
    if (photoInfo.authType != 0)
    {
        userName = photoInfo.userName;
        password = photoInfo.password;
    }
    std::string url = "http://";
    url += photoInfo.ip;
    url += photoInfo.url;
    int nRet = DoGetRequest(url.c_str(), photoInfo.authType, userName, password, photoInfo.netHandle, img);
#ifdef _DEBUG
    // Guard against an empty body: &img[0] on an empty vector is undefined
    // behavior. Debug-only dump of the captured image.
    if (0 == nRet && !img.empty())
    {
        FILE *fp = fopen("/sdcard/com.xypower.mpapp/tmp/netimg.jpg", "wb");
        if (fp != NULL)
        {
            fwrite(&img[0], img.size(), 1, fp);
            fclose(fp);
        }
    }
#endif
    return (0 == nRet);
}
}
namespace nc_ys
{
// Captures a snapshot from a YS camera over HTTP GET; image bytes are
// returned in `img`. channel/preset are currently unused.
// Returns true if the HTTP request succeeded.
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img)
{
    const char* userName = NULL;
    const char* password = NULL;
    if (photoInfo.authType != 0)
    {
        userName = photoInfo.userName;
        password = photoInfo.password;
    }
    std::string url = "http://";
    url += photoInfo.ip;
    url += photoInfo.url;
    int nRet = DoGetRequest(url.c_str(), photoInfo.authType, userName, password, photoInfo.netHandle, img);
#ifdef _DEBUG
    // Guard against an empty body: &img[0] on an empty vector is undefined
    // behavior. Debug-only dump of the captured image.
    if (0 == nRet && !img.empty())
    {
        FILE *fp = fopen("/sdcard/com.xypower.mpapp/tmp/netimg.jpg", "wb");
        if (fp != NULL)
        {
            fwrite(&img[0], img.size(), 1, fp);
            fclose(fp);
        }
    }
#endif
    return (0 == nRet);
}
}

@ -1,24 +0,0 @@
#include <string>
#include <vector>
#include <curl/curl.h>
#include <unistd.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <net/if.h>
#include <bits/ioctl.h>
#include <json/json.h>
#include <android/multinetwork.h>
#include "LogThread.h"
#ifndef __HTTP_CLIENT__
#define __HTTP_CLIENT__
bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr);
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal = NULL);
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal = NULL);
#endif // __HTTP_CLIENT__

@ -1,61 +0,0 @@
#include <stdint.h>
#include <vector>
#include <android/multinetwork.h>
#ifndef __NET_CAMERA__
#define __NET_CAMERA__
#define HTTP_AUTH_TYPE_NONE 0
#define HTTP_AUTH_TYPE_BASIC 1
#define HTTP_AUTH_TYPE_DIGEST 2
// Parameters describing one network-camera snapshot request.
// Fixed-size char arrays keep the struct trivially copyable across the JNI
// boundary.
struct NET_PHOTO_INFO
{
    net_handle_t netHandle;    // Android network handle the request is bound to
    unsigned char authType;    // HTTP_AUTH_TYPE_NONE / _BASIC / _DIGEST
    unsigned char reserved[7]; // for memory alignment
    char ip[24];               // camera host ("ip" or "ip:port")
    char userName[8];          // NOTE(review): only 7 chars + NUL fit -- confirm sufficient
    char password[16];
    char url[128];             // request path appended after "http://<ip>"
    char outputPath[128];      // destination file for the captured image
};
/*
struct NET_PHOTO_INFO
{
std::string ip;
std::string userName;
std::string password;
std::string interface;
std::string url;
std::string outputPath;
unsigned char authType; // 0, 1
unsigned char reserved[7]; // for memory alignment
};
*/
// Width/height pair (in pixels) used by the camera resolution table.
typedef struct
{
    unsigned int width;  // pixels
    unsigned int height; // pixels
}PIC_RESOLUTION;
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo);
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img);
//bool setOSD(uint8_t channel, bool status, int type, uint32_t cameraId, const NET_PHOTO_INFO& photoInfo);
int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned int cmd);
int uniview_resolution_jsoncpp_file_info(Json::Value &outdata, unsigned int cmd);
namespace nc_hk
{
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img);
}
namespace nc_ys
{
bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photoInfo, std::vector<uint8_t>& img);
}
#endif // __NET_CAMERA__

@ -112,6 +112,37 @@ int set_port_attr (int fd, int baudrate, int databit, const char *stopbit, char
return (tcsetattr (fd, TCSANOW, &opt));
}
// Writes a single integer parameter to the MTK GPIO control driver
// (/dev/mtkgpioctrl) via the IOT_PARAM_WRITE ioctl.
//   cmd   - parameter id (one of the CMD_SET_* constants)
//   value - value to write
// Failures (open or ioctl) are silently ignored; this is best-effort hardware
// poking, matching the original behavior.
static void setInt(int cmd, int value)
{
    int fd = open("/dev/mtkgpioctrl", O_RDONLY);
    IOT_PARAM param;
    param.cmd = cmd;
    param.value = value;
    // fd 0 is a valid descriptor; the old "fd > 0" test would both skip the
    // ioctl and leak the descriptor in that (unlikely) case.
    if (fd >= 0)
    {
        int res = ioctl(fd, IOT_PARAM_WRITE, &param);
        (void)res; // result intentionally ignored; re-enable logging to debug
        // LOGE("set_int cmd=%d,value=%d,result=%d\r\n", param.cmd, param.value, param.result);
        close(fd);
    }
    return;
}
// Drives the RS-485 transceiver enable line (true = enabled).
static void setRS485Enable(bool z) {
    setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
}
// Switches the RS-485 transceiver into transmit (write) mode.
static void set485WriteMode() {
    setInt(CMD_SET_485_STATE, 1);
}
// Switches the RS-485 transceiver into receive (read) mode.
static void set485ReadMode() {
    setInt(CMD_SET_485_STATE, 0);
}
// Enables/disables the 12V power rail (true = on).
static void set12VEnable(bool z) {
    setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
/*********************************************************************************
* *
**********************************************************************************/

@ -8,6 +8,10 @@
#include <string>
#include "GPIOControl.h"
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args)
// 串口参数
@ -30,6 +34,14 @@ typedef struct
unsigned char m_au8RecvBuf[128];/* */
} SIO_PARAM_SERIAL_DEF;
// ioctl payload exchanged with /dev/mtkgpioctrl via IOT_PARAM_READ/WRITE.
typedef struct
{
    int cmd;                  // parameter id (CMD_SET_* constants)
    int value;                // value to write, or storage for a read
    int result;               // driver-filled status code
    long value2;              // secondary value; usage driver-defined
    char str[MAX_STRING_LEN]; // optional string payload
}IOT_PARAM;
void PortDataProcess( void );
int serial_port_comm();

@ -46,9 +46,6 @@ public class BridgeProvider extends ContentProvider {
private final static String PATH_RECOG_PIC = "/recogPic";
private final static String PATH_REQUEST_PWR_CTRL = "/requestPwrCtrl";
private final static String PATH_RELEASE_PWR_CTRL = "/releasePwrCtrl";
// Default constructor; logs instantiation for provider lifecycle tracing.
public BridgeProvider() {
    Log.i(TAG, "BridgeProvider");
}
@ -88,9 +85,6 @@ public class BridgeProvider extends ContentProvider {
matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1);
matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2);
matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3);
matcher.addURI(AUTHORITY, PATH_REQUEST_PWR_CTRL, 4);
matcher.addURI(AUTHORITY, PATH_RELEASE_PWR_CTRL, 5);
Cursor cursor = null;
int matched = matcher.match(uri);
@ -104,12 +98,6 @@ public class BridgeProvider extends ContentProvider {
case 3:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
case 4:
cursor = requestPowerControl(uri, selection, selectionArgs);
break;
case 5:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
default:
break;
}
@ -181,48 +169,6 @@ public class BridgeProvider extends ContentProvider {
return matrixCursor;
}
/**
 * Handles the "requestPwrCtrl" query: decodes the Base64-encoded selection,
 * extracts its "type" parameter (defaulting to 0 on absence or parse failure)
 * and returns the native power-control handle in a one-row cursor with the
 * single column "pwrCtrl".
 */
private Cursor requestPowerControl(Uri uri, String selection, String[] selectionArgs) {
    String decoded = stringFromBase64(selection);
    int type = 0;
    if (!TextUtils.isEmpty(decoded)) {
        // Re-use Uri's query parser by wrapping the decoded selection
        // as a query string of a dummy URL.
        String raw = Uri.parse("http://a.com/?" + decoded).getQueryParameter("type");
        try {
            type = Integer.parseInt(raw);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    long nativeHandle = MicroPhotoService.requestPowerControl(type);
    MatrixCursor cursor = new MatrixCursor(new String[] { "pwrCtrl" }, 1);
    cursor.addRow(new Object[] { Long.valueOf(nativeHandle) });
    return cursor;
}
/**
 * Handles the "releasePwrCtrl" query: decodes the Base64-encoded selection,
 * extracts its "handle" parameter (0 on absence or parse failure), releases
 * the native power-control handle and returns 1/0 success in a one-row
 * cursor with the single column "result".
 */
private Cursor releasePowerControl(Uri uri, String selection, String[] selectionArgs) {
    String decoded = stringFromBase64(selection);
    long nativeHandle = 0;
    if (!TextUtils.isEmpty(decoded)) {
        // Re-use Uri's query parser by wrapping the decoded selection
        // as a query string of a dummy URL.
        String raw = Uri.parse("http://a.com/?" + decoded).getQueryParameter("handle");
        try {
            nativeHandle = Long.parseLong(raw);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    boolean released = MicroPhotoService.releasePowerControl(nativeHandle);
    MatrixCursor cursor = new MatrixCursor(new String[] { "result" }, 1);
    cursor.addRow(new Object[] { Integer.valueOf(released ? 1 : 0) });
    return cursor;
}
private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);

@ -0,0 +1,222 @@
package com.xypower.mpapp;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
import android.os.IBinder;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Started Service that inflates a small floating overlay window (layout
 * {@code ovelay_window}) containing an EditText, a TextView mirroring the
 * typed text, and a close button.
 *
 * NOTE(review): {@link #moveView()} is entirely commented out, so {@code mView}
 * is never attached to the WindowManager in the current code -- confirm
 * whether this service is still expected to display anything.
 */
public class FloatingWindow extends Service {

    private Context mContext;
    private WindowManager mWindowManager;
    private View mView; // root view of the inflated overlay layout

    // Started service only; binding is not supported.
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mContext = this;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        allAboutLayout(intent);
        moveView(); // currently a no-op (body commented out)
        return super.onStartCommand(intent, flags, startId);
    }

    @Override
    public void onDestroy() {
        try {
            if (mView != null) {
                // removeView throws if the view was never attached; the
                // exception is swallowed and logged below.
                mWindowManager.removeView(mView);
            }
        } catch (Exception ex) {
            // ex.printStackTrace();
            Log.e("FW", "Exception " + ex.getMessage());
        }
        super.onDestroy();
    }

    WindowManager.LayoutParams mWindowsParams;

    // Originally attached mView to the WindowManager full-screen and made it
    // draggable via an OnTouchListener; the whole implementation is disabled
    // (kept below for reference).
    private void moveView() {
        /*
        DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
        int width = (int) (metrics.widthPixels * 1f);
        int height = (int) (metrics.heightPixels * 1f);
        mWindowsParams = new WindowManager.LayoutParams(
                width,//WindowManager.LayoutParams.WRAP_CONTENT,
                height,//WindowManager.LayoutParams.WRAP_CONTENT,
                //WindowManager.LayoutParams.TYPE_SYSTEM_ALERT,
                (Build.VERSION.SDK_INT <= 25) ? WindowManager.LayoutParams.TYPE_PHONE : WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY
                ,
                //WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL,
                WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL
                        | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN // Not displaying keyboard on bg activity's EditText
                        | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
                        | WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
                        | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
                        | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON,
                //WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, //Not work with EditText on keyboard
                PixelFormat.TRANSLUCENT);
        mWindowsParams.gravity = Gravity.TOP | Gravity.LEFT;
        //params.x = 0;
        mWindowsParams.y = 100;
        mWindowManager.addView(mView, mWindowsParams);
        mView.setOnTouchListener(new View.OnTouchListener() {
            private int initialX;
            private int initialY;
            private float initialTouchX;
            private float initialTouchY;
            long startTime = System.currentTimeMillis();
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (System.currentTimeMillis() - startTime <= 300) {
                    return false;
                }
                if (isViewInBounds(mView, (int) (event.getRawX()), (int) (event.getRawY()))) {
                    editTextReceiveFocus();
                } else {
                    editTextDontReceiveFocus();
                }
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        initialX = mWindowsParams.x;
                        initialY = mWindowsParams.y;
                        initialTouchX = event.getRawX();
                        initialTouchY = event.getRawY();
                        break;
                    case MotionEvent.ACTION_UP:
                        break;
                    case MotionEvent.ACTION_MOVE:
                        mWindowsParams.x = initialX + (int) (event.getRawX() - initialTouchX);
                        mWindowsParams.y = initialY + (int) (event.getRawY() - initialTouchY);
                        mWindowManager.updateViewLayout(mView, mWindowsParams);
                        break;
                }
                return false;
            }
        });
        */
    }

    // Hit-test: true if the screen coordinate (x, y) lies inside `view`.
    private boolean isViewInBounds(View view, int x, int y) {
        Rect outRect = new Rect();
        int[] location = new int[2];
        view.getDrawingRect(outRect);
        view.getLocationOnScreen(location);
        outRect.offset(location[0], location[1]);
        return outRect.contains(x, y);
    }

    // Make the overlay focusable so its EditText can take keyboard input.
    private void editTextReceiveFocus() {
        if (!wasInFocus) {
            mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
            mWindowManager.updateViewLayout(mView, mWindowsParams);
            wasInFocus = true;
        }
    }

    // Drop focus from the overlay and hide the soft keyboard.
    private void editTextDontReceiveFocus() {
        if (wasInFocus) {
            mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
            mWindowManager.updateViewLayout(mView, mWindowsParams);
            wasInFocus = false;
            hideKeyboard(mContext, edt1);
        }
    }

    private boolean wasInFocus = true; // whether the overlay currently accepts input
    private EditText edt1;             // text field inside the overlay

    // Inflates the overlay layout and wires the EditText / close button.
    private void allAboutLayout(Intent intent) {
        LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        mView = layoutInflater.inflate(R.layout.ovelay_window, null);
        edt1 = (EditText) mView.findViewById(R.id.edt1);
        final TextView tvValue = (TextView) mView.findViewById(R.id.tvValue);
        Button btnClose = (Button) mView.findViewById(R.id.btnClose);
        edt1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Tapping the EditText makes the window focusable and pops the keyboard.
                mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
                // mWindowsParams.softInputMode = WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE;
                mWindowManager.updateViewLayout(mView, mWindowsParams);
                wasInFocus = true;
                showSoftKeyboard(v);
            }
        });
        edt1.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            }
            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                // Mirror the typed text into the read-only TextView.
                tvValue.setText(edt1.getText());
            }
            @Override
            public void afterTextChanged(Editable editable) {
            }
        });
        btnClose.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                stopSelf();
            }
        });
    }

    // Hides the soft keyboard associated with `view`, if any.
    private void hideKeyboard(Context context, View view) {
        if (view != null) {
            InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
        }
    }

    // Requests focus for `view` and shows the soft keyboard over it.
    public void showSoftKeyboard(View view) {
        if (view.requestFocus()) {
            InputMethodManager imm = (InputMethodManager)
                    getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT);
        }
    }
}

@ -1,19 +0,0 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
/**
 * Receives heartbeat-response broadcasts from the system UI and forwards the
 * carried timestamp into the service log.
 */
public class HeartBeatResponseReceiver extends BroadcastReceiver {

    private static final String ACTION_HEARTBEAT_RESPONSE = "com.systemui.ACTION_HEARTBEAT_RESPONSE";

    @Override
    public void onReceive(Context context, Intent intent) {
        // Ignore anything other than the heartbeat-response action.
        if (!ACTION_HEARTBEAT_RESPONSE.equals(intent.getAction())) {
            return;
        }
        long timestamp = intent.getLongExtra("timestamp", 0);
        Log.d("MpApp","系统广播监听 timestamp:"+timestamp);
        MicroPhotoService.infoLog("收到heartbeat广播 timestamp:" + timestamp);
    }
}

@ -1,191 +0,0 @@
package com.xypower.mpapp;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.os.FileObserver;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Messenger;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.MenuItem;
import com.xypower.common.MicroPhotoContext;
import com.xypower.mpapp.databinding.ActivityLogBinding;
import com.xypower.mpapp.utils.RandomReader;
import java.io.File;
/**
 * Activity that tails the application log file ({@code <appDir>/logs/log.txt})
 * and renders newly appended content into a scrolling TextView in near real
 * time, driven by a {@link FileObserver}.
 */
public class LogActivity extends AppCompatActivity {

    public static final String TAG = "MPLOG";
    // Message id used to ship newly-read log bytes to the UI thread.
    public static final int MSG_WHAT_LOG_OBSERVER = MicroPhotoService.MSG_WHAT_MAX + 10;
    // When the view exceeds MAX_LOG_LINES, it is trimmed back to MIN_LOG_LINES.
    public static final int MAX_LOG_LINES = 480;
    public static final int MIN_LOG_LINES = 120;

    private ActivityLogBinding binding;
    private Handler mHandler = null;          // UI-thread handler for log messages
    private LogFileObserver mLogFileObserver = null; // active while resumed

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        binding = ActivityLogBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        binding.logs.setText("");
        binding.logs.setMovementMethod(ScrollingMovementMethod.getInstance());
        binding.logs.setScrollbarFadingEnabled(false);
        // UI-thread handler: appends incoming log chunks and keeps the
        // TextView scrolled to the bottom.
        mHandler = new Handler(Looper.myLooper()) {
            @Override
            public void handleMessage(@NonNull Message msg) {
                switch (msg.what) {
                    case MSG_WHAT_LOG_OBSERVER:
                    {
                        byte[] bytes = (byte[])msg.obj;
                        int bytesRead = msg.arg1;
                        String log = null;
                        try {
                            log = new String(bytes, 0, bytesRead, "UTF-8");
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        if (log != null) {
                            binding.logs.append(log);
                            // Auto-scroll so the newest line stays visible.
                            int offset = binding.logs.getLineCount() * binding.logs.getLineHeight();
                            if (offset > binding.logs.getHeight()) {
                                binding.logs.scrollTo(0, offset - binding.logs.getHeight() + binding.logs.getLineHeight());
                            }
                        }
                    }
                    break;
                }
            }
        };
    }

    @Override
    protected void onResume() {
        // call the superclass method first
        super.onResume();
        try {
            // Ensure the log directory and file exist before watching them.
            String logFilePath = MicroPhotoContext.buildAppDir(this.getApplicationContext());
            logFilePath += "logs";
            File file = new File(logFilePath);
            if (!file.exists()) {
                file.mkdirs();
            }
            logFilePath += "/log.txt";
            file = new File(logFilePath);
            if (!file.exists()) {
                file.createNewFile();
            }
            mLogFileObserver = new LogFileObserver(logFilePath);
            mLogFileObserver.startWatching();
            Log.i(TAG, "Log Observer Started");
            // Trim the on-screen buffer: delete whole lines from the top so at
            // most MIN_LOG_LINES remain once MAX_LOG_LINES is exceeded.
            int lines = binding.logs.getLineCount();
            if (lines > MAX_LOG_LINES) {
                int excessLineNumber = lines - MIN_LOG_LINES;
                int eolIndex = -1;
                CharSequence charSequence = binding.logs.getText();
                for (int i = 0; i < excessLineNumber; i++) {
                    do {
                        eolIndex++;
                    } while (eolIndex < charSequence.length() && charSequence.charAt(eolIndex) != '\n');
                }
                if (eolIndex < charSequence.length()) {
                    binding.logs.getEditableText().delete(0, eolIndex + 1);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onPause() {
        // call the superclass method first
        super.onPause();
        try {
            // Stop tailing while the activity is not visible.
            if (mLogFileObserver != null) {
                mLogFileObserver.stopWatching();
                mLogFileObserver = null;
                Log.i(TAG, "Log Observer Stopped");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // Up button behaves like back.
                finish();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Watches the log file for MODIFY/CREATE events, reads any bytes appended
     * past the last delivered offset, and posts them to mHandler.
     */
    private class LogFileObserver extends FileObserver {
        private long mOffset = 0;    // file position already delivered to the UI
        private String mPath = null; // absolute path of the watched file

        public LogFileObserver(String path) {
            super(path, FileObserver.MODIFY | FileObserver.CREATE);
            mPath = path;
            File file = new File(path);
            if (file.exists()) {
                // Start tailing from the current end of file.
                mOffset = file.length();
            }
        }

        @Override
        public void onEvent(int event, String s) {
            int e = event & FileObserver.ALL_EVENTS;
            if (e == FileObserver.MODIFY) {
                File file = new File(mPath);
                long newOffset = file.length();
                if (newOffset > mOffset) {
                    // Read only the newly appended region.
                    RandomReader reader = new RandomReader(mPath, mOffset);
                    byte[] bytes = new byte[(int)(newOffset - mOffset)];
                    int bytesRead = reader.read(bytes);
                    mOffset += bytesRead;
                    Message msg = Message.obtain();
                    msg.what = MSG_WHAT_LOG_OBSERVER;
                    msg.obj = bytes;
                    msg.arg1 = bytesRead;
                    mHandler.sendMessage(msg);
                }
            } else if (e == FileObserver.CREATE) {
                // File was recreated (e.g. rotated): restart from the beginning.
                mOffset = 0;
            }
        }
    }
}

@ -1,32 +1,32 @@
package com.xypower.mpapp;
import android.Manifest;
import android.app.Activity;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.net.Uri;
import android.location.LocationManager;
import android.os.Build;
import android.os.FileObserver;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Messenger;
import android.os.PowerManager;
import android.os.StrictMode;
import androidx.annotation.NonNull;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.os.SystemClock;
import android.provider.Settings;
import android.telephony.SubscriptionManager;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
@ -35,10 +35,11 @@ import android.widget.Toast;
import com.dev.devapi.api.SysApi;
import com.xypower.common.CameraUtils;
import com.xypower.common.FilesUtils;
import com.xypower.common.MicroPhotoContext;
import com.xypower.mpapp.databinding.ActivityMainBinding;
import com.xypower.mpapp.utils.LocationUtil;
import com.xypower.mpapp.utils.RandomReader;
import java.io.File;
import java.lang.reflect.Method;
@ -49,18 +50,71 @@ public class MainActivity extends AppCompatActivity {
public static final String TAG = "MPLOG";
private static int MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE = 100;
public static final int MSG_WHAT_LOG_OBSERVER = MicroPhotoService.MSG_WHAT_MAX + 10;
// Used to load the 'microphoto' library on application startup.
public static final int MAX_LOG_LINES = 480;
public static final int MIN_LOG_LINES = 120;
private ActivityMainBinding binding;
private Handler mHandler = null;
private Messenger mMessenger = null;
private long mConfigModificationTime = 0;
private LogFileObserver mLogFileObserver = null;
/**
 * Tails the application log file: on MODIFY events it reads any bytes
 * appended past the last delivered offset and posts them to mHandler as a
 * MSG_WHAT_LOG_OBSERVER message; on CREATE it restarts from offset 0.
 */
private class LogFileObserver extends FileObserver {
    private long mOffset = 0;    // file position already delivered to the UI
    private String mPath = null; // absolute path of the watched file

    public LogFileObserver(String path) {
        super(path, FileObserver.MODIFY | FileObserver.CREATE);
        mPath = path;
        File file = new File(path);
        if (file.exists()) {
            // Start tailing from the current end of file.
            mOffset = file.length();
        }
    }

    @Override
    public void onEvent(int event, String s) {
        int e = event & FileObserver.ALL_EVENTS;
        if (e == FileObserver.MODIFY) {
            File file = new File(mPath);
            long newOffset = file.length();
            if (newOffset > mOffset) {
                // Read only the newly appended region.
                RandomReader reader = new RandomReader(mPath, mOffset);
                byte[] bytes = new byte[(int)(newOffset - mOffset)];
                int bytesRead = reader.read(bytes);
                mOffset += bytesRead;
                Message msg = Message.obtain();
                msg.what = MSG_WHAT_LOG_OBSERVER;
                msg.obj = bytes;
                msg.arg1 = bytesRead;
                mHandler.sendMessage(msg);
            }
        } else if (e == FileObserver.CREATE) {
            // File was recreated (e.g. rotated): restart from the beginning.
            mOffset = 0;
        }
    }
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
int activeSubId = SubscriptionManager.getActiveDataSubscriptionId();
if (activeSubId == -1) {
MicroPhotoContext.selectSimCard(getApplicationContext(), 1);
}
}
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
@ -68,164 +122,60 @@ public class MainActivity extends AppCompatActivity {
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
try {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
int activeSubId = SubscriptionManager.getActiveDataSubscriptionId();
if (activeSubId == -1) {
MicroPhotoContext.selectSimCard(getApplicationContext(), 1);
}
}
ActionBar actionBar = getSupportActionBar();
Date date = new Date(BuildConfig.BUILD_TIMESTAMP);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
String caption = "MP";
switch (MicroPhotoService.getCustomAppId()) {
case 1:
caption = "RP";
break;
case 2:
caption = "N938";
break;
default:
break;
}
caption += " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date);
sdf = new SimpleDateFormat("MM-dd HH:mm:ss");
caption += " / " + sdf.format(new Date());
actionBar.setTitle(caption);
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
ActionBar actionBar = getSupportActionBar();
initListener();
Date date = new Date(BuildConfig.BUILD_TIMESTAMP);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
actionBar.setTitle(actionBar.getTitle().toString() + " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date));
Context appContext = getApplicationContext();
String appPath = MicroPhotoContext.buildMpAppDir(appContext);
File appPathFile = new File(appPath);
if (!appPathFile.exists()) {
try {
appPathFile.mkdirs();
} catch (Exception ex) {
ex.printStackTrace();
}
}
binding.logs.setText("");
binding.logs.setMovementMethod(ScrollingMovementMethod.getInstance());
binding.logs.setScrollbarFadingEnabled(false);
if (!MicroPhotoContext.hasMpAppConfig(appContext)) {
String mstPath = MicroPhotoContext.buildMpResAppDir(appContext);
File mstPathFile = new File(mstPath);
File mpdataFile = new File(mstPathFile, "mpdata");
if (mpdataFile.exists()) {
File dataFile = new File(appPathFile, "data");
if (dataFile.exists()) {
mHandler = new Handler(Looper.myLooper()) {
@Override
public void handleMessage(@NonNull Message msg) {
switch (msg.what) {
case MSG_WHAT_LOG_OBSERVER:
{
byte[] bytes = (byte[])msg.obj;
int bytesRead = msg.arg1;
String log = null;
try {
FilesUtils.delete(dataFile);
} catch (Exception ex) {
ex.printStackTrace();
}
}
try {
mpdataFile.renameTo(dataFile);
} catch (Exception ex) {
ex.printStackTrace();
}
}
else {
Intent resIntent = getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPRES);
if (resIntent != null) {
resIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
resIntent.putExtra("initres", 1);
String sn = MicroPhotoService.getSerialNumber();
if (!TextUtils.isEmpty(sn)) {
resIntent.putExtra("sn", sn);
log = new String(bytes, 0, bytesRead, "UTF-8");
} catch (Exception e) {
e.printStackTrace();
}
startActivity(resIntent);
}
}
}
Intent intent = getIntent();
final int noDelay = intent.getIntExtra("noDelay", 0);
int rebootFlag = intent.getIntExtra("reboot", 0);
String reason = intent.getStringExtra("reason");
if (!TextUtils.isEmpty(reason)) {
Log.w(TAG, "App Started with reason: " + reason);
}
if (rebootFlag == 1) {
Log.i(TAG, "After Reboot");
}
Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay);
MicroPhotoContext.AppConfig appConfig = loadConfigInfo();
binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning);
binding.btnStopServ.setEnabled(MicroPhotoService.isRunning);
if (MicroPhotoService.isRunning) {
Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class);
try {
// stopService(intent2);
} catch (Exception ex) {
ex.printStackTrace();
}
}
if (MicroPhotoContext.hasMpAppConfig(appContext)) {
final Runnable runnable = new Runnable() {
@Override
public void run() {
if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) {
if (binding.btnStartServ.isEnabled()) {
Log.i(TAG, "Perform AutoStart");
binding.btnStartServ.performClick();
if (log != null) {
binding.logs.append(log);
int offset = binding.logs.getLineCount() * binding.logs.getLineHeight();
if (offset > binding.logs.getHeight()) {
binding.logs.scrollTo(0, offset - binding.logs.getHeight() + binding.logs.getLineHeight());
}
}
}
};
long timeout = 500;
if (SystemClock.elapsedRealtime() < 180000) {
// In 3 minutes
timeout = 10000; // in 10 seconds
break;
}
Handler handler = new Handler();
handler.postDelayed(runnable, timeout);
Log.i(TAG, "Set AutoStart after " + Long.toString(timeout) + "ms");
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
};
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
protected void onResume() {
super.onResume();
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
try {
File file = MicroPhotoContext.getMpAppConfigFile(getApplicationContext());
if (file.lastModified() > mConfigModificationTime) {
loadConfigInfo();
}
} catch (Exception ex) {
ex.printStackTrace();
Intent intent = getIntent();
final int noDelay = intent.getIntExtra("noDelay", 0);
int rebootFlag = intent.getIntExtra("reboot", 0);
String reason = intent.getStringExtra("reason");
if (!TextUtils.isEmpty(reason)) {
Log.w(TAG, "App Started with reason: " + reason);
}
if (rebootFlag == 1) {
Log.i(TAG, "After Reboot");
}
}
protected MicroPhotoContext.AppConfig loadConfigInfo() {
final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(getApplicationContext());
mConfigModificationTime = appConfig.modificationTime;
Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay);
final MicroPhotoContext.AppConfig appConfig = getAppConfig();
if (TextUtils.isEmpty(appConfig.cmdid)) {
appConfig.cmdid = MicroPhotoService.getSerialNumber();
binding.cmdid.setText(appConfig.cmdid);
@ -243,15 +193,9 @@ public class MainActivity extends AppCompatActivity {
}
}
protocolStr = appConfig.networkProtocol + "-";
for (int idx = 0; idx < binding.networkProtocol.getCount(); idx++) {
String item = binding.networkProtocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.networkProtocol.setSelection(idx);
break;
}
if (appConfig.networkProtocol < binding.networkProtocol.getCount()) {
binding.networkProtocol.setSelection(appConfig.networkProtocol);
}
if (appConfig.encryption < binding.encryptions.getCount()) {
binding.encryptions.setSelection(appConfig.encryption);
}
@ -262,11 +206,6 @@ public class MainActivity extends AppCompatActivity {
binding.network.setSelection(appConfig.network);
}
return appConfig;
}
protected void initListener() {
this.binding.btnStartServ.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
@ -287,16 +226,18 @@ public class MainActivity extends AppCompatActivity {
// return;
}
Context appContext = getApplicationContext();
MicroPhotoContext.AppConfig curAppConfig = MicroPhotoContext.getMpAppConfig(appContext);
binding.logs.setText("");
MicroPhotoContext.AppConfig curAppConfig = retrieveAndSaveAppConfig();
if (TextUtils.isEmpty(curAppConfig.cmdid)) {
curAppConfig.cmdid = MicroPhotoService.getSerialNumber();
// TakeAndThrowPhoto(2, 0xFF);
try {
// Thread.sleep(20);
} catch (Exception ex) {
ex.printStackTrace();
}
startMicroPhotoService(appContext, curAppConfig, mMessenger);
startMicroPhotoService(MainActivity.this.getApplicationContext(), appConfig, mMessenger);
Log.i(TAG, "Service auto-started");
binding.btnStartServ.setEnabled(false);
binding.btnStopServ.setEnabled(true);
}
@ -400,7 +341,6 @@ public class MainActivity extends AppCompatActivity {
@Override
public void onClick(View view) {
MicroPhotoService.infoLog("Call stopTerminalService Manually");
MicroPhotoService.stopTerminalService(getApplicationContext());
binding.btnStartServ.setEnabled(true);
@ -416,13 +356,30 @@ public class MainActivity extends AppCompatActivity {
}
});
binding.btnLogs.setOnClickListener(new View.OnClickListener() {
if (MicroPhotoService.isRunning) {
Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class);
try {
stopService(intent2);
} catch (Exception ex) {
ex.printStackTrace();
}
}
Runnable runnable = new Runnable() {
@Override
public void onClick(View v) {
Intent intent = new Intent(MainActivity.this, LogActivity.class);
startActivity(intent);
public void run() {
if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) {
if (binding.btnStartServ.isEnabled()) {
binding.btnStartServ.performClick();
}
}
}
});
};
mHandler.postDelayed(runnable, noDelay != 0 ? 1000 : 5000);
binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning);
binding.btnStopServ.setEnabled(MicroPhotoService.isRunning);
binding.btnSendHb.setOnClickListener(new View.OnClickListener() {
@Override
@ -434,53 +391,8 @@ public class MainActivity extends AppCompatActivity {
binding.btnRestartApp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
restartSelfWithStartActivity();
// restartSelfWithAlarmManager();
}
private void restartSelfWithStartActivity() {
final Context context = getApplicationContext();
Intent intent = new Intent(context, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
intent.putExtra("reason", "Manual Restart From MainActivity");
context.startActivity(intent);
final Handler handler = new Handler();
finish();
handler.postDelayed(new Runnable() {
@Override
public void run() {
System.exit(0);
}
}, 0);
}
private void restartSelfWithAlarmManager() {
Intent intent = new Intent(MainActivity.this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
intent.putExtra("reason", "Manual Restart From MainActivity");
// Create PendingIntent
PendingIntent pendingIntent = PendingIntent.getActivity(
MainActivity.this, 12312, intent, PendingIntent.FLAG_UPDATE_CURRENT/* | PendingIntent.FLAG_IMMUTABLE*/);
AlarmManager alarmManager = (AlarmManager) MainActivity.this.getSystemService(Context.ALARM_SERVICE);
if (alarmManager != null) {
alarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 200, pendingIntent);
}
MainActivity.this.finish();
System.exit(0);
Context context = v.getContext().getApplicationContext();
MicroPhotoService.restartApp(context, context.getPackageName(), "Manual Restart From MainActivity");
}
});
@ -512,6 +424,9 @@ public class MainActivity extends AppCompatActivity {
binding.btnCameraInfo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
MicroPhotoService.setOtgState(true);
// SysApi.setOtgState(true);
MicroPhotoService.setCam3V3Enable(true);
Runnable runnable = new Runnable() {
@ -522,19 +437,24 @@ public class MainActivity extends AppCompatActivity {
Log.d(TAG, cameraInfo);
MicroPhotoService.setCam3V3Enable(false);
MicroPhotoService.setOtgState(false);
MicroPhotoService.infoLog(cameraInfo);
binding.logs.append(cameraInfo + "\r\n");
Toast.makeText(view.getContext(), cameraInfo, Toast.LENGTH_LONG).show();
}
};
Handler handler = new Handler();
handler.postDelayed(runnable, 1500);
mHandler.postDelayed(runnable, 1500);
}
});
}
@Override
protected void onDestroy() {
// No activity-scoped resources to release here; the log-file observer is
// already torn down in onPause(). Just delegate to the framework.
super.onDestroy();
}
public static void startMicroPhotoService(Context context, MicroPhotoContext.AppConfig curAppConfig, Messenger messenger) {
if (TextUtils.isEmpty(curAppConfig.cmdid) || TextUtils.isEmpty(curAppConfig.server) || curAppConfig.port == 0) {
@ -605,6 +525,63 @@ public class MainActivity extends AppCompatActivity {
MicroPhotoService.takePhoto(channel, preset, true, configFile.getAbsolutePath(), photoFile.getAbsolutePath());
}
@Override
protected void onResume() {
// call the superclass method first
super.onResume();
// Ensure the app's log file exists, then attach a FileObserver so new log
// lines can be mirrored into the on-screen log view while the activity is
// visible. The observer is stopped again in onPause().
try {
String logFilePath = MicroPhotoContext.buildAppDir(this.getApplicationContext());
logFilePath += "logs";
File file = new File(logFilePath);
if (!file.exists()) {
file.mkdirs();
}
logFilePath += "/log.txt";
file = new File(logFilePath);
if (!file.exists()) {
file.createNewFile();
}
mLogFileObserver = new LogFileObserver(logFilePath);
mLogFileObserver.startWatching();
Log.i(TAG, "Log Observer Started");
// Trim the on-screen log widget: once it exceeds MAX_LOG_LINES, drop the
// oldest lines so roughly MIN_LOG_LINES remain (hysteresis avoids trimming
// on every single appended line).
int lines = binding.logs.getLineCount();
if (lines > MAX_LOG_LINES) {
int excessLineNumber = lines - MIN_LOG_LINES;
int eolIndex = -1;
CharSequence charSequence = binding.logs.getText();
// Advance eolIndex past excessLineNumber newline characters; the index
// persists across iterations, so each pass consumes one more line.
for (int i = 0; i < excessLineNumber; i++) {
do {
eolIndex++;
} while (eolIndex < charSequence.length() && charSequence.charAt(eolIndex) != '\n');
}
if (eolIndex < charSequence.length()) {
// Delete everything up to and including the last counted newline.
binding.logs.getEditableText().delete(0, eolIndex + 1);
}
}
} catch (Exception e) {
// Best-effort: a failure here (e.g. storage not writable) must not
// prevent the activity from resuming.
e.printStackTrace();
}
}
@Override
protected void onPause() {
    // Let the framework perform its own pause handling first.
    super.onPause();
    // Detach the log-file watcher that onResume() installed; clearing the
    // field before stopping guards against double-stops on re-entry.
    try {
        LogFileObserver observer = mLogFileObserver;
        if (observer != null) {
            mLogFileObserver = null;
            observer.stopWatching();
            Log.i(TAG, "Log Observer Stopped");
        }
    } catch (Exception e) {
        // Best-effort teardown: never let cleanup break the pause path.
        e.printStackTrace();
    }
}
private MicroPhotoContext.AppConfig retrieveAndSaveAppConfig() {
MicroPhotoContext.AppConfig appConfig = new MicroPhotoContext.AppConfig();
@ -698,6 +675,4 @@ public class MainActivity extends AppCompatActivity {
}
}

File diff suppressed because it is too large Load Diff

@ -0,0 +1,76 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Build;
import android.util.Log;
import android.widget.Toast;
public class ScreenActionReceiver extends BroadcastReceiver {

    // Logcat tag.
    private String TAG = "ScreenActionReceiver";

    /**
     * Handles screen interactive-state broadcasts (SCREEN_ON / SCREEN_OFF /
     * USER_PRESENT / BOOT_COMPLETED): logs the action, shows a short toast,
     * and launches the FloatingWindow service on screen-on and unlock.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        //LOG
        StringBuilder sb = new StringBuilder();
        sb.append("Action: " + intent.getAction() + "\n");
        // sb.append("URI: " + intent.toUri(Intent.URI_INTENT_SCHEME).toString() + "\n");
        String log = sb.toString();
        Log.d(TAG, log);
        Toast.makeText(context, log, Toast.LENGTH_SHORT).show();

        String action = intent.getAction();
        try {
            if (Intent.ACTION_SCREEN_ON.equals(action)) {
                Log.d(TAG, "screen is on...");
                // BUGFIX: Toast.makeText(...) without .show() displays nothing.
                Toast.makeText(context, "screen ON", Toast.LENGTH_SHORT).show();
                //Run the locker
                context.startService(new Intent(context, FloatingWindow.class));
            } else if (Intent.ACTION_SCREEN_OFF.equals(action)) {
                Log.d(TAG, "screen is off...");
                // BUGFIX: missing .show() — see above.
                Toast.makeText(context, "screen OFF", Toast.LENGTH_SHORT).show();
            } else if (Intent.ACTION_USER_PRESENT.equals(action)) {
                Log.d(TAG, "screen is unlock...");
                // BUGFIX: missing .show() — see above.
                Toast.makeText(context, "screen UNLOCK", Toast.LENGTH_SHORT).show();
                // On Android O+ a background context must use startForegroundService().
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                    context.startForegroundService(new Intent(context, FloatingWindow.class));
                } else {
                    context.startService(new Intent(context, FloatingWindow.class));
                }
            } else if (Intent.ACTION_BOOT_COMPLETED.equals(action)) {
                Log.d(TAG, "boot completed...");
                // BUGFIX: missing .show() — see above.
                Toast.makeText(context, "BOOTED..", Toast.LENGTH_SHORT).show();
                // Starting FloatingWindow on boot is intentionally disabled for now.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                    // context.startForegroundService(new Intent(context, FloatingWindow.class));
                } else {
                    // context.startService(new Intent(context, FloatingWindow.class));
                }
            }
        } catch (Exception e) {
            // Receivers must never crash the process on a failed service start.
            e.printStackTrace();
        }
    }

    /**
     * Builds the IntentFilter a caller should use to register this receiver at
     * runtime (SCREEN_ON/OFF cannot be received via the manifest).
     */
    public IntentFilter getFilter() {
        final IntentFilter filter = new IntentFilter();
        filter.addAction(Intent.ACTION_SCREEN_OFF);
        filter.addAction(Intent.ACTION_SCREEN_ON);
        return filter;
    }
}

@ -769,10 +769,7 @@ public class Camera2VideoActivity extends AppCompatActivity {
@Override
public void run() {
Log.i("OSD", "Record Stop " + Long.toString(mDuration));
if (mGPUCameraRecorder != null) {
mGPUCameraRecorder.stop();
}
mGPUCameraRecorder.stop();
int aa = 0;
}
@ -812,7 +809,6 @@ public class Camera2VideoActivity extends AppCompatActivity {
.cameraId(Integer.toString(mCameraId))
.mute(true)
.duration(mDuration * 1000)
.rotation(mOrientation)
.build();
Log.i("OSD", "mGPUCameraRecorder created");

@ -3,6 +3,7 @@ package com.xypower.mpapp.video;
import android.Manifest;
import android.app.Activity;
import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
@ -17,6 +18,7 @@ import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.BitmapDrawable;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
@ -34,6 +36,8 @@ import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import androidx.legacy.app.FragmentCompat;
import androidx.legacy.app.FragmentCompat;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import android.os.Environment;
@ -50,6 +54,7 @@ import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.xypower.mpapp.MicroPhotoService;
import com.xypower.mpapp.R;
@ -71,7 +76,7 @@ import java.util.concurrent.TimeUnit;
* Use the {@link VideoFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener {
public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener, FragmentCompat.OnRequestPermissionsResultCallback {
public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH";
public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED";

@ -1,5 +0,0 @@
<vector android:height="24dp" android:tint="#00FF00"
android:viewportHeight="24" android:viewportWidth="24"
android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="@android:color/white" android:pathData="M20,4h-3.17L15,2L9,2L7.17,4L4,4c-1.1,0 -2,0.9 -2,2v12c0,1.1 0.9,2 2,2h16c1.1,0 2,-0.9 2,-2L22,6c0,-1.1 -0.9,-2 -2,-2zM20,18L4,18L4,6h4.05l1.83,-2h4.24l1.83,2L20,6v12zM12,7c-2.76,0 -5,2.24 -5,5s2.24,5 5,5 5,-2.24 5,-5 -2.24,-5 -5,-5zM12,15c-1.65,0 -3,-1.35 -3,-3s1.35,-3 3,-3 3,1.35 3,3 -1.35,3 -3,3z"/>
</vector>

@ -204,16 +204,6 @@
app:layout_constraintStart_toEndOf="@+id/btnSaveCfg"
app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
<Button
android:id="@+id/btnSendHb"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnChannels"
app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
<Button
android:id="@+id/btnTakePhoto"
@ -260,17 +250,6 @@
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto3"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" />
<Button
android:id="@+id/btnSendWs"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_ws"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" />
<Button
android:id="@+id/takeVideoBtn"
android:layout_width="wrap_content"
@ -317,25 +296,14 @@
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button
android:id="@+id/btnSendBi"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_bi"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/takeVideoBtn4"
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button
android:id="@+id/btnLogs"
android:id="@+id/btnSendHb"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin_small"
android:layout_marginTop="@dimen/activity_vertical_spacing_small"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
android:text="日志"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/takeVideoBtn" />
@ -347,8 +315,8 @@
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnLogs"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintStart_toEndOf="@+id/btnSendHb"
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnReboot"
@ -359,7 +327,7 @@
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnRestartApp"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnCameraInfo"
@ -370,18 +338,7 @@
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnReboot"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
<Button
android:id="@+id/btnSendFault"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_fault"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnDowseCamera"
@ -395,4 +352,32 @@
app:layout_constraintStart_toStartOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnCameraInfo" />
<androidx.constraintlayout.widget.Barrier
android:id="@+id/leftBarrier"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:barrierDirection="right"
app:constraint_referenced_ids="textViewCmdId,cmdid,protocol,networkProtocol,encryptions,btnSaveCfg,btnTakePhoto4,btnChannels,network,takeVideoBtn4"
tools:layout_editor_absoluteX="46dp" />
<TextView
android:id="@+id/logs"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_margin="4dp"
android:padding="4dp"
android:background="@drawable/textview_border"
android:orientation="horizontal"
android:lineSpacingMultiplier="1.25"
android:scrollbars="vertical"
android:singleLine="false"
android:text="Logs"
android:textColor="@color/black"
android:textIsSelectable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toEndOf="@+id/leftBarrier"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -1,27 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="horizontal"
tools:context=".LogActivity">
<TextView
android:id="@+id/logs"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_margin="4dp"
android:padding="4dp"
android:background="@drawable/textview_border"
android:orientation="horizontal"
android:lineSpacingMultiplier="1.25"
android:scrollbars="vertical"
android:singleLine="false"
android:text="Logs"
android:textColor="@color/black"
android:textIsSelectable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -211,16 +211,6 @@
android:layout_marginTop="8dp"
android:text="通道设置"
app:layout_constraintStart_toEndOf="@+id/simchange2"
app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
<Button
android:id="@+id/btnLogs"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="4dp"
android:layout_marginTop="8dp"
android:text="日志"
app:layout_constraintStart_toEndOf="@+id/btnChannels"
app:layout_constraintTop_toBottomOf="@+id/btnStartServ" />
<Button
@ -353,6 +343,32 @@
app:layout_constraintStart_toStartOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnCameraInfo" />
<androidx.constraintlayout.widget.Barrier
android:id="@+id/leftBarrier"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:barrierDirection="right"
app:constraint_referenced_ids="textViewCmdId,cmdid,protocol,networkProtocol,encryptions,btnSaveCfg,btnTakePhoto4,btnChannels"
tools:layout_editor_absoluteX="46dp" />
<TextView
android:id="@+id/logs"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_margin="4dp"
android:orientation="horizontal"
android:padding="4dp"
android:lineSpacingMultiplier="1.25"
android:scrollbars="vertical"
android:text="Logs"
android:textColor="@color/black"
android:singleLine="false"
android:textIsSelectable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -1,8 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="networkProtocols">
<item>0-TCP</item>
<item>1-UDP</item>
<item>10-MQTT</item>
<item>TCP</item>
<item>UDP</item>
</string-array>
</resources>

@ -6,10 +6,8 @@
<item>65282-江苏</item>
<item>65283-湖南</item>
<item>65284-浙江</item>
<item>65285-河南统一</item>
<item>65285-河南</item>
<item>65286-郑州</item>
<item>65290-河南全景</item>
<item>65298-宁夏</item>
<item>65310-山西智洋</item>
</string-array>
</resources>

@ -7,9 +7,6 @@
<string name="main_packet_size_default">默认2K</string>
<string name="main_server">支持域名自动转IP</string>
<string name="main_send_hb">心跳</string>
<string name="main_send_ws">工作状态</string>
<string name="main_send_bi">基本信息</string>
<string name="main_send_fault">故障</string>
<string name="main_restart_app">重启APP</string>
<string name="main_reboot">重启设备</string>
<string name="main_camera_info">摄像头</string>

@ -6,8 +6,8 @@ android {
compileSdk 33
defaultConfig {
minSdk 28
targetSdk 28
minSdk COMPILE_MIN_SDK_VERSION as int
targetSdk TARGET_SDK_VERSION as int
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
consumerProguardFiles "consumer-rules.pro"
@ -30,10 +30,9 @@ android {
dependencies {
implementation 'androidx.core:core:1.6.0'
// implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'
// implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'

@ -6,6 +6,7 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
@ -44,16 +45,6 @@ public class CameraUtils {
Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
builder.append(orientation == null ? "" : orientation.toString());
int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean hasRaw = false;
for (int capability : capabilities) {
if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
hasRaw = true;
break;
}
}
builder.append(" raw=" + (hasRaw ? "1" : "0"));
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);

@ -33,23 +33,19 @@ public class FileDownloader {
URL url = new URL(urlString);
connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty("Accept-Encoding", "gzip");
connection.setConnectTimeout(10000);
connection.setReadTimeout(30000);
connection.setConnectTimeout(5000);
connection.setReadTimeout(120000);
connection.setDoInput(true);
connection.connect();
final File temp = new File(filePath);
if (temp.exists()) {
long fileSize = temp.length();
connection.setRequestProperty("Range", "bytes=" + Long.toString(fileSize) + "-");
}
// if (temp.exists())
// temp.delete();
// temp.createNewFile();
if (temp.exists())
temp.delete();
temp.createNewFile();
temp.setReadable(true, false);
temp.setWritable(true, false);
downloadFile = temp;
Log.d("download", "url " + urlString + "\n save to " + temp);
os = new FileOutputStream(temp, true);
os = new FileOutputStream(temp);
String encoding = connection.getContentEncoding();
is = connection.getInputStream();

@ -6,7 +6,6 @@ import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -93,22 +92,8 @@ public class FileUploader {
}
request.writeBytes(this.CRLF);
FileInputStream fis = null;
try {
fis = new FileInputStream(uploadFile);
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
int length = -1;
while ((length = fis.read(buffer)) != -1) {
request.write(buffer, 0, length);
}
} catch (Exception ex) {
ex.printStackTrace();
} finally {
FilesUtils.closeFriendly(fis);
}
// byte[] bytes = Files.readAllBytes(uploadFile.toPath());
// request.write(bytes);
byte[] bytes = Files.readAllBytes(uploadFile.toPath());
request.write(bytes);
}
/**

@ -1,14 +1,10 @@
package com.xypower.common;
import android.content.Context;
import android.content.res.AssetManager;
import android.text.TextUtils;
import android.util.Log;
import org.w3c.dom.Text;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
@ -17,7 +13,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.nio.channels.Channels;
import java.nio.channels.SeekableByteChannel;
@ -132,6 +127,7 @@ public class FilesUtils {
return intValue;
}
public static void writeTextFile(String path, String content) {
FileOutputStream fileOutputStream = null;
try {
@ -256,217 +252,4 @@ public class FilesUtils {
public static byte[] readAllBytes(String file) {
return readAllBytes(new File(file));
}
/**
 * Deletes a file or a directory tree.
 *
 * @param file target; may be a regular file or a directory
 * @return true if everything was deleted; false if the target does not exist
 *         or any entry could not be removed
 */
public static boolean delete(File file) {
    if (!file.exists()) {
        return false;
    }
    // Dispatch on node type: plain files are removed directly, directories
    // are removed recursively, bottom-up.
    if (file.isFile()) {
        return deleteSingleFile(file);
    }
    return deleteDirectory(file.getAbsolutePath());
}

/** Deletes one regular file; true only if it existed and was removed. */
private static boolean deleteSingleFile(File file) {
    if (file.exists() && file.isFile()) {
        return file.delete();
    }
    return false;
}

/**
 * Recursively deletes a directory and all of its contents.
 *
 * @param filePath path of the directory to delete
 * @return true on success, false if the path is not an existing directory or
 *         any entry (or the directory itself) could not be deleted
 */
private static boolean deleteDirectory(String filePath) {
    // Normalize: ensure the path ends with the platform separator.
    if (!filePath.endsWith(File.separator))
        filePath = filePath + File.separator;
    File dirFile = new File(filePath);
    if ((!dirFile.exists()) || (!dirFile.isDirectory())) {
        return false;
    }
    boolean flag = true;
    File[] files = dirFile.listFiles();
    // BUGFIX: listFiles() may return null on an I/O error even for an existing
    // directory — the old code would NPE in the for-each loop.
    if (files != null) {
        for (File file : files) {
            if (file.isFile()) {
                flag = deleteSingleFile(file);
            } else if (file.isDirectory()) {
                flag = deleteDirectory(file.getAbsolutePath());
            }
            if (!flag)
                break;
        }
    }
    if (!flag) {
        return false;
    }
    // Finally remove the now-empty directory itself.
    return dirFile.delete();
}
/**
 * Recursively copies an asset directory into {@code destPath}. When
 * {@code directory} actually names a single asset file (it cannot be listed
 * or lists empty), it is copied as a file instead.
 *
 * @param context   used to access the application's assets
 * @param directory asset path to copy from
 * @param destPath  filesystem path to copy into (created if missing)
 */
public static void copyAssetsDir(Context context, String directory, String destPath) {
    try {
        AssetManager assetManager = context.getAssets();
        String[] entries = assetManager.list(directory);
        if (entries == null || entries.length == 0) {
            // Not listable as a directory — treat it as a single asset file.
            copyAssetsFile(context, directory, destPath);
            return;
        }
        File destDir = new File(destPath);
        if (!destDir.exists()) {
            destDir.mkdirs();
        }
        // Build separator-terminated prefixes once, then recurse per entry.
        String srcPrefix = directory.endsWith(File.separator) ? directory : directory + File.separator;
        String dstPrefix = destPath.endsWith(File.separator) ? destPath : destPath + File.separator;
        for (String entry : entries) {
            copyAssetsDir(context, srcPrefix + entry, dstPrefix + entry);
        }
    } catch (Exception e) {
        // Best-effort copy: log and continue.
        e.printStackTrace();
    }
}
/**
 * Copies a single asset to {@code destPath}, overwriting any existing file
 * and creating parent directories as needed. Fails silently (with a log) if
 * the parent directory is not writable.
 *
 * @param context  used to access the application's assets
 * @param fileName asset path of the source file
 * @param destPath absolute destination path
 */
public static void copyAssetsFile(Context context, String fileName, String destPath) {
    InputStream inputStream = null;
    FileOutputStream fos = null;
    try {
        inputStream = context.getAssets().open(fileName);
        File file = new File(destPath);
        if (file.exists()) {
            file.delete();
        }
        File parentDir = file.getParentFile();
        if (parentDir != null && !parentDir.exists()) {
            parentDir.mkdirs();
        }
        if (parentDir != null && !parentDir.canWrite()) {
            Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
            return;
        }
        fos = new FileOutputStream(file);
        int len = -1;
        byte[] buffer = new byte[1024];
        // BUGFIX: the old loop wrapped each write in its own try/catch that
        // swallowed the exception and kept copying, silently producing a
        // corrupt destination file. A failed write now aborts the copy and is
        // reported by the outer catch.
        while ((len = inputStream.read(buffer)) != -1) {
            fos.write(buffer, 0, len);
        }
        fos.flush();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Always release both streams, even on failure.
        FilesUtils.closeFriendly(inputStream);
        FilesUtils.closeFriendly(fos);
    }
}
/**
 * Copies a text asset to {@code destPath} while normalizing line endings:
 * every CRLF (or lone CR) in the source is replaced with the platform line
 * separator (LF on Android).
 *
 * @param context  used to access the application's assets
 * @param fileName asset path of the source text file
 * @param destPath absolute destination path; overwritten if it exists
 */
public static void copyAndNormalizeTextAssetsFile(Context context, String fileName, String destPath) {
    InputStream inputStream = null;
    BufferedReader reader = null;
    BufferedWriter writer = null;
    try {
        inputStream = context.getAssets().open(fileName);
        // BUGFIX: read with an explicit charset instead of the platform
        // default, so the copy is byte-stable across environments.
        reader = new BufferedReader(new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8));
        // Create (or replace) the destination file.
        File file = new File(destPath);
        if (file.exists()) {
            file.delete();
        }
        File parentDir = file.getParentFile();
        if (parentDir != null && !parentDir.exists()) {
            parentDir.mkdirs();
        }
        if (parentDir != null && !parentDir.canWrite()) {
            Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
            return;
        }
        // BUGFIX: write with an explicit charset as well (see above).
        writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), java.nio.charset.StandardCharsets.UTF_8));
        String line;
        // readLine() strips any CR/LF/CRLF; newLine() emits the platform
        // separator (LF on Android), which performs the normalization.
        while ((line = reader.readLine()) != null) {
            writer.write(line);
            writer.newLine();
        }
        writer.flush();
        Log.d("FilesUtils", "File normalized and copied successfully: " + destPath);
    } catch (Exception e) {
        Log.e("FilesUtils", "Error normalizing file: " + e.getMessage(), e);
    } finally {
        // Always release all three resources, even on failure.
        closeFriendly(reader);
        closeFriendly(writer);
        closeFriendly(inputStream);
    }
}
}

@ -0,0 +1,248 @@
package com.xypower.common;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Handler;
import androidx.annotation.RequiresApi;
import android.util.Log;
import androidx.annotation.RequiresApi;
import com.android.dx.stock.ProxyBuilder;
import java.io.File;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
public class HotspotManager {
@RequiresApi(api = Build.VERSION_CODES.O)
public static class OreoWifiManager {
private static final String TAG = OreoWifiManager.class.getSimpleName();
private Context mContext;
private WifiManager mWifiManager;
private ConnectivityManager mConnectivityManager;
public OreoWifiManager(Context c) {
mContext = c;
mWifiManager = (WifiManager) mContext.getSystemService(Context.WIFI_SERVICE);
mConnectivityManager = (ConnectivityManager) mContext.getSystemService(ConnectivityManager.class);
}
/**
* This sets the Wifi SSID and password
* Call this before {@code startTethering} if app is a system/privileged app
* Requires: android.permission.TETHER_PRIVILEGED which is only granted to system apps
*/
public void configureHotspot(String name, String password) {
WifiConfiguration apConfig = new WifiConfiguration();
apConfig.SSID = name;
apConfig.preSharedKey = password;
apConfig.allowedKeyManagement.set(WifiConfiguration.KeyMgmt.WPA_PSK);
try {
Method setConfigMethod = mWifiManager.getClass().getMethod("setWifiApConfiguration", WifiConfiguration.class);
boolean status = (boolean) setConfigMethod.invoke(mWifiManager, apConfig);
Log.d(TAG, "setWifiApConfiguration - success? " + status);
} catch (Exception e) {
Log.e(TAG, "Error in configureHotspot");
e.printStackTrace();
}
}
/**
* Checks where tethering is on.
* This is determined by the getTetheredIfaces() method,
* that will return an empty array if not devices are tethered
*
* @return true if a tethered device is found, false if not found
*/
/*public boolean isTetherActive() {
try {
Method method = mConnectivityManager.getClass().getDeclaredMethod("getTetheredIfaces");
if (method == null) {
Log.e(TAG, "getTetheredIfaces is null");
} else {
String res[] = (String[]) method.invoke(mConnectivityManager, null);
Log.d(TAG, "getTetheredIfaces invoked");
Log.d(TAG, Arrays.toString(res));
if (res.length > 0) {
return true;
}
}
} catch (Exception e) {
Log.e(TAG, "Error in getTetheredIfaces");
e.printStackTrace();
}
return false;
}
*/
/**
* This enables tethering using the ssid/password defined in Settings App>Hotspot & tethering
* Does not require app to have system/privileged access
* Credit: Vishal Sharma - https://stackoverflow.com/a/52219887
*/
/**
 * Starts mobile tethering through the hidden
 * ConnectivityManager#startTethering(int, boolean, OnStartTetheringCallback, Handler) API.
 * Uses a runtime-generated proxy (dexmaker ProxyBuilder) to subclass the hidden
 * framework callback class and forward its events to the supplied callback.
 * Does not require the app to have system/privileged access.
 * Credit: Vishal Sharma - https://stackoverflow.com/a/52219887
 *
 * @param callback receives onTetheringStarted / onTetheringFailed events
 * @return true if the hidden API was invoked successfully, false otherwise
 */
public boolean startTethering(final OnStartTetheringCallback callback) {
    // NOTE(review): on Pie, requesting tethering while it is already active turns it
    // off; callers may want a tether-state check first (see isTetherActive above).
    File outputDir = mContext.getCodeCacheDir();
    Object proxy;
    try {
        // Build a dynamic subclass of the hidden ConnectivityManager$OnStartTetheringCallback
        // and route its callbacks to our public OnStartTetheringCallback.
        proxy = ProxyBuilder.forClass(OnStartTetheringCallbackClass())
                .dexCache(outputDir).handler(new InvocationHandler() {
                    @Override
                    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                        switch (method.getName()) {
                            case "onTetheringStarted":
                                callback.onTetheringStarted();
                                break;
                            case "onTetheringFailed":
                                callback.onTetheringFailed();
                                break;
                            default:
                                ProxyBuilder.callSuper(proxy, method, args);
                        }
                        return null;
                    }
                }).build();
    } catch (Exception e) {
        Log.e(TAG, "Error in enableTethering ProxyBuilder", e);
        return false;
    }
    try {
        // getDeclaredMethod throws NoSuchMethodException rather than returning null,
        // so the old "method == null" branch (which still returned true!) was dead code.
        Method method = mConnectivityManager.getClass().getDeclaredMethod(
                "startTethering", int.class, boolean.class, OnStartTetheringCallbackClass(), Handler.class);
        method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE, false, proxy, null);
        Log.d(TAG, "startTethering invoked");
        return true;
    } catch (Exception e) {
        Log.e(TAG, "Error in enableTethering", e);
    }
    return false;
}
/**
 * Stops mobile tethering through the hidden ConnectivityManager#stopTethering(int) API.
 * Failures are logged and swallowed.
 */
public void stopTethering() {
    try {
        // getDeclaredMethod throws NoSuchMethodException instead of returning null,
        // so no null check is needed on the result.
        Method method = mConnectivityManager.getClass().getDeclaredMethod("stopTethering", int.class);
        method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE);
        Log.d(TAG, "stopTethering invoked");
    } catch (Exception e) {
        Log.e(TAG, "stopTethering error: " + e.toString(), e);
    }
}
/**
 * Resolves the hidden framework class
 * android.net.ConnectivityManager$OnStartTetheringCallback via reflection.
 *
 * @return the hidden callback class, or null when this Android build does not have it
 */
private Class<?> OnStartTetheringCallbackClass() {
    try {
        // Wildcard type instead of a raw Class; callers only pass this to
        // reflection/ProxyBuilder, so the change is source-compatible.
        return Class.forName("android.net.ConnectivityManager$OnStartTetheringCallback");
    } catch (ClassNotFoundException e) {
        Log.e(TAG, "OnStartTetheringCallbackClass error: " + e.toString(), e);
    }
    return null;
}
}
/**
 * Result callback for {@code startTethering}; mirrors the hidden framework class
 * android.net.ConnectivityManager$OnStartTetheringCallback so callers can receive
 * tethering events without referencing hidden APIs directly.
 */
public static abstract class OnStartTetheringCallback {
/**
* Called when tethering has been successfully started.
*/
public abstract void onTetheringStarted();
/**
* Called when starting tethering failed.
*/
public abstract void onTetheringFailed();
}
/**
 * Turns the Wi-Fi hotspot on or off using the reflective OreoWifiManager helper.
 * Requires API O (Android 8.0) or later.
 *
 * @param mContext context used to resolve system services
 * @param isEnable true to start tethering, false to stop it
 */
@RequiresApi(api = Build.VERSION_CODES.O)
private static void setHotspotOnPhone(Context mContext, boolean isEnable) {
    // The original null-checked a local it had just initialized to null; a direct
    // construction is equivalent and removes the dead branch.
    OreoWifiManager oreoWifiManager = new OreoWifiManager(mContext);
    if (isEnable) {
        oreoWifiManager.startTethering(new OnStartTetheringCallback() {
            @Override
            public void onTetheringStarted() {
                // no-op: success needs no follow-up here
            }
            @Override
            public void onTetheringFailed() {
                // no-op: failures are intentionally ignored by this caller
            }
        });
    } else {
        oreoWifiManager.stopTethering();
    }
}
/*
public static void setWiFiApEnable(Context context, boolean isEnable) {
ConnectivityManager mConnectivityManager= (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (isEnable) {
mConnectivityManager.startTethering(ConnectivityManager.TETHERING_WIFI, false, new ConnectivityManager.OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
Log.d(TAG, "onTetheringStarted");
// Don't fire a callback here, instead wait for the next update from wifi.
}
@Override
public void onTetheringFailed() {
Log.d(TAG, "onTetheringFailed");
// TODO: Show error.
}
});
} else {
mConnectivityManager.stopTethering(ConnectivityManager.TETHERING_WIFI);
}
}
*/
/**
 * Enables or disables the device hotspot on Android 8.0 (API O) and above.
 * Below O this is a no-op.
 *
 * @param context  caller context
 * @param isEnable true to enable the hotspot, false to disable it
 */
public static void enableHotspot(Context context, boolean isEnable) {
// Version letters: O = Android 8.0, R = Android 11
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8.0+ (the original comment said "Android 11", but the check is >= O)
setHotspotOnPhone(context, isEnable);
}/* else if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8
}
*/
}
}

@ -2,7 +2,6 @@ package com.xypower.common;
import android.content.Context;
import android.os.Environment;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
@ -108,32 +107,13 @@ public class JSONUtils {
return false;
}
/**
 * Loads a JSON config file located under external storage.
 * Creates the parent directory when it is missing.
 *
 * @param path     directory path relative to external storage root
 * @param fileName config file name inside that directory
 * @return the parsed JSON object, an empty object if parsing failed,
 *         or null when the file does not exist
 */
public static JSONObject getConfigFile(String path, String fileName) {
    File dir = new File(Environment.getExternalStorageDirectory(), path);
    if (!dir.exists()) {
        dir.mkdirs();
    }
    File target = new File(dir, fileName);
    if (!target.exists()) {
        return null;
    }
    JSONObject loaded = JSONUtils.loadJson(target.getAbsolutePath());
    return (loaded != null) ? loaded : new JSONObject();
}
public static boolean updateConfigFile(String path, String fileName, String name, int fieldType, Object val) {
if (name == null) {
return false;
}
File configFile = new File(path.trim());
File configFile = new File(Environment.getExternalStorageDirectory(), path);
if (!configFile.exists()) {
if (val == null) {
// Should delete the config field

@ -1,8 +1,5 @@
package com.xypower.common;
import android.content.Context;
import java.io.FileInputStream;
import java.security.MessageDigest;
/* loaded from: ds_base_2.0.9_23030112.aar:classes.jar:com/dowse/base/util/MD5Util.class */
@ -32,25 +29,4 @@ public class MD5Util {
}
return r.toString();
}
/**
 * Computes the MD5 digest of a file.
 *
 * @param filePath path of the file to hash
 * @return the digest as a 32-character lowercase hex string,
 *         or an empty string if the file cannot be read
 */
public static String getFileMd5(String filePath) {
    try (FileInputStream in = new FileInputStream(filePath)) {
        MessageDigest digest = MessageDigest.getInstance("MD5");
        byte[] chunk = new byte[8192]; // large buffer keeps read() call count low
        for (int n = in.read(chunk); n != -1; n = in.read(chunk)) {
            digest.update(chunk, 0, n);
        }
        StringBuilder hex = new StringBuilder(32);
        for (byte b : digest.digest()) {
            int v = b & 0xff;
            if (v < 0x10) {
                hex.append('0'); // keep two digits per byte
            }
            hex.append(Integer.toHexString(v));
        }
        return hex.toString();
    } catch (Exception e) {
        e.printStackTrace();
        return "";
    }
}
}

@ -1,8 +1,6 @@
package com.xypower.common;
import android.app.ActivityManager;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
@ -30,13 +28,7 @@ public class MicroPhotoContext {
public static final String PACKAGE_NAME_MPAPP = "com.xypower.mpapp";
public static final String PACKAGE_NAME_MPMASTER = "com.xypower.mpmaster";
public static final String PACKAGE_NAME_MPRES = "com.xypower.mpres";
public static final String SERVICE_NAME_MPSERVICE = PACKAGE_NAME_MPAPP + ".MicroPhotoService";
public static final String SERVICE_NAME_MPMASTER = PACKAGE_NAME_MPMASTER + ".MpMasterService";
public static final String ACTION_HEARTBEAT_MP = "com.xypower.mpapp.ACT_HB";
public static final String ACTION_TAKEPHOTO_MP = "com.xypower.mpapp.ACT_TP";
public static final String ACTION_RESTART_MP = "com.xypower.mpapp.ACT_RESTART";
public static final String ACTION_UPDATE_CONFIGS_MP = "com.xypower.mpapp.ACT_UPD_CFG";
@ -50,8 +42,6 @@ public class MicroPhotoContext {
public final static int DEFAULT_HEARTBEAT_FOR_SHARED_NW = 10; // minutes
public final static int DEFAULT_QUICK_HEARTBEAT = 60; // second
public static final long BUILD_TIME_WO_SID_20250418 = 1744905600000L;
public static class AppConfig {
public String cmdid;
public String server;
@ -63,7 +53,6 @@ public class MicroPhotoContext {
public int packetSize;
public int encryption; //0不加密 1明文 2加密
public int channels; //摄像头通道数目
public long modificationTime = 0;
}
public static class MasterConfig {
@ -92,7 +81,7 @@ public class MicroPhotoContext {
public static String getPrimaryStoragePath(Context context) {
try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[]) null);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[])null);
Object[] args = null;
String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args);
// first element in paths[] is primary storage path
@ -108,7 +97,7 @@ public class MicroPhotoContext {
public static String getSecondaryStoragePath(Context context) {
try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[]) null);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[])null);
Object[] args = null;
String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args);
// second element in paths[] is secondary storage path
@ -123,7 +112,7 @@ public class MicroPhotoContext {
public String getStorageState(Context context, String path) {
try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumeStateMethod = StorageManager.class.getMethod("getVolumeState", new Class[]{String.class});
Method getVolumeStateMethod = StorageManager.class.getMethod("getVolumeState", new Class[] {String.class});
String state = (String) getVolumeStateMethod.invoke(sm, path);
return state;
} catch (Exception e) {
@ -149,13 +138,13 @@ public class MicroPhotoContext {
return str;
}
public static boolean isAppAlive(Context context, String packageName, String serviceClassName) {
public static boolean isAppAlive(Context context, String packageName) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE);
boolean isRunning = false;
for (ActivityManager.RunningServiceInfo rsi : services) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName()) && TextUtils.equals(serviceClassName, rsi.service.getClassName())) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName())) {
isRunning = true;
break;
}
@ -164,21 +153,6 @@ public class MicroPhotoContext {
return isRunning;
}
/**
 * Looks up the process id of a running service.
 *
 * @param context          caller context
 * @param packageName      package that owns the service (case-insensitive match)
 * @param serviceClassName fully qualified service class name (exact match)
 * @return the pid of the matching running service, or 0 if it is not running
 */
public static int getProcessIdOfService(Context context, String packageName, String serviceClassName) {
    ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    for (ActivityManager.RunningServiceInfo info : am.getRunningServices(Integer.MAX_VALUE)) {
        boolean samePackage = packageName.equalsIgnoreCase(info.service.getPackageName());
        if (samePackage && TextUtils.equals(serviceClassName, info.service.getClassName())) {
            return info.pid;
        }
    }
    return 0;
}
public static String buildAppDir(Context contxt) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
@ -250,30 +224,6 @@ public class MicroPhotoContext {
return path;
}
/**
 * Builds (and creates when missing) the MpRes app directory under external storage.
 *
 * @param context caller context (unused; kept for signature parity with siblings)
 * @return the directory path ending with a separator, or null if it cannot be created
 */
public static String buildMpResAppDir(Context context) {
    String base = Environment.getExternalStorageDirectory().getAbsolutePath();
    String dir = base.endsWith(File.separator)
            ? base + PACKAGE_NAME_MPRES + File.separator
            : base + File.separator + PACKAGE_NAME_MPRES + File.separator;
    File dirFile = new File(dir);
    return (dirFile.exists() || dirFile.mkdirs()) ? dir : null;
}
/**
 * Reports whether the MpApp configuration file (data/App.json) exists
 * under the MpApp storage directory.
 *
 * @param context caller context used to resolve the app directory
 * @return true when data/App.json exists
 */
public static boolean hasMpAppConfig(Context context) {
    // The old local "boolean existed = true;" was never read; removed.
    String appPath = MicroPhotoContext.buildMpAppDir(context);
    File appPathFile = new File(appPath);
    File appConfigFile = new File(appPathFile, "data/App.json");
    return appConfigFile.exists();
}
public static AppConfig getMpAppConfig(Context context) {
String appPath = buildMpAppDir(context);
@ -281,39 +231,27 @@ public class MicroPhotoContext {
return getMpAppConfig(context, appPath + "data/App.json");
}
/**
 * Resolves the MpApp configuration file (data/App.json) inside the MpApp directory.
 * Note: concatenation is kept deliberately so the behavior matches the original
 * even when buildMpAppDir returns null.
 *
 * @param context caller context used to resolve the app directory
 * @return a File pointing at data/App.json (existence is not checked)
 */
public static File getMpAppConfigFile(Context context) {
    String configPath = buildMpAppDir(context) + "data/App.json";
    return new File(configPath);
}
public static AppConfig getMpAppConfig(Context context, String path) {
AppConfig appConfig = new AppConfig();
File file = new File(path);
try {
if (file.exists()) {
appConfig.modificationTime = file.lastModified();
String content = FilesUtils.readTextFile(path);
JSONObject jsonObject = TextUtils.isEmpty(content) ? new JSONObject() : new JSONObject(content);
appConfig.cmdid = jsonObject.optString(jsonObject.has("CMDID") ? "CMDID" : "cmdid", "");
appConfig.server = jsonObject.optString(jsonObject.has("server") ? "server" : "Server", "");
appConfig.port = jsonObject.optInt(jsonObject.has("port") ? "port" : "Port", 0);
appConfig.protocol = jsonObject.optInt(jsonObject.has("protocol") ? "protocol" : "Protocol", DEFAULT_PROTOCOL);
appConfig.networkProtocol = jsonObject.optInt(jsonObject.has("networkProtocol") ? "networkProtocol" : "NetworkProtocol", 0);
appConfig.network = jsonObject.optInt(jsonObject.has("network") ? "network" : "Network", 0);
appConfig.heartbeat = jsonObject.optInt("heartbeat", 0);
appConfig.packetSize = jsonObject.optInt("packetSize", 0);
appConfig.encryption = jsonObject.optInt("encryption", 0);
appConfig.channels = jsonObject.optInt("channels", 4);
if (appConfig.protocol == 0) {
appConfig.protocol = DEFAULT_PROTOCOL;
}
String content = FilesUtils.readTextFile(path);
JSONObject jsonObject = TextUtils.isEmpty(content) ? new JSONObject() : new JSONObject(content);
appConfig.cmdid = jsonObject.optString(jsonObject.has("cmdid") ? "cmdid" : "CMDID", "");
appConfig.server = jsonObject.optString(jsonObject.has("server") ? "server" : "Server", "");
appConfig.port = jsonObject.optInt(jsonObject.has("port") ? "port" : "Port", 0);
appConfig.protocol = jsonObject.optInt(jsonObject.has("protocol") ? "protocol" : "Protocol", DEFAULT_PROTOCOL);
appConfig.networkProtocol = jsonObject.optInt(jsonObject.has("networkProtocol") ? "networkProtocol" : "NetworkProtocol", 0);
appConfig.network = jsonObject.optInt(jsonObject.has("network") ? "network" : "Network", 0);
appConfig.heartbeat = jsonObject.optInt("heartbeat", 0);
appConfig.packetSize = jsonObject.optInt("packetSize", 0);
appConfig.encryption = jsonObject.optInt("encryption", 0);
appConfig.channels = jsonObject.optInt("channels", 4);
if (appConfig.protocol == 0) {
appConfig.protocol = DEFAULT_PROTOCOL;
}
} catch (JSONException e) {
e.printStackTrace();
@ -424,35 +362,21 @@ public class MicroPhotoContext {
}
}
// public static void restartMpApp(Context context, String reason) {
// /*
// Context context = MicroPhotoService.this.getApplicationContext();
// Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
//
// int noDelay = 1;
// intent.putExtra("noDelay", noDelay);
// PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
// AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE);
// mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // 1秒钟后重启应用
// System.exit(0);
//
// */
//
// restartApp(context, PACKAGE_NAME_MPAPP, reason);
// }
public static void restartMpApp(Context context, String reason, long delayedTimeMs) {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(PACKAGE_NAME_MPAPP);
public static void restartMpApp(Context context, String reason) {
/*
Context context = MicroPhotoService.this.getApplicationContext();
Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
PendingIntent restartIntent = PendingIntent.getActivity(context, 100, intent, PendingIntent.FLAG_UPDATE_CURRENT);
AlarmManager mgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
mgr.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + ((delayedTimeMs > 0) ? delayedTimeMs : 10), restartIntent);
PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE);
mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // 1秒钟后重启应用
System.exit(0);
*/
restartApp(context, PACKAGE_NAME_MPAPP, reason);
}
public static void restartApp(Context context, String packageName, String reason) {
@ -469,36 +393,27 @@ public class MicroPhotoContext {
*/
// try {
// Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
// if (intent != null) {
// intent.putExtra("noDelay", 1);
// if (!TextUtils.isEmpty(reason)) {
// intent.putExtra("reason", reason);
// }
// intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
// context.startActivity(intent);
// }
// } catch (Exception e) {
// e.printStackTrace();
// }
SysApi.forceStopApp(context,packageName);
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
//// 然后启动目标应用
try {
if (TextUtils.equals(packageName, PACKAGE_NAME_MPAPP)) {
Intent intent = new Intent(ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setPackage(PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
} else {
SysApi.forceStopApp(context, packageName);
}
Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
if (intent != null) {
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
context.startActivity(intent);
}
} catch (Exception e) {

@ -1,28 +1,19 @@
package com.xypower.common;
import android.annotation.SuppressLint;
import android.app.usage.NetworkStats;
import android.app.usage.NetworkStatsManager;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.ConnectivityManager;
import android.net.LinkProperties;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkInfo;
import android.net.Uri;
import android.net.wifi.WifiManager;
import android.os.RemoteException;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.text.format.Formatter;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URI;
import java.util.Enumeration;
import java.util.regex.Pattern;
@ -64,7 +55,7 @@ public class NetworkUtils {
}
public static String getMobileNetworkIp(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
ConnectivityManager connectivityManager = (ConnectivityManager)context.getSystemService(Context.CONNECTIVITY_SERVICE);
@SuppressLint("MissingPermission") NetworkInfo[] networkInfos = connectivityManager.getAllNetworkInfo();
if (networkInfos == null || networkInfos.length == 0) {
@ -98,29 +89,6 @@ public class NetworkUtils {
}
/**
 * Finds the first non-loopback IPv4 address on an interface that is up and whose
 * name does not contain "ap" (skips the hotspot interface).
 *
 * @return the dotted-quad IPv4 address, or null when none is found
 */
public static String getMobileIPAddress() {
    try {
        Enumeration<NetworkInterface> nifs = NetworkInterface.getNetworkInterfaces();
        while (nifs.hasMoreElements()) {
            NetworkInterface nif = nifs.nextElement();
            if (!nif.isUp() || nif.isLoopback()) {
                continue; // only live, non-loopback interfaces
            }
            String name = nif.getName();
            if (name == null || name.contains("ap")) {
                continue; // skip the access-point (hotspot) interface
            }
            Enumeration<InetAddress> addrs = nif.getInetAddresses();
            while (addrs.hasMoreElements()) {
                InetAddress addr = addrs.nextElement();
                // 4-byte raw address == IPv4
                if (!addr.isLoopbackAddress() && addr.getAddress().length == 4) {
                    return addr.getHostAddress();
                }
            }
        }
    } catch (SocketException e) {
        e.printStackTrace();
    }
    return null;
}
public static int addAPN(Context context, String name, String desc, String numeric, String user, String pwd) {
int id = -1;
String NUMERIC = getSIMInfo(context);
@ -196,40 +164,4 @@ public class NetworkUtils {
*/
/** Per-app mobile-network traffic totals accumulated by getApplicationQuerySummary. */
public static class Usage {
public long mobleRxBytes;// mobile download (received) bytes; field name keeps the original "moble" typo for compatibility
public long mobleTxBytes;// mobile upload (transmitted) bytes
public String uid;// package name — NOTE(review): named "uid" but documented as package name in the original; confirm against callers
}
/**
 * Sums mobile-network rx/tx bytes attributed to one uid over [startTime, endTime]
 * using NetworkStatsManager#querySummary. Returns zeroed totals below API M or
 * when the stats service is unavailable.
 *
 * @param context   caller context
 * @param startTime start of the interval, epoch millis
 * @param endTime   end of the interval, epoch millis
 * @param uid       application uid whose buckets are summed
 * @return a Usage with accumulated mobile rx/tx bytes (never null)
 */
public static Usage getApplicationQuerySummary(Context context, long startTime, long endTime, int uid) {
    Usage usage = new Usage();
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
        NetworkStatsManager nsm = (NetworkStatsManager) context.getSystemService(Context.NETWORK_STATS_SERVICE);
        // Explicit check instead of "assert": assertions are disabled at runtime on Android.
        if (nsm == null) {
            return usage;
        }
        NetworkStats mobile = null;
        try {
            mobile = nsm.querySummary(ConnectivityManager.TYPE_MOBILE, null, startTime, endTime);
            NetworkStats.Bucket bucket = new NetworkStats.Bucket();
            // Canonical iteration: check hasNextBucket() BEFORE reading. The old
            // do/while called getNextBucket() first, reading a garbage bucket when
            // the result set was empty.
            while (mobile.hasNextBucket()) {
                mobile.getNextBucket(bucket);
                if (bucket.getUid() == uid) {
                    usage.mobleRxBytes += bucket.getRxBytes();
                    usage.mobleTxBytes += bucket.getTxBytes();
                }
            }
        } catch (RemoteException e) {
            e.printStackTrace();
        } finally {
            if (mobile != null) {
                mobile.close(); // NetworkStats holds a session that must be released
            }
        }
    }
    return usage;
}
}

@ -6,9 +6,6 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
@ -18,7 +15,6 @@ public class ZipUtils {
public static void ZipFolder(File srcDirectory, File zipFile, FilenameFilter filter) {
ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null;
@ -26,8 +22,8 @@ public class ZipUtils {
fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip);
int len;
byte[] buffer = new byte[1024 * 256];
ZipEntry zipEntry = null;
File[] subFiles = srcDirectory.listFiles(filter);
@ -38,11 +34,8 @@ public class ZipUtils {
inputStream = new FileInputStream(subFile);
FileChannel fileChannel = inputStream.getChannel();
try {
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
while ((len = inputStream.read(buffer)) != -1) {
outZip.write(buffer, 0, len);
}
FilesUtils.closeFriendly(inputStream);
@ -59,22 +52,20 @@ public class ZipUtils {
ex.printStackTrace();
}
FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip);
}
}
public static void ZipFolders(Map<String, File> srcDirectories, File zipFile, FilenameFilter filter) {
ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip);
int len;
byte[] buffer = new byte[1024 * 256];
ZipEntry zipEntry = null;
for (Map.Entry<String, File> srcDirectory : srcDirectories.entrySet()) {
@ -88,11 +79,8 @@ public class ZipUtils {
inputStream = new FileInputStream(subFile);
FileChannel fileChannel = inputStream.getChannel();
try {
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
while ((len = inputStream.read(buffer)) != -1) {
outZip.write(buffer, 0, len);
}
FilesUtils.closeFriendly(inputStream);
@ -109,9 +97,7 @@ public class ZipUtils {
} catch (Exception ex) {
ex.printStackTrace();
}
FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip);
}
}
@ -119,13 +105,13 @@ public class ZipUtils {
public static void ZipFiles(List<String> srcFiles, File zipFile) {
ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip);
int len = 0;
byte[] buffer = new byte[1024 * 256];
for (String path : srcFiles) {
File file = new File(path);
@ -133,16 +119,12 @@ public class ZipUtils {
continue;
}
ZipEntry zipEntry = new ZipEntry(srcFiles.size() > 1 ? path.substring(1) : file.getName());
outZip.putNextEntry(zipEntry);
inputStream = new FileInputStream(file);
FileChannel fileChannel = inputStream.getChannel();
try {
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
}
outZip.putNextEntry(zipEntry);
while ((len = inputStream.read(buffer)) != -1) {
outZip.write(buffer, 0, len);
}
outZip.closeEntry();
FilesUtils.closeFriendly(inputStream);
}
@ -159,7 +141,6 @@ public class ZipUtils {
}
}
FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip);
}
}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"hdrStep":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"recognization":0,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"bsManufacturer":"\u4e0a\u6d77\u6b23\u5f71\u7535\u529b\u79d1\u6280\u80a1\u4efd\u6709\u9650\u516c\u53f8","channels":3,"encryption":0,"equipName":"\u56fe\u50cf\u5728\u7ebf\u76d1\u6d4b","heartbeat":10,"imgQuality":80,"model":"MSRDT-1-WP","network":0,"networkProtocol":0,"outputDbgInfo":0,"packetBase":1,"packetSize":32768,"port":6891,"postDataPaused":0,"productionDate":1717200000,"protocol":65298,"quality":80,"reportFault":0,"server":"61.169.135.146","timeForKeepingLogs":1296000,"timeForKeepingPhotos":1296000,"upgradePacketBase":1,"workStatusTimes":3}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save